diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 7da1f9608ee..00000000000 --- a/.flake8 +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length = 100 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7a8fb51c6cd..9ad35498875 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -12,9 +12,6 @@ updates: labels: - "CherryPick" - "dependencies" - - "6.13.z" - - "6.12.z" - - "6.11.z" # Maintain dependencies for our GitHub Actions - package-ecosystem: "github-actions" @@ -24,6 +21,3 @@ updates: labels: - "CherryPick" - "dependencies" - - "6.13.z" - - "6.12.z" - - "6.11.z" diff --git a/.github/dependency_tests.yaml b/.github/dependency_tests.yaml index 43715259676..ae8b1133199 100644 --- a/.github/dependency_tests.yaml +++ b/.github/dependency_tests.yaml @@ -9,4 +9,4 @@ pytest: "tests/foreman/ -m 'build_sanity'" python-box: "tests/foreman/cli/test_webhook.py -k 'test_positive_end_to_end'" requests: "tests/foreman/cli/test_repository.py -k 'test_file_repo_contains_only_newer_file'" wait-for: "tests/foreman/api/test_reporttemplates.py -k 'test_positive_schedule_entitlements_report'" -wrapanapi: "tests/foreman/longrun/test_provisioning_computeresource.py -k 'test_positive_provision_vmware_with_host_group'" +wrapanapi: "tests/foreman/api/test_computeresource_gce.py -k 'test_positive_gce_host_provisioned'" diff --git a/.github/workflows/auto_assignment.yaml b/.github/workflows/auto_assignment.yaml index 72c62c984a9..4e09e262542 100644 --- a/.github/workflows/auto_assignment.yaml +++ b/.github/workflows/auto_assignment.yaml @@ -15,6 +15,6 @@ jobs: if: "!contains(github.event.pull_request.labels.*.name, 'Auto_Cherry_Picked')" runs-on: ubuntu-latest steps: - - uses: kentaro-m/auto-assign-action@v1.2.5 + - uses: kentaro-m/auto-assign-action@v2.0.0 with: configuration-path: ".github/auto_assign.yml" diff --git a/.github/workflows/auto_cherry_pick.yml b/.github/workflows/auto_cherry_pick.yml index a419a5bc476..301d9181b9d 100644 --- a/.github/workflows/auto_cherry_pick.yml +++ b/.github/workflows/auto_cherry_pick.yml @@ -3,13 +3,14 @@ name: auto_cherry_pick_commits on: pull_request_target: - types: [closed, labeled] + types: [closed] # Github & Parent PR Env vars env: assignee: ${{ github.event.pull_request.assignee.login }} title: ${{ github.event.pull_request.title }} number: ${{ github.event.number }} + is_dependabot_pr: '' jobs: @@ -22,11 +23,12 @@ jobs: prt_comment: ${{steps.fc.outputs.comment-body}} steps: - name: Find Comment - uses: peter-evans/find-comment@v2 + uses: peter-evans/find-comment@v3 id: fc with: issue-number: ${{ env.number }} body-includes: "trigger: test-robottelo" + direction: last # Auto CherryPicking and Failure Recording auto-cherry-pick: @@ -39,12 +41,24 @@ jobs: label: ${{ github.event.pull_request.labels.*.name }} steps: + # Needed to avoid out-of-memory error + - name: Set Swap Space + uses: pierotofy/set-swap-space@master + with: + swap-size-gb: 10 + ## Robottelo Repo Checkout - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 if: ${{ startsWith(matrix.label, '6.') && matrix.label != github.base_ref }} with: fetch-depth: 0 + ## Set env var for dependencies label PR + - name: Set env var is_dependabot_pr to `dependencies` to set the label + if: contains(github.event.pull_request.labels.*.name, 'dependencies') + run: | + echo "is_dependabot_pr=dependencies" >> $GITHUB_ENV + ## CherryPicking and AutoMerging - name: Cherrypicking to zStream branch id: cherrypick @@ -57,20 +71,22 @@ jobs: Auto_Cherry_Picked ${{ matrix.label }} 
No-CherryPick + ${{ env.is_dependabot_pr }} assignees: ${{ env.assignee }} - name: Add Parent PR's PRT comment to Auto_Cherry_Picked PR's id: add-parent-prt-comment - if: ${{ always() && steps.cherrypick.outcome == 'success' }} - uses: mshick/add-pr-comment@v2 + if: ${{ always() && needs.find-the-parent-prt-comment.outputs.prt_comment != '' && steps.cherrypick.outcome == 'success' }} + uses: thollander/actions-comment-pull-request@v2 with: - issue: ${{ steps.cherrypick.outputs.number }} - message: ${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} - repo-token: ${{ secrets.CHERRYPICK_PAT }} + message: | + ${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} + pr_number: ${{ steps.cherrypick.outputs.number }} + GITHUB_TOKEN: ${{ secrets.CHERRYPICK_PAT }} - name: is autoMerging enabled for Auto CherryPicked PRs ? if: ${{ always() && steps.cherrypick.outcome == 'success' && contains(github.event.pull_request.labels.*.name, 'AutoMerge_Cherry_Picked') }} - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: github-token: ${{ secrets.CHERRYPICK_PAT }} script: | @@ -81,10 +97,25 @@ jobs: labels: ["AutoMerge_Cherry_Picked"] }) - ## Failure Logging to issues and GChat Group + - name: Check if cherrypick pr is created + id: search_pr + if: always() + run: | + PR_TITLE="[${{ matrix.label }}] ${{ env.title }}" + API_URL="https://api.github.com/repos/${{ github.repository }}/pulls?state=open" + PR_SEARCH_RESULT=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" "$API_URL" | jq --arg title "$PR_TITLE" '.[] | select(.title == $title)') + if [ -n "$PR_SEARCH_RESULT" ]; then + echo "pr_found=true" >> $GITHUB_OUTPUT + echo "PR is Found with title $PR_TITLE" + else + echo "pr_found=false" >> $GITHUB_OUTPUT + echo "PR is not Found with title $PR_TITLE" + fi + + ## Failure Logging to issues - name: Create Github issue on cherrypick failure id: create-issue - if: ${{ always() && steps.cherrypick.outcome == 'failure' }} + if: ${{ always() && steps.search_pr.outputs.pr_found == 'false' && steps.cherrypick.outcome != 'success' && startsWith(matrix.label, '6.') && matrix.label != github.base_ref }} uses: dacbd/create-issue-action@main with: token: ${{ secrets.CHERRYPICK_PAT }} diff --git a/.github/workflows/auto_cherry_pick_merge.yaml b/.github/workflows/auto_cherry_pick_merge.yaml index 303b6caa037..e0cb8e48173 100644 --- a/.github/workflows/auto_cherry_pick_merge.yaml +++ b/.github/workflows/auto_cherry_pick_merge.yaml @@ -40,7 +40,7 @@ jobs: - name: Wait for other status checks to Pass id: waitforstatuschecks - uses: lewagon/wait-on-check-action@v1.3.1 + uses: lewagon/wait-on-check-action@v1.3.3 with: ref: ${{ github.head_ref }} repo-token: ${{ secrets.CHERRYPICK_PAT }} @@ -53,7 +53,7 @@ jobs: uses: omkarkhatavkar/wait-for-status-checks@main with: ref: ${{ github.head_ref }} - context: 'Robottelo Runner' + context: 'Robottelo-Runner' wait-interval: 60 count: 100 @@ -67,7 +67,7 @@ jobs: - id: automerge name: Auto merge of cherry-picked PRs. 
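      # Editor's note: per pascalgn/automerge-action's documented semantics,
      # every label listed in MERGE_LABELS below must be present, so a PR is
      # auto-merged only when it carries both AutoMerge_Cherry_Picked and
      # Auto_Cherry_Picked.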
- uses: "pascalgn/automerge-action@v0.15.6" + uses: "pascalgn/automerge-action@v0.16.2" env: GITHUB_TOKEN: "${{ secrets.CHERRYPICK_PAT }}" MERGE_LABELS: "AutoMerge_Cherry_Picked, Auto_Cherry_Picked" diff --git a/.github/workflows/dependency_merge.yml b/.github/workflows/dependency_merge.yml index 7a85be38583..22ec56e6d17 100644 --- a/.github/workflows/dependency_merge.yml +++ b/.github/workflows/dependency_merge.yml @@ -1,46 +1,32 @@ -name: Dependabot Auto Merge -on: pull_request - -permissions: - pull-requests: write +name: Dependabot Auto Merge - ZStream +on: + pull_request_target: + branches-ignore: + - master jobs: dependabot: name: dependabot-auto-merge runs-on: ubuntu-latest - if: github.event.pull_request.user.login == 'dependabot[bot]' - steps: - - name: Dependabot metadata - id: metadata - uses: dependabot/fetch-metadata@v1 - with: - github-token: "${{ secrets.GITHUB_TOKEN }}" - - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 + if: | + contains(github.event.pull_request.labels.*.name, 'dependencies') - - name: Find the tests for the dependency requirement - id: yaml - uses: mikefarah/yq@master + steps: + - id: find-prt-comment + name: Find the prt comment + uses: peter-evans/find-comment@v2 with: - cmd: yq e ".${{ steps.metadata.outputs.dependency-names }}" ./.github/dependency_tests.yaml + issue-number: ${{ github.event.number }} + body-includes: "trigger: test-robottelo" + direction: last - - name: Add the PRT Comment - if: steps.yaml.outputs.result != 'null' - uses: peter-evans/create-or-update-comment@v3 - with: - issue-number: ${{ github.event.pull_request.number }} - body: | - trigger: test-robottelo\r - pytest: ${{ steps.yaml.outputs.result }} - name: Wait for PRT checks to get initiated - if: steps.yaml.outputs.result != 'null' + if: steps.find-prt-comment.outputs.comment-body != '' run: | echo "Waiting for ~ 10 mins, PRT to be initiated." && sleep 600 + - name: Fetch and Verify the PRT status - if: steps.yaml.outputs.result != 'null' + if: steps.find-prt-comment.outputs.comment-body != '' id: outcome uses: omkarkhatavkar/wait-for-status-checks@main with: @@ -51,19 +37,19 @@ jobs: - name: Wait for other status checks to Pass id: waitforstatuschecks - uses: lewagon/wait-on-check-action@v1.3.1 + uses: lewagon/wait-on-check-action@v1.3.3 with: ref: ${{ github.head_ref }} - repo-token: ${{ secrets.CHERRYPICK_PAT }} + repo-token: ${{ secrets.GITHUB_TOKEN }} wait-interval: 60 running-workflow-name: 'dependabot-auto-merge' allowed-conclusions: success,skipped - id: automerge name: Auto merge of dependabot PRs. 
- uses: "pascalgn/automerge-action@v0.15.6" + uses: "pascalgn/automerge-action@v0.16.2" env: - GITHUB_TOKEN: "${{ secrets.CHERRYPICK_PAT }}" + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" MERGE_LABELS: "dependencies" MERGE_METHOD: "squash" MERGE_RETRIES: 5 diff --git a/.github/workflows/dispatch_release.yml b/.github/workflows/dispatch_release.yml deleted file mode 100644 index dc57c0bc877..00000000000 --- a/.github/workflows/dispatch_release.yml +++ /dev/null @@ -1,31 +0,0 @@ -### The auto release workflow triggered through dispatch request from CI -name: auto-release - -# Run on workflow dispatch from CI -on: - workflow_dispatch: - inputs: - tag_name: - type: string - description: Name of the tag - -jobs: - auto-tag-and-release: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Git User setup - run: "git config --local user.email Satellite-QE.satqe.com && git config --local user.name Satellite-QE" - - - name: Tag latest commit - run: "git tag -a ${{ github.event.inputs.tag_name }} -m 'Tagged By SatelliteQE Automation User'" - - - name: Push the tag to the upstream - run: "git push ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git --tags" - - - name: create a new release from the tag - env: - credentials: ${{ secrets.GH_TOKEN }} - run: "curl -L -X POST -H \"Authorization: Bearer ${{ secrets.SATQE_GH_TOKEN }}\" ${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases -d '{\"tag_name\": \"${{ github.event.inputs.tag_name }}\", \"target_commitish\":\"master\", \"name\":\"${{ github.event.inputs.tag_name }}\", \"draft\":false, \"prerelease\":true, \"generate_release_notes\": true}'" diff --git a/.github/workflows/merge_to_master.yml b/.github/workflows/merge_to_master.yml deleted file mode 100644 index 915e70aef0b..00000000000 --- a/.github/workflows/merge_to_master.yml +++ /dev/null @@ -1,102 +0,0 @@ -# CI stages to execute against master branch on PR merge -name: update_robottelo_image - -on: - push: - branches: - - master - -env: - PYCURL_SSL_LIBRARY: openssl - ROBOTTELO_BUGZILLA__API_KEY: ${{ secrets.BUGZILLA_KEY }} - -jobs: - codechecks: - name: Code Quality - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.9', '3.10', '3.11'] - steps: - - name: Checkout Robottelo - uses: actions/checkout@v3 - - - name: Set Up Python-${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Dependencies - run: | - sudo apt-get update -y - sudo apt-get install -y libgnutls28-dev libcurl4-openssl-dev libssl-dev - wget https://raw.githubusercontent.com/SatelliteQE/broker/master/broker_settings.yaml.example - # link vs compile time ssl implementations can break the environment when installing requirements - # Uninstall pycurl - its likely not installed, but in case the ubuntu-latest packages change - # Then compile and install it with PYCURL_SSL_LIBRARY set to openssl - pip install -U pip - pip uninstall -y pycurl - pip install --compile --no-cache-dir pycurl - pip install -U --no-cache-dir -r requirements.txt -r requirements-optional.txt - for conffile in conf/*.yaml.template; do mv -- "$conffile" "${conffile%.yaml.template}.yaml"; done - cp broker_settings.yaml.example broker_settings.yaml - cp .env.example .env - - - name: Pre Commit Checks - uses: pre-commit/action@v3.0.0 - - - name: Collect Tests - run: | - pytest --collect-only --disable-pytest-warnings tests/foreman/ tests/robottelo/ - pytest --collect-only --disable-pytest-warnings -m pre_upgrade tests/upgrades/ - pytest --collect-only 
--disable-pytest-warnings -m post_upgrade tests/upgrades/ - - - name: Make Docs - run: | - make test-docstrings - make docs - - - name: Analysis (git diff) - if: failure() - run: git diff - - - robottelo_container: - needs: codechecks - name: Update Robottelo container image on Quay. - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Get image tag - id: image_tag - run: | - echo -n ::set-output name=IMAGE_TAG:: - TAG="${GITHUB_REF##*/}" - if [ "${TAG}" == "master" ]; then - TAG="latest" - fi - echo "${TAG}" - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Login to Quay Container Registry - uses: docker/login-action@v2 - with: - registry: ${{ secrets.QUAY_SERVER }} - username: ${{ secrets.QUAY_USERNAME }} - password: ${{ secrets.QUAY_PASSWORD }} - - - name: Build and push image to Quay - uses: docker/build-push-action@v4 - with: - context: . - file: ./Dockerfile - push: true - tags: ${{ secrets.QUAY_SERVER }}/${{ secrets.QUAY_NAMESPACE }}/robottelo:${{ steps.image_tag.outputs.IMAGE_TAG }} diff --git a/.github/workflows/prt_labels.yml b/.github/workflows/prt_labels.yml new file mode 100644 index 00000000000..52dd68589b3 --- /dev/null +++ b/.github/workflows/prt_labels.yml @@ -0,0 +1,49 @@ +name: Remove the PRT label, for the new commit + +on: + pull_request_target: + types: ["synchronize"] + +jobs: + prt_labels_remover: + name: remove the PRT label when amendments or new commits added to PR + runs-on: ubuntu-latest + if: "(contains(github.event.pull_request.labels.*.name, 'PRT-Passed') || contains(github.event.pull_request.labels.*.name, 'PRT-Failed'))" + steps: + - name: Avoid the race condition as PRT result will be cleaned + run: | + echo "Avoiding the race condition if prt result will be cleaned" && sleep 60 + + - name: Fetch the PRT status + id: prt + uses: omkarkhatavkar/wait-for-status-checks@main + with: + ref: ${{ github.head_ref }} + context: 'Robottelo-Runner' + wait-interval: 2 + count: 5 + + - name: remove the PRT Passed/Failed label, for new commit + if: always() && ${{steps.prt.outputs.result}} == 'not_found' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.CHERRYPICK_PAT }} + script: | + const prNumber = '${{ github.event.number }}'; + const issue = await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + }); + const labelsToRemove = ['PRT-Failed', 'PRT-Passed']; + const labelsToRemoveFiltered = labelsToRemove.filter(label => issue.data.labels.some(({ name }) => name === label)); + if (labelsToRemoveFiltered.length > 0) { + await Promise.all(labelsToRemoveFiltered.map(async label => { + await github.rest.issues.removeLabel({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + name: label + }); + })); + } diff --git a/.github/workflows/prt_result.yml b/.github/workflows/prt_result.yml new file mode 100644 index 00000000000..93227fcf272 --- /dev/null +++ b/.github/workflows/prt_result.yml @@ -0,0 +1,84 @@ +### The prt result workflow triggered through dispatch request from CI +name: post-prt-result + +# Run on workflow dispatch from CI +on: + workflow_dispatch: + inputs: + pr_number: + type: string + description: pr number for PRT run + build_number: + type: string + description: build number for PRT run + pytest_result: + type: string + description: pytest summary result line + build_status: + type: string + description: status of 
jenkins build e.g. success, unstable or error + prt_comment: + type: string + description: prt pytest comment triggered the PRT checks + + +jobs: + post-the-prt-result: + runs-on: ubuntu-latest + + steps: + - name: Add last PRT result into the github comment + id: add-prt-comment + if: ${{ always() && github.event.inputs.pytest_result != '' }} + uses: thollander/actions-comment-pull-request@v2 + with: + message: | + **PRT Result** + ``` + Build Number: ${{ github.event.inputs.build_number }} + Build Status: ${{ github.event.inputs.build_status }} + PRT Comment: ${{ github.event.inputs.prt_comment }} + Test Result : ${{ github.event.inputs.pytest_result }} + ``` + pr_number: ${{ github.event.inputs.pr_number }} + GITHUB_TOKEN: ${{ secrets.CHERRYPICK_PAT }} + + - name: Add the PRT passed/failed labels + id: prt-status + if: ${{ always() && github.event.inputs.build_status != '' }} + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.CHERRYPICK_PAT }} + script: | + const prNumber = ${{ github.event.inputs.pr_number }}; + const buildStatus = "${{ github.event.inputs.build_status }}"; + const labelToAdd = buildStatus === "SUCCESS" ? "PRT-Passed" : "PRT-Failed"; + github.rest.issues.addLabels({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + labels: [labelToAdd] + }); + - name: Remove failed label on test pass or vice-versa + if: ${{ always() && github.event.inputs.build_status != '' }} + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.CHERRYPICK_PAT }} + script: | + const prNumber = ${{ github.event.inputs.pr_number }}; + const issue = await github.rest.issues.get({ + owner: context.issue.owner, + repo: context.issue.repo, + issue_number: prNumber, + }); + const buildStatus = "${{ github.event.inputs.build_status }}"; + const labelToRemove = buildStatus === "SUCCESS" ? 
"PRT-Failed" : "PRT-Passed"; + const labelExists = issue.data.labels.some(({ name }) => name === labelToRemove); + if (labelExists) { + github.rest.issues.removeLabel({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + name: [labelToRemove] + }); + } diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 972c1960983..3580cf23a3f 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -15,13 +15,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11', '3.12'] steps: - name: Checkout Robottelo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set Up Python-${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -46,18 +46,25 @@ jobs: - name: Collect Tests run: | + # To skip vault login in pull request checks + export VAULT_SECRET_ID_FOR_DYNACONF=somesecret pytest --collect-only --disable-pytest-warnings tests/foreman/ tests/robottelo/ pytest --collect-only --disable-pytest-warnings -m pre_upgrade tests/upgrades/ pytest --collect-only --disable-pytest-warnings -m post_upgrade tests/upgrades/ - name: Collect Tests with xdist run: | + # To skip vault login in pull request checks + export VAULT_SECRET_ID_FOR_DYNACONF=somesecret pytest --collect-only --setup-plan --disable-pytest-warnings -n 2 tests/foreman/ tests/robottelo/ pytest --collect-only --setup-plan --disable-pytest-warnings -n 2 -m pre_upgrade tests/upgrades/ pytest --collect-only --setup-plan --disable-pytest-warnings -n 2 -m post_upgrade tests/upgrades/ - name: Run Robottelo's Tests - run: pytest -sv tests/robottelo/ + run: | + # To skip vault login in pull request checks + export VAULT_SECRET_ID_FOR_DYNACONF=somesecret + pytest -sv tests/robottelo/ - name: Make Docs run: | diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 85daa8f4292..2c5edda7dda 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -16,7 +16,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: repo-token: ${{ secrets.GITHUB_TOKEN }} days-before-issue-stale: 90 diff --git a/.github/workflows/update_robottelo_image.yml b/.github/workflows/update_robottelo_image.yml new file mode 100644 index 00000000000..945f76b15dc --- /dev/null +++ b/.github/workflows/update_robottelo_image.yml @@ -0,0 +1,44 @@ +# Update robottelo image on quay. +name: update_robottelo_image + +on: + push: + branches: + - master + - 6.*.z + workflow_dispatch: + +jobs: + robottelo_container: + name: Update Robottelo container image on Quay. 
+ runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Get image tag + id: image_tag + run: | + TAG="${GITHUB_REF##*/}" + TAG=${TAG/master/latest} + echo "IMAGE_TAG=$TAG" >> $GITHUB_OUTPUT + + - name: Build Robottelo image + id: build-image + uses: redhat-actions/buildah-build@v2 + with: + image: robottelo + tags: ${{ steps.image_tag.outputs.IMAGE_TAG }} + containerfiles: | + ./Dockerfile + + - name: Push Robottelo image to quay.io + id: push-to-quay + uses: redhat-actions/push-to-registry@v2 + with: + image: robottelo + tags: ${{ steps.image_tag.outputs.IMAGE_TAG }} + registry: ${{ secrets.QUAY_SERVER }}/${{ secrets.QUAY_NAMESPACE }} + username: ${{ secrets.QUAY_USERNAME }} + password: ${{ secrets.QUAY_PASSWORD }} diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml index e19f5b02a4d..a08485c8f49 100644 --- a/.github/workflows/weekly.yml +++ b/.github/workflows/weekly.yml @@ -14,13 +14,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.9] + python-version: [3.12] steps: - name: Checkout Robottelo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set Up Python-${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.gitignore b/.gitignore index 07bd9ff5076..37a548e7d4f 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,7 @@ ven*/ # pytest-fixture-tools artifacts artifacts/ + +# pyvcloud artifacts +# workaround till https://github.com/vmware/pyvcloud/pull/766 is merged and released +**vcd_sdk.log diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7626efe6a69..c86df504763 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,31 +1,22 @@ # configuration for pre-commit git hooks repos: -- repo: https://github.com/asottile/reorder_python_imports - rev: v3.9.0 - hooks: - - id: reorder-python-imports - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: - id: trailing-whitespace exclude: tests/foreman/data/ - - id: end-of-file-fixer - id: check-yaml - id: debug-statements -- repo: https://github.com/asottile/pyupgrade - rev: v3.3.0 - hooks: - - id: pyupgrade - args: [--py38-plus] - repo: https://github.com/psf/black rev: 22.10.0 hooks: - id: black -- repo: https://github.com/pycqa/flake8 - rev: 6.0.0 +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.277 hooks: - - id: flake8 + - id: ruff + args: [--fix, --exit-non-zero-on-fix] - repo: local hooks: - id: fix-uuids diff --git a/Dockerfile b/Dockerfile index de7b12ac71d..2b88a306431 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM fedora +FROM fedora:38 MAINTAINER https://github.com/SatelliteQE RUN dnf install -y gcc git make cmake libffi-devel openssl-devel python3-devel \ @@ -6,7 +6,6 @@ RUN dnf install -y gcc git make cmake libffi-devel openssl-devel python3-devel \ COPY / /robottelo/ WORKDIR /robottelo -RUN curl https://raw.githubusercontent.com/SatelliteQE/broker/master/broker_settings.yaml.example -o broker_settings.yaml ENV PYCURL_SSL_LIBRARY=openssl RUN pip install -r requirements.txt diff --git a/Makefile b/Makefile index 2f73c60d3b4..ff0920c70ee 100644 --- a/Makefile +++ b/Makefile @@ -73,6 +73,7 @@ test-docstrings: uuid-check testimony $(TESTIMONY_OPTIONS) validate tests/foreman/ui testimony $(TESTIMONY_OPTIONS) validate tests/foreman/virtwho testimony $(TESTIMONY_OPTIONS) validate tests/foreman/maintain + testimony $(TESTIMONY_OPTIONS) validate 
tests/upgrades test-robottelo: $(info "Running robottelo framework unit tests...") diff --git a/conf/broker.yaml.template b/conf/broker.yaml.template index 5f5067c321a..0c039df13bb 100644 --- a/conf/broker.yaml.template +++ b/conf/broker.yaml.template @@ -1,7 +1,5 @@ BROKER: - # The path where your broker settings and inventory are located - # If you leave it blank, the default is the output of `broker --version` - BROKER_DIRECTORY: + # Broker has its own config which you can find by running `broker --version` HOST_WORKFLOWS: POWER_CONTROL: vm-power-operation EXTEND: extend-vm diff --git a/conf/capsule.yaml.template b/conf/capsule.yaml.template index cde2ecf41b5..b618bf57ea9 100644 --- a/conf/capsule.yaml.template +++ b/conf/capsule.yaml.template @@ -1,7 +1,6 @@ CAPSULE: - # Capsule name to be used in the CapsuleVirtualMachine class (will be deprecated) - SATELLITE_VERSION_TAG: "@jinja {{this.robottelo.satellite_version | replace('.', '')}}" - INSTANCE_NAME: "@format qe-sat{this[capsule].satellite_version_tag}-rhel7-tierX-capsule" + # Capsule hostname for N-minus testing + HOSTNAME: VERSION: # The full release version (6.9.2) RELEASE: # populate with capsule version @@ -13,6 +12,8 @@ CAPSULE: # The base os rhel version where the capsule installed # RHEL_VERSION: # The Ansible Tower workflow used to deploy a capsule - DEPLOY_WORKFLOW: deploy-sat-capsule + DEPLOY_WORKFLOWS: + PRODUCT: deploy-capsule # workflow to deploy OS with product running on top of it + OS: deploy-rhel # workflow to deploy OS that is ready to run the product # Dictionary of arguments which should be passed along to the deploy workflow - # DEPLOY_ARGUMENTS: + DEPLOY_ARGUMENTS: diff --git a/conf/docker.yaml.template b/conf/docker.yaml.template index 81fc6a84dd0..4176b38ee8a 100644 --- a/conf/docker.yaml.template +++ b/conf/docker.yaml.template @@ -9,3 +9,23 @@ DOCKER: PRIVATE_REGISTRY_USERNAME: # Private docker registry password PRIVATE_REGISTRY_PASSWORD: + # Image Pass Registry + IMAGE_REGISTRY: + # image repository URL + URL: + # Pull a non-namespace image using the image pass registry proxy + NON_NAMESPACE: + # Proxy for the non-namespace image + PROXY: + # Username for the non-namespace image pass registry proxy + USERNAME: + # Password for the non-namespace image pass registry proxy + PASSWORD: + # Pull a namespace image using the image pass registry proxy + NAMESPACE: + # proxy for the namespace image + PROXY: + # Username for the namespace image pass registry proxy + USERNAME: + # Password for the namespace image pass registry proxy + PASSWORD: diff --git a/conf/dynaconf_hooks.py b/conf/dynaconf_hooks.py index 116207aa165..85baf7d52cb 100644 --- a/conf/dynaconf_hooks.py +++ b/conf/dynaconf_hooks.py @@ -1,5 +1,9 @@ +from inspect import getmembers, isfunction import json from pathlib import Path +import sys + +from box import Box from robottelo.logging import logger from robottelo.utils.ohsnap import dogfood_repository @@ -22,6 +26,7 @@ def post(settings): ) data = get_repos_config(settings) write_cache(settings_cache_path, data) + config_migrations(settings, data) data['dynaconf_merge'] = True return data @@ -33,7 +38,32 @@ def write_cache(path, data): def read_cache(path): logger.info(f'Using settings cache file: {path}') - return json.loads(path.read_text()) + return Box(json.loads(path.read_text())) + + +def config_migrations(settings, data): + """Run config migrations + + Fetch the config migrations from the conf/migrations.py file and run them. 
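+    Migration functions are discovered by the `migration_` name prefix and run
+    in name order (`inspect.getmembers` returns members sorted by name), so
+    date-prefixed migrations execute oldest first.
+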
+    :param settings: dynaconf settings object
+    :type settings: LazySettings
+    :param data: settings data to be merged with the rest of the settings
+    :type data: dict
+    """
+    logger.info('Running config migration hooks')
+    sys.path.append(str(Path(__file__).parent))
+    from conf import migrations
+
+    migration_functions = [
+        mf for mf in getmembers(migrations, isfunction) if mf[0].startswith('migration_')
+    ]
+    # migration_functions is a sorted list of tuples (name, function)
+    for name, func in migration_functions:
+        logger.debug(f'Running {name}')
+        func(settings, data)
+        logger.debug(f'Finished running {name}')
+    logger.info('Finished running config migration hooks')
 
 
 def get_repos_config(settings):
@@ -46,7 +76,7 @@
         'The Ohsnap URL is invalid! Post-configuration hooks will not run. '
         'Default configuration will be used.'
     )
-    return {'REPOS': data}
+    return Box({'REPOS': data})
 
 
 def get_ohsnap_repos(settings):
@@ -55,9 +85,9 @@
         settings,
         repo='capsule',
         product='capsule',
-        release=settings.server.version.release,
-        os_release=settings.server.version.rhel_version,
-        snap=settings.server.version.snap,
+        release=settings.capsule.version.release,
+        os_release=settings.capsule.version.rhel_version,
+        snap=settings.capsule.version.snap,
     )
 
     data['SATELLITE_REPO'] = get_ohsnap_repo_url(
@@ -127,7 +157,7 @@ def get_dogfood_satclient_repos(settings):
 
 
 def get_ohsnap_repo_url(settings, repo, product=None, release=None, os_release=None, snap=''):
-    repourl = dogfood_repository(
+    return dogfood_repository(
         settings.ohsnap,
         repo=repo,
         product=product,
@@ -135,4 +165,3 @@
         os_release=os_release,
         snap=snap,
     ).baseurl
-    return repourl
diff --git a/conf/flavors.yaml.template b/conf/flavors.yaml.template
new file mode 100644
index 00000000000..73695663cc8
--- /dev/null
+++ b/conf/flavors.yaml.template
@@ -0,0 +1,9 @@
+FLAVORS:
+  # 6 CPU, 24 GB RAM, 100 GB disk
+  DEFAULT: satqe-ssd.standard.std
+  # 16 CPU, 32 GB RAM, 160 GB disk
+  LARGE: satqe-ssd.standard.xxxl
+  # 6 CPU, 24 GB RAM, 500 GB disk
+  UPGRADE: satqe-ssd.upgrade.std
+  # 8 CPU, 32 GB RAM, 500 GB disk
+  CUSTOM_DB: satqe-ssd.customerdb.std
diff --git a/conf/infoblox.yaml.template b/conf/infoblox.yaml.template
new file mode 100644
index 00000000000..69398945eb0
--- /dev/null
+++ b/conf/infoblox.yaml.template
@@ -0,0 +1,18 @@
+INFOBLOX:
+  HOSTNAME: infoblox.example.com
+  # username: Login for Infoblox instance
+  USERNAME:
+  # password: Password for Infoblox instance
+  PASSWORD:
+  # Domain
+  DOMAIN:
+  # Network Address
+  NETWORK:
+  # Network Prefix
+  NETWORK_PREFIX:
+  # Network Netmask
+  NETMASK:
+  # Starting range of IP
+  START_RANGE:
+  # Ending range of IP
+  END_RANGE:
diff --git a/conf/migrations.py b/conf/migrations.py
new file mode 100644
index 00000000000..b263fa352d6
--- /dev/null
+++ b/conf/migrations.py
@@ -0,0 +1,37 @@
+"""Robottelo configuration migrations
+
+This module contains functions that are run after the configuration is loaded. Each function
+should be named `migration_<YYMMDD>_<description>` and accept two parameters: `settings` and
+`data`. `settings` is a `dynaconf` `Box` object containing the configuration. `data` is a
+`dict` that can be used to store settings that will be merged with the rest of the settings.
+The functions should not return anything.
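+For example, `migration_231129_deploy_workflow` below converts a legacy
+`deploy_workflow` string into the newer `deploy_workflows` mapping.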
+""" + +from packaging.version import Version + +from robottelo.logging import logger + + +def migration_231129_deploy_workflow(settings, data): + """Migrates {server,capsule}.deploy_workflow to {server,capsule}.deploy_workflows""" + for product_type in ['server', 'capsule']: + # If the product_type has a deploy_workflow and it is a string, and + # it does not have a deploy_workflows set + if ( + settings[product_type].get('deploy_workflow') + and isinstance(settings[product_type].deploy_workflow, str) + and not settings[product_type].get('deploy_workflows') + ): + sat_rhel_version = settings[product_type].version.rhel_version + data[product_type] = {} + # Set the deploy_workflows to a dict with product and os keys + # Get the OS workflow from the content_host config + data[product_type].deploy_workflows = { + 'product': settings[product_type].deploy_workflow, + 'os': settings.content_host[ + f'rhel{Version(str(sat_rhel_version)).major}' + ].vm.workflow, + } + logger.info( + f'Migrated {product_type}.DEPLOY_WORKFLOW to {product_type}.DEPLOY_WORKFLOWS' + ) diff --git a/conf/oscap.yaml.template b/conf/oscap.yaml.template index bfeec7103fb..f23ec46ffb9 100644 --- a/conf/oscap.yaml.template +++ b/conf/oscap.yaml.template @@ -1,2 +1,4 @@ OSCAP: CONTENT_PATH: /usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml + # see: robottelo/constants/__init__.py OSCAP_PROFILE + PROFILE: security7 diff --git a/conf/osp.yaml.template b/conf/osp.yaml.template index 836e6fe9657..be8adc427d2 100644 --- a/conf/osp.yaml.template +++ b/conf/osp.yaml.template @@ -1,16 +1,33 @@ OSP: # Openstack to be added as a compute resource + HOSTNAME: + OSP16: hostname.example.com + OSP17: hostname.example.com + # Openstack authentication url + AUTH_URL: + OSP16: '@format http://{this.osp.hostname.osp16}:5000' + OSP17: '@format http://{this.osp.hostname.osp17}:5000' # Openstack api url For eg: https://hostname:port/v3/auth/tokens - HOSTNAME: https://hostname:port/v3/auth/tokens + API_URL: + OSP16: '@format {this.osp.auth_url.osp16}/v3/auth/tokens' + OSP17: '@format {this.osp.auth_url.osp17}/v3/auth/tokens' # Login for Openstack - USERNAME: psi-satellite-jenkins + USERNAME: # Password for Openstack PASSWORD: + # Name of Security group + SECURITY_GROUP: # Openstack tenant to be used - TENANT: satellite-jenkins - # Name of group for provisioning - SECURITY_GROUP: satellite5 + TENANT: # Name of VM to power On/Off & delete - VM_NAME: automation-robottelo-vm + VM_NAME: # ID of the domain for the project PROJECT_DOMAIN_ID: + # Openstack network name + NETWORK_NAME: + # Openstack rhel8 image name + RHEL8_IMAGE: + # Openstack template's configuration flavour name + FLAVOR_NAME: + # Openstack ssh-key configuration + SSHKEY: diff --git a/conf/server.yaml.template b/conf/server.yaml.template index 5c08431532b..c5b844e6509 100644 --- a/conf/server.yaml.template +++ b/conf/server.yaml.template @@ -27,7 +27,9 @@ SERVER: # this setting determines if they will be automatically checked in AUTO_CHECKIN: False # The Ansible Tower workflow used to deploy a satellite - DEPLOY_WORKFLOW: "deploy-sat-jenkins" + DEPLOY_WORKFLOWS: + PRODUCT: deploy-satellite # workflow to deploy OS with product running on top of it + OS: deploy-rhel # workflow to deploy OS that is ready to run the product # Dictionary of arguments which should be passed along to the deploy workflow # DEPLOY_ARGUMENTS: # HTTP scheme when building the server URL @@ -48,6 +50,10 @@ SERVER: ADMIN_USERNAME: admin # Admin password when accessing API and UI ADMIN_PASSWORD: changeme + # Set to true 
to verify against the certificate given in REQUESTS_CA_BUNDLE + # Or specify path to certificate path or directory + # see: https://requests.readthedocs.io/en/latest/user/advanced/#ssl-cert-verification + VERIFY_CA: false SSH_CLIENT: # Specify port number for ssh client, Default: 22 diff --git a/conf/ui.yaml.template b/conf/ui.yaml.template index 750f715f7ed..2817b81120a 100644 --- a/conf/ui.yaml.template +++ b/conf/ui.yaml.template @@ -24,13 +24,15 @@ UI: WEBDRIVER: chrome # Binary location for selected wedriver (not needed if using saucelabs) WEBDRIVER_BINARY: /usr/bin/chromedriver - + RECORD_VIDEO: false + GRID_URL: http://infra-grid.example.com:4444 # Web_Kaifuku Settings (checkout https://github.com/RonnyPfannschmidt/webdriver_kaifuku) WEBKAIFUKU: webdriver: chrome/remote webdriver_options: command_executor: http://localhost:4444/wd/hub desired_capabilities: + se:recordVideo: '@jinja {{ this.ui.record_video }}' browserName: chrome chromeOptions: args: diff --git a/conf/vmware.yaml.template b/conf/vmware.yaml.template index b861b1aed80..55a8b7f5e6d 100644 --- a/conf/vmware.yaml.template +++ b/conf/vmware.yaml.template @@ -1,26 +1,36 @@ VMWARE: - # Vmware to be added as a compute resource - # vmware vcenter URL, e.g. example.com - VCENTER: - # Login for vmware + # VMware to be added as a compute resource + # VCENTER: vmware vcenter URL + VCENTER7: + HOSTNAME: + HYPERVISOR: + # mac_address: Mac address of the vm_name + MAC_ADDRESS: + VCENTER8: + HOSTNAME: + HYPERVISOR: + # mac_address: Mac address of the vm_name + MAC_ADDRESS: + # USERNAME: Login for vmware USERNAME: - # Password for vmware + # PASSWORD: Password for vmware PASSWORD: - # vmware datacenter + # DATACENTER: vmware datacenter DATACENTER: - # Name of VM that should be used + # CLUSTER: vmware cluster + CLUSTER: + # DATASTORE: vmware datastore + DATASTORE: + # VM_NAME: Name of VM to power On/Off & delete VM_NAME: - - # Vmware Compute resource image data - # Operating system of the image + # VMware Compute resource image data + # IMAGE_OS: Operating system of the image IMAGE_OS: - # Architecture of the image + # IMAGE_ARCH: Architecture of the image IMAGE_ARCH: - # Login to the image + # IMAGE_USERNAME: Login to the image IMAGE_USERNAME: - # Password of that user + # IMAGE_PASSWORD: Password to the image IMAGE_PASSWORD: - # Image name on the external provider + # IMAGE_NAME: Image name on the external provider IMAGE_NAME: - # Interface used for some tests; not required to work with provisioning, not required to be in VLAN - INTERFACE: diff --git a/conftest.py b/conftest.py index 0ade0cb33de..f54b9cce479 100644 --- a/conftest.py +++ b/conftest.py @@ -3,6 +3,7 @@ pytest_plugins = [ # Plugins + 'pytest_plugins.auto_vault', 'pytest_plugins.disable_rp_params', 'pytest_plugins.external_logging', 'pytest_plugins.fixture_markers', @@ -16,9 +17,11 @@ 'pytest_plugins.settings_skip', 'pytest_plugins.rerun_rp.rerun_rp', 'pytest_plugins.fspath_plugins', - 'pytest_plugins.fixture_collection', 'pytest_plugins.factory_collection', 'pytest_plugins.requirements.update_requirements', + 'pytest_plugins.sanity_plugin', + 'pytest_plugins.video_cleanup', + 'pytest_plugins.capsule_n-minus', # Fixtures 'pytest_fixtures.core.broker', 'pytest_fixtures.core.sat_cap_factory', @@ -36,6 +39,7 @@ 'pytest_fixtures.component.computeprofile', 'pytest_fixtures.component.contentview', 'pytest_fixtures.component.domain', + 'pytest_fixtures.component.discovery', 'pytest_fixtures.component.host', 'pytest_fixtures.component.hostgroup', 
     'pytest_fixtures.component.http_proxy',
@@ -50,6 +54,8 @@
     'pytest_fixtures.component.provision_gce',
     'pytest_fixtures.component.provision_libvirt',
     'pytest_fixtures.component.provision_pxe',
+    'pytest_fixtures.component.provision_capsule_pxe',
+    'pytest_fixtures.component.provision_vmware',
     'pytest_fixtures.component.provisioning_template',
     'pytest_fixtures.component.puppet',
     'pytest_fixtures.component.repository',
@@ -63,6 +69,7 @@
     'pytest_fixtures.component.templatesync',
     'pytest_fixtures.component.user',
     'pytest_fixtures.component.user_role',
+    'pytest_fixtures.component.virtwho_config',
     # upgrade
     'pytest_plugins.upgrade.scenario_workers',
 ]
diff --git a/docs/conf.py b/docs/conf.py
index c30e6c820b2..b143f7ac6fe 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -4,9 +4,13 @@
 http://sphinx-doc.org/config.html
 """
+import builtins
 import os
 import sys
 
+# Set the __sphinx_build__ variable to True. This is used to skip config generation
+builtins.__sphinx_build__ = True
+
 
 def skip_data(app, what, name, obj, skip, options):
     """Skip double generating docs for robottelo.utils.decorators.func_shared.shared"""
diff --git a/pyproject.toml b/pyproject.toml
index 2a45bb81ace..72a8e2b8f3a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,6 +15,48 @@ exclude = '''
 )/
 '''
 
+[tool.ruff]
+target-version = "py311"
+fixable = ["ALL"]
+
+select = [
+    "B",  # bugbear
+    # "C90", # mccabe
+    "E",  # pycodestyle
+    "F",  # flake8
+    "I",  # isort
+    # "Q",  # flake8-quotes
+    "PT",  # flake8-pytest
+    "RET",  # flake8-return
+    "SIM",  # flake8-simplify
+    "UP",  # pyupgrade
+    "W",  # pycodestyle
+]
+
+ignore = [
+    "B019",  # lru_cache can lead to memory leaks - acceptable tradeoff
+    "E501",  # line too long - handled by black
+    "PT004",  # pytest underscore prefix for non-return fixtures
+    "PT005",  # pytest no underscore prefix for return fixtures
+]
+
+[tool.ruff.isort]
+force-sort-within-sections = true
+known-first-party = [
+    "robottelo",
+]
+combine-as-imports = true
+
+[tool.ruff.flake8-pytest-style]
+fixture-parentheses = false
+mark-parentheses = false
+
+[tool.ruff.flake8-quotes]
+inline-quotes = "single"
+
+[tool.ruff.mccabe]
+max-complexity = 20
+
 [tool.pytest.ini_options]
 junit_logging = 'all'
 addopts = '--show-capture=no'
diff --git a/pytest_fixtures/component/acs.py b/pytest_fixtures/component/acs.py
index df222b90553..065e64d935f 100644
--- a/pytest_fixtures/component/acs.py
+++ b/pytest_fixtures/component/acs.py
@@ -1,8 +1,7 @@
 # Alternate Content Sources fixtures
 import pytest
 
-from robottelo.constants.repos import CUSTOM_FILE_REPO
-from robottelo.constants.repos import CUSTOM_RPM_REPO
+from robottelo.constants.repos import CUSTOM_FILE_REPO, CUSTOM_RPM_REPO
 
 
 @pytest.fixture(scope='module')
diff --git a/pytest_fixtures/component/activationkey.py b/pytest_fixtures/component/activationkey.py
index 05c479c65b7..0be96945b99 100644
--- a/pytest_fixtures/component/activationkey.py
+++ b/pytest_fixtures/component/activationkey.py
@@ -6,27 +6,30 @@
 
 
 @pytest.fixture(scope='module')
-def module_activation_key(module_org, module_target_sat):
-    return module_target_sat.api.ActivationKey(organization=module_org).create()
+def module_activation_key(module_entitlement_manifest_org, module_target_sat):
+    """Create activation key using default CV and library environment."""
+    return module_target_sat.api.ActivationKey(
+        content_view=module_entitlement_manifest_org.default_content_view.id,
+        environment=module_entitlement_manifest_org.library.id,
+        organization=module_entitlement_manifest_org,
+    ).create()
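
# Editor's sketch (hypothetical test, not part of this patch): the reworked
# module_activation_key fixture above pins the key to the manifest org's
# Default Organization View and Library environment, so a consuming test can
# assert those defaults without any extra content setup.
def test_ak_uses_org_defaults(module_activation_key, module_entitlement_manifest_org):
    ak = module_activation_key.read()
    # The key lives in the manifest org and its Library lifecycle environment
    assert ak.organization.id == module_entitlement_manifest_org.id
    assert ak.environment.id == module_entitlement_manifest_org.library.id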
@pytest.fixture(scope='module') def module_ak(module_lce, module_org, module_target_sat): - ak = module_target_sat.api.ActivationKey( + return module_target_sat.api.ActivationKey( environment=module_lce, organization=module_org, ).create() - return ak @pytest.fixture(scope='module') def module_ak_with_cv(module_lce, module_org, module_promoted_cv, module_target_sat): - ak = module_target_sat.api.ActivationKey( + return module_target_sat.api.ActivationKey( content_view=module_promoted_cv, environment=module_lce, organization=module_org, ).create() - return ak @pytest.fixture(scope='module') @@ -39,7 +42,7 @@ def module_ak_with_synced_repo(module_org, module_target_sat): {'product-id': new_product['id'], 'content-type': 'yum'} ) Repository.synchronize({'id': new_repo['id']}) - ak = module_target_sat.cli_factory.make_activation_key( + return module_target_sat.cli_factory.make_activation_key( { 'lifecycle-environment': 'Library', 'content-view': 'Default Organization View', @@ -47,4 +50,3 @@ def module_ak_with_synced_repo(module_org, module_target_sat): 'auto-attach': False, } ) - return ak diff --git a/pytest_fixtures/component/architecture.py b/pytest_fixtures/component/architecture.py index 96d758f3d64..5ff90f3cb37 100644 --- a/pytest_fixtures/component/architecture.py +++ b/pytest_fixtures/component/architecture.py @@ -6,22 +6,20 @@ @pytest.fixture(scope='session') def default_architecture(session_target_sat): - arch = ( + return ( session_target_sat.api.Architecture() .search(query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0] .read() ) - return arch @pytest.fixture(scope='session') def session_puppet_default_architecture(session_puppet_enabled_sat): - arch = ( + return ( session_puppet_enabled_sat.api.Architecture() .search(query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0] .read() ) - return arch @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/computeprofile.py b/pytest_fixtures/component/computeprofile.py index 859dc64abd0..3f2cce6043c 100644 --- a/pytest_fixtures/component/computeprofile.py +++ b/pytest_fixtures/component/computeprofile.py @@ -1,6 +1,6 @@ # Compute Profile Fixtures -import pytest from nailgun import entities +import pytest @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/contentview.py b/pytest_fixtures/component/contentview.py index e82f69d04b5..79d66014949 100644 --- a/pytest_fixtures/component/contentview.py +++ b/pytest_fixtures/component/contentview.py @@ -1,6 +1,6 @@ # Content View Fixtures -import pytest from nailgun.entity_mixins import call_entity_method_with_timeout +import pytest from robottelo.constants import DEFAULT_CV @@ -36,12 +36,11 @@ def module_ak_cv_lce(module_org, module_lce, module_published_cv, module_target_ content_view_version = module_published_cv.version[0] content_view_version.promote(data={'environment_ids': module_lce.id}) module_published_cv = module_published_cv.read() - module_ak_with_cv_lce = module_target_sat.api.ActivationKey( + return module_target_sat.api.ActivationKey( content_view=module_published_cv, environment=module_lce, organization=module_org, ).create() - return module_ak_with_cv_lce @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/discovery.py b/pytest_fixtures/component/discovery.py new file mode 100644 index 00000000000..c9d4375484a --- /dev/null +++ b/pytest_fixtures/component/discovery.py @@ -0,0 +1,42 @@ +from fauxfactory import gen_string +import pytest + + +@pytest.fixture(scope='module') +def module_discovery_hostgroup(module_org, 
module_location, module_target_sat): + host = module_target_sat.api.Host(organization=module_org, location=module_location).create() + return module_target_sat.api.HostGroup( + organization=[module_org], + location=[module_location], + medium=host.medium, + root_pass=gen_string('alpha'), + operatingsystem=host.operatingsystem, + ptable=host.ptable, + domain=host.domain, + architecture=host.architecture, + ).create() + + +@pytest.fixture(scope='module') +def discovery_org(module_org, module_target_sat): + discovery_org = module_target_sat.update_setting('discovery_organization', module_org.name) + yield module_org + module_target_sat.update_setting('discovery_organization', discovery_org) + + +@pytest.fixture(scope='module') +def discovery_location(module_location, module_target_sat): + discovery_loc = module_target_sat.update_setting('discovery_location', module_location.name) + yield module_location + module_target_sat.update_setting('discovery_location', discovery_loc) + + +@pytest.fixture(scope='module') +def provisioning_env(module_target_sat, discovery_org, discovery_location): + # Build PXE default template to get default PXE file + module_target_sat.cli.ProvisioningTemplate().build_pxe_default() + return module_target_sat.api_factory.configure_provisioning( + org=discovery_org, + loc=discovery_location, + os=f'Redhat {module_target_sat.cli_factory.RHELRepository().repo_data["version"]}', + ) diff --git a/pytest_fixtures/component/domain.py b/pytest_fixtures/component/domain.py index 5cc524ff3b5..57a1c265b30 100644 --- a/pytest_fixtures/component/domain.py +++ b/pytest_fixtures/component/domain.py @@ -1,15 +1,15 @@ # Domain Fixtures import pytest -from nailgun import entities @pytest.fixture(scope='session') def default_domain(session_target_sat, default_smart_proxy): domain_name = session_target_sat.hostname.partition('.')[-1] - dom = entities.Domain().search(query={'search': f'name={domain_name}'})[0] - dom.dns = default_smart_proxy - dom.update(['dns']) - return entities.Domain(id=dom.id).read() + dom = session_target_sat.api.Domain().search(query={'search': f'name={domain_name}'})[0] + if 'dns' in session_target_sat.get_features(): + dom.dns = default_smart_proxy + dom.update(['dns']) + return session_target_sat.api.Domain(id=dom.id).read() @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/host.py b/pytest_fixtures/component/host.py index 1fb55a33001..ece968e28e7 100644 --- a/pytest_fixtures/component/host.py +++ b/pytest_fixtures/component/host.py @@ -1,14 +1,9 @@ # Host Specific Fixtures -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import DEFAULT_CV, ENVIRONMENT, PRDS, REPOS, REPOSET @pytest.fixture @@ -26,9 +21,8 @@ def module_model(): return entities.Model().create() -@pytest.mark.skip_if_not_set('clients', 'fake_manifest') @pytest.fixture(scope="module") -def setup_rhst_repo(): +def setup_rhst_repo(module_target_sat): """Prepare Satellite tools repository for usage in specified organization""" org = entities.Organization().create() cv = entities.ContentView(organization=org).create() @@ -38,7 +32,7 @@ def setup_rhst_repo(): organization=org, ).create() repo_name = 'rhst7' - setup_org_for_a_rh_repo( + 
module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET[repo_name], diff --git a/pytest_fixtures/component/hostgroup.py b/pytest_fixtures/component/hostgroup.py index 43b2668b6e2..65be6196183 100644 --- a/pytest_fixtures/component/hostgroup.py +++ b/pytest_fixtures/component/hostgroup.py @@ -1,6 +1,6 @@ # Hostgroup Fixtures -import pytest from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.logging import logger diff --git a/pytest_fixtures/component/http_proxy.py b/pytest_fixtures/component/http_proxy.py index ee8efc9c16a..a98e7409699 100644 --- a/pytest_fixtures/component/http_proxy.py +++ b/pytest_fixtures/component/http_proxy.py @@ -1,9 +1,16 @@ import pytest from robottelo.config import settings +from robottelo.hosts import ProxyHost -@pytest.fixture(scope='function') +@pytest.fixture(scope='session') +def session_auth_proxy(session_target_sat): + """Instantiates authenticated HTTP proxy as a session-scoped fixture""" + return ProxyHost(settings.http_proxy.auth_proxy_url) + + +@pytest.fixture def setup_http_proxy(request, module_manifest_org, target_sat): """Create a new HTTP proxy and set related settings based on proxy""" http_proxy = target_sat.api_factory.make_http_proxy(module_manifest_org, request.param) diff --git a/pytest_fixtures/component/katello_agent.py b/pytest_fixtures/component/katello_agent.py index b25304cb8ae..3fafd4315d0 100644 --- a/pytest_fixtures/component/katello_agent.py +++ b/pytest_fixtures/component/katello_agent.py @@ -1,5 +1,5 @@ -import pytest from box import Box +import pytest from robottelo.config import settings from robottelo.utils.installer import InstallerCommand @@ -21,7 +21,7 @@ def sat_with_katello_agent(module_target_sat): InstallerCommand('foreman-proxy-content-enable-katello-agent true') ) assert result.status == 0 - yield module_target_sat + return module_target_sat @pytest.fixture @@ -33,7 +33,7 @@ def katello_agent_client_for_upgrade(sat_with_katello_agent, sat_upgrade_chost, ) sat_upgrade_chost.install_katello_agent() host_info = sat_with_katello_agent.cli.Host.info({'name': sat_upgrade_chost.hostname}) - yield Box( + return Box( { 'client': sat_upgrade_chost, 'host_info': host_info, @@ -48,8 +48,10 @@ def katello_agent_client(sat_with_katello_agent, rhel_contenthost): org = sat_with_katello_agent.api.Organization().create() client_repo = settings.repos['SATCLIENT_REPO'][f'RHEL{rhel_contenthost.os_version.major}'] sat_with_katello_agent.register_host_custom_repo( - org, rhel_contenthost, [client_repo, settings.repos.yum_1.url] + org, + rhel_contenthost, + [client_repo, settings.repos.yum_1.url], ) rhel_contenthost.install_katello_agent() host_info = sat_with_katello_agent.cli.Host.info({'name': rhel_contenthost.hostname}) - yield Box({'client': rhel_contenthost, 'host_info': host_info, 'sat': sat_with_katello_agent}) + return Box({'client': rhel_contenthost, 'host_info': host_info, 'sat': sat_with_katello_agent}) diff --git a/pytest_fixtures/component/katello_certs_check.py b/pytest_fixtures/component/katello_certs_check.py index 3dd8d71df83..1ee97acc565 100644 --- a/pytest_fixtures/component/katello_certs_check.py +++ b/pytest_fixtures/component/katello_certs_check.py @@ -1,30 +1,19 @@ # katello_certs_check Fixtures from pathlib import Path -import pytest -from broker import Broker from fauxfactory import gen_string +import pytest from robottelo.constants import CERT_DATA as cert_data from robottelo.hosts import Capsule -from 
robottelo.hosts import ContentHost @pytest.fixture -def certs_vm_setup(request): - """Create VM and register content host""" - target_cores = request.param.get('target_cores', 1) - target_memory = request.param.get('target_memory', '1GiB') - with Broker( - nick=request.param['nick'], - host_class=ContentHost, - target_cores=target_cores, - target_memory=target_memory, - ) as host: - cert_data['key_file_name'] = f'{host.hostname}/{host.hostname}.key' - cert_data['cert_file_name'] = f'{host.hostname}/{host.hostname}.crt' - host.custom_cert_generate(cert_data['capsule_hostname']) - yield cert_data, host +def certs_data(sat_ready_rhel): + cert_data['key_file_name'] = f'{sat_ready_rhel.hostname}/{sat_ready_rhel.hostname}.key' + cert_data['cert_file_name'] = f'{sat_ready_rhel.hostname}/{sat_ready_rhel.hostname}.crt' + sat_ready_rhel.custom_cert_generate(cert_data['capsule_hostname']) + return cert_data @pytest.fixture diff --git a/pytest_fixtures/component/lce.py b/pytest_fixtures/component/lce.py index 368c1329131..2a3f113e9c8 100644 --- a/pytest_fixtures/component/lce.py +++ b/pytest_fixtures/component/lce.py @@ -16,7 +16,7 @@ def module_lce(module_org, module_target_sat): return module_target_sat.api.LifecycleEnvironment(organization=module_org).create() -@pytest.fixture(scope='function') +@pytest.fixture def function_lce(function_org, target_sat): return target_sat.api.LifecycleEnvironment(organization=function_org).create() @@ -31,7 +31,7 @@ def module_lce_library(module_org, module_target_sat): ) -@pytest.fixture(scope='function') +@pytest.fixture def function_lce_library(function_org, target_sat): """Returns the Library lifecycle environment from chosen organization""" return ( diff --git a/pytest_fixtures/component/maintain.py b/pytest_fixtures/component/maintain.py index 089e95c5023..68aa82f5c86 100644 --- a/pytest_fixtures/component/maintain.py +++ b/pytest_fixtures/component/maintain.py @@ -6,19 +6,42 @@ from robottelo import constants from robottelo.config import settings from robottelo.constants import SATELLITE_MAINTAIN_YML -from robottelo.hosts import Satellite +from robottelo.hosts import Capsule, Satellite, SatelliteHostError +from robottelo.logging import logger +synced_repos = pytest.StashKey[dict] -@pytest.fixture(scope='session') -def sat_maintain(request, session_target_sat, session_capsule_configured): + +@pytest.fixture(scope='module') +def module_stash(request): + """Module scoped stash for storing data between tests""" + # Please refer the documentation for more details on stash + # https://docs.pytest.org/en/latest/reference/reference.html#stash + request.node.stash[synced_repos] = {} + return request.node.stash + + +@pytest.fixture(scope='module') +def sat_maintain(request, module_target_sat, module_capsule_configured): if settings.remotedb.server: yield Satellite(settings.remotedb.server) else: - session_target_sat.register_to_cdn(pool_ids=settings.subscription.fm_rhn_poolid.split()) - hosts = {'satellite': session_target_sat, 'capsule': session_capsule_configured} + module_target_sat.register_to_cdn(pool_ids=settings.subscription.fm_rhn_poolid.split()) + hosts = {'satellite': module_target_sat, 'capsule': module_capsule_configured} yield hosts[request.param] +@pytest.fixture +def start_satellite_services(sat_maintain): + """Teardown for satellite-maintain tests to ensure that all Satellite services are started""" + yield + logger.info('Ensuring that all %s services are running', sat_maintain.__class__.__name__) + result = sat_maintain.cli.Service.start() + if 
result.status != 0: + logger.error('Unable to start all %s services', sat_maintain.__class__.__name__) + raise SatelliteHostError('Failed to start Satellite services') + + @pytest.fixture def setup_backup_tests(request, sat_maintain): """Teardown for backup/restore tests""" @@ -30,47 +53,55 @@ def _finalize(): @pytest.fixture(scope='module') -def module_synced_repos(session_target_sat, session_capsule_configured, module_sca_manifest): - org = session_target_sat.api.Organization().create() - session_target_sat.upload_manifest(org.id, module_sca_manifest.content) - # sync custom repo - cust_prod = session_target_sat.api.Product(organization=org).create() - cust_repo = session_target_sat.api.Repository( - url=settings.repos.yum_1.url, product=cust_prod - ).create() - cust_repo.sync() - - # sync RH repo - product = session_target_sat.api.Product( - name=constants.PRDS['rhae'], organization=org.id - ).search()[0] - r_set = session_target_sat.api.RepositorySet( - name=constants.REPOSET['rhae2'], product=product - ).search()[0] - payload = {'basearch': constants.DEFAULT_ARCHITECTURE, 'product_id': product.id} - r_set.enable(data=payload) - result = session_target_sat.api.Repository(name=constants.REPOS['rhae2']['name']).search( - query={'organization_id': org.id} - ) - rh_repo_id = result[0].id - rh_repo = session_target_sat.api.Repository(id=rh_repo_id).read() - rh_repo.sync() - - # assign the Library LCE to the Capsule - lce = session_target_sat.api.LifecycleEnvironment(organization=org).search( - query={'search': f'name={constants.ENVIRONMENT}'} - )[0] - session_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( - data={'environment_id': lce.id} - ) - result = session_capsule_configured.nailgun_capsule.content_lifecycle_environments() - assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] - - # sync the Capsule - sync_status = session_capsule_configured.nailgun_capsule.content_sync() - assert sync_status['result'] == 'success' +def module_synced_repos(sat_maintain, module_capsule_configured, module_sca_manifest, module_stash): + if not module_stash[synced_repos]: + org = sat_maintain.satellite.api.Organization().create() + sat_maintain.satellite.upload_manifest(org.id, module_sca_manifest.content) + # sync custom repo + cust_prod = sat_maintain.satellite.api.Product(organization=org).create() + cust_repo = sat_maintain.satellite.api.Repository( + url=settings.repos.yum_1.url, product=cust_prod + ).create() + cust_repo.sync() + + # sync RH repo + product = sat_maintain.satellite.api.Product( + name=constants.PRDS['rhae'], organization=org.id + ).search()[0] + r_set = sat_maintain.satellite.api.RepositorySet( + name=constants.REPOSET['rhae2'], product=product + ).search()[0] + payload = {'basearch': constants.DEFAULT_ARCHITECTURE, 'product_id': product.id} + r_set.enable(data=payload) + result = sat_maintain.satellite.api.Repository( + name=constants.REPOS['rhae2']['name'] + ).search(query={'organization_id': org.id}) + rh_repo_id = result[0].id + rh_repo = sat_maintain.satellite.api.Repository(id=rh_repo_id).read() + rh_repo.sync() + + module_stash[synced_repos]['rh_repo'] = rh_repo + module_stash[synced_repos]['cust_repo'] = cust_repo + module_stash[synced_repos]['org'] = org + + if type(sat_maintain) is Capsule: + # assign the Library LCE to the Capsule + lce = sat_maintain.satellite.api.LifecycleEnvironment( + organization=module_stash[synced_repos]['org'] + ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] + 
module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': lce.id} + ) + result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments() + assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] + # sync the Capsule + sync_status = module_capsule_configured.nailgun_capsule.content_sync() + assert sync_status['result'] == 'success' - yield {'custom': cust_repo, 'rh': rh_repo} + return { + 'custom': module_stash[synced_repos]['cust_repo'], + 'rh': module_stash[synced_repos]['rh_repo'], + } @pytest.fixture @@ -80,7 +111,7 @@ def setup_sync_plan(request, sat_maintain): """ org = sat_maintain.api.Organization().create() # Setup sync-plan - new_sync_plan = sat_maintain.cli_factory.make_sync_plan( + new_sync_plan = sat_maintain.cli_factory.sync_plan( { 'enabled': 'true', 'interval': 'weekly', diff --git a/pytest_fixtures/component/os.py b/pytest_fixtures/component/os.py index 6d11e9c94fd..0c94baa09ce 100644 --- a/pytest_fixtures/component/os.py +++ b/pytest_fixtures/component/os.py @@ -1,6 +1,6 @@ # Operating System Fixtures -import pytest from nailgun import entities +import pytest @pytest.fixture(scope='session') @@ -26,8 +26,7 @@ def default_os( os.ptable.append(default_partitiontable) os.provisioning_template.append(default_pxetemplate) os.update(['architecture', 'ptable', 'provisioning_template']) - os = entities.OperatingSystem(id=os.id).read() - return os + return entities.OperatingSystem(id=os.id).read() @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/oscap.py b/pytest_fixtures/component/oscap.py index 6fdd3866c29..786788914f2 100644 --- a/pytest_fixtures/component/oscap.py +++ b/pytest_fixtures/component/oscap.py @@ -1,15 +1,11 @@ from pathlib import PurePath -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.factory import make_scapcontent -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import OSCAP_PROFILE -from robottelo.constants import OSCAP_TAILORING_FILE +from robottelo.config import robottelo_tmp_dir, settings +from robottelo.constants import OSCAP_PROFILE, OSCAP_TAILORING_FILE, DataFile @pytest.fixture(scope="session") @@ -32,16 +28,18 @@ def oscap_content_path(session_target_sat): @pytest.fixture(scope="module") -def scap_content(import_ansible_roles): +def scap_content(import_ansible_roles, module_target_sat): title = f"rhel-content-{gen_string('alpha')}" - scap_info = make_scapcontent({'title': title, 'scap-file': f'{settings.oscap.content_path}'}) + scap_info = module_target_sat.cli_factory.scapcontent( + {'title': title, 'scap-file': f'{settings.oscap.content_path}'} + ) scap_id = scap_info['id'] scap_info = entities.ScapContents(id=scap_id).read() scap_profile_id = [ profile['id'] for profile in scap_info.scap_content_profiles - if OSCAP_PROFILE['security7'] in profile['title'] + if OSCAP_PROFILE[settings.oscap.profile] in profile['title'] ][0] return { "title": title, diff --git a/pytest_fixtures/component/provision_azure.py b/pytest_fixtures/component/provision_azure.py index d1fe7f1f128..7e2a05f0707 100644 --- a/pytest_fixtures/component/provision_azure.py +++ b/pytest_fixtures/component/provision_azure.py @@ -1,36 +1,38 @@ # Azure CR Fixtures -import pytest from fauxfactory import gen_string +import pytest from wrapanapi import AzureSystem from robottelo.config import settings -from 
robottelo.constants import AZURERM_RHEL7_FT_BYOS_IMG_URN -from robottelo.constants import AZURERM_RHEL7_FT_CUSTOM_IMG_URN -from robottelo.constants import AZURERM_RHEL7_FT_GALLERY_IMG_URN -from robottelo.constants import AZURERM_RHEL7_FT_IMG_URN -from robottelo.constants import AZURERM_RHEL7_UD_IMG_URN -from robottelo.constants import DEFAULT_ARCHITECTURE +from robottelo.constants import ( + AZURERM_RHEL7_FT_BYOS_IMG_URN, + AZURERM_RHEL7_FT_CUSTOM_IMG_URN, + AZURERM_RHEL7_FT_GALLERY_IMG_URN, + AZURERM_RHEL7_FT_IMG_URN, + AZURERM_RHEL7_UD_IMG_URN, + DEFAULT_ARCHITECTURE, +) @pytest.fixture(scope='session') def sat_azure(request, session_puppet_enabled_sat, session_target_sat): hosts = {'sat': session_target_sat, 'puppet_sat': session_puppet_enabled_sat} - yield hosts[request.param] + return hosts[request.param] @pytest.fixture(scope='module') def sat_azure_org(sat_azure): - yield sat_azure.api.Organization().create() + return sat_azure.api.Organization().create() @pytest.fixture(scope='module') def sat_azure_loc(sat_azure): - yield sat_azure.api.Location().create() + return sat_azure.api.Location().create() @pytest.fixture(scope='module') def sat_azure_domain(sat_azure, sat_azure_loc, sat_azure_org): - yield sat_azure.api.Domain(location=[sat_azure_loc], organization=[sat_azure_org]).create() + return sat_azure.api.Domain(location=[sat_azure_loc], organization=[sat_azure_org]).create() @pytest.fixture(scope='module') @@ -42,17 +44,16 @@ def sat_azure_default_os(sat_azure): @pytest.fixture(scope='module') def sat_azure_default_architecture(sat_azure): - arch = ( + return ( sat_azure.api.Architecture() .search(query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0] .read() ) - return arch @pytest.fixture(scope='session') def azurerm_settings(): - deps = { + return { 'tenant': settings.azurerm.tenant_id, 'app_ident': settings.azurerm.client_id, 'sub_id': settings.azurerm.subscription_id, @@ -60,7 +61,6 @@ def azurerm_settings(): 'secret': settings.azurerm.client_secret, 'region': settings.azurerm.azure_region.lower().replace(' ', ''), } - return deps @pytest.fixture(scope='session') @@ -84,7 +84,7 @@ def azurermclient(azurerm_settings): @pytest.fixture(scope='module') def module_azurerm_cr(azurerm_settings, sat_azure_org, sat_azure_loc, sat_azure): """Create AzureRM Compute Resource""" - azure_cr = sat_azure.api.AzureRMComputeResource( + return sat_azure.api.AzureRMComputeResource( name=gen_string('alpha'), provider='AzureRm', tenant=azurerm_settings['tenant'], @@ -95,7 +95,6 @@ def module_azurerm_cr(azurerm_settings, sat_azure_org, sat_azure_loc, sat_azure) organization=[sat_azure_org], location=[sat_azure_loc], ).create() - return azure_cr @pytest.fixture(scope='module') @@ -106,7 +105,7 @@ def module_azurerm_finishimg( module_azurerm_cr, ): """Creates Finish Template image on AzureRM Compute Resource""" - finish_image = sat_azure.api.Image( + return sat_azure.api.Image( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, name=gen_string('alpha'), @@ -114,7 +113,6 @@ def module_azurerm_finishimg( username=settings.azurerm.username, uuid=AZURERM_RHEL7_FT_IMG_URN, ).create() - return finish_image @pytest.fixture(scope='module') @@ -125,7 +123,7 @@ def module_azurerm_byos_finishimg( sat_azure, ): """Creates BYOS Finish Template image on AzureRM Compute Resource""" - finish_image = sat_azure.api.Image( + return sat_azure.api.Image( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, name=gen_string('alpha'), @@ -133,7 +131,6 @@ def 
module_azurerm_byos_finishimg( username=settings.azurerm.username, uuid=AZURERM_RHEL7_FT_BYOS_IMG_URN, ).create() - return finish_image @pytest.fixture(scope='module') @@ -144,7 +141,7 @@ def module_azurerm_cloudimg( module_azurerm_cr, ): """Creates cloudinit image on AzureRM Compute Resource""" - finish_image = sat_azure.api.Image( + return sat_azure.api.Image( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, name=gen_string('alpha'), @@ -153,7 +150,6 @@ def module_azurerm_cloudimg( uuid=AZURERM_RHEL7_UD_IMG_URN, user_data=True, ).create() - return finish_image @pytest.fixture(scope='module') @@ -164,7 +160,7 @@ def module_azurerm_gallery_finishimg( module_azurerm_cr, ): """Creates Shared Gallery Finish Template image on AzureRM Compute Resource""" - finish_image = sat_azure.api.Image( + return sat_azure.api.Image( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, name=gen_string('alpha'), @@ -172,7 +168,6 @@ def module_azurerm_gallery_finishimg( username=settings.azurerm.username, uuid=AZURERM_RHEL7_FT_GALLERY_IMG_URN, ).create() - return finish_image @pytest.fixture(scope='module') @@ -183,7 +178,7 @@ def module_azurerm_custom_finishimg( module_azurerm_cr, ): """Creates Custom Finish Template image on AzureRM Compute Resource""" - finish_image = sat_azure.api.Image( + return sat_azure.api.Image( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, name=gen_string('alpha'), @@ -191,4 +186,3 @@ def module_azurerm_custom_finishimg( username=settings.azurerm.username, uuid=AZURERM_RHEL7_FT_CUSTOM_IMG_URN, ).create() - return finish_image diff --git a/pytest_fixtures/component/provision_capsule_pxe.py b/pytest_fixtures/component/provision_capsule_pxe.py new file mode 100644 index 00000000000..4360ba6e4bd --- /dev/null +++ b/pytest_fixtures/component/provision_capsule_pxe.py @@ -0,0 +1,272 @@ +import ipaddress + +from box import Box +from broker import Broker +from fauxfactory import gen_string +from packaging.version import Version +import pytest + +from robottelo import constants +from robottelo.config import settings + + +@pytest.fixture(scope='module') +def capsule_provisioning_sat( + request, + module_target_sat, + module_sca_manifest_org, + module_location, + module_capsule_configured, +): + """ + This fixture sets up the Satellite for PXE provisioning. + It calls a workflow using broker to set up the network and to run satellite-installer. + It uses the artifacts from the workflow to create all the necessary Satellite entities + that are later used by the tests. 
+ """ + # Assign org and loc + capsule = module_capsule_configured.nailgun_smart_proxy + capsule.location = [module_location] + capsule.update(['location']) + capsule.organization = [module_sca_manifest_org] + capsule.update(['organization']) + + provisioning_type = getattr(request, 'param', '') + sat = module_target_sat + provisioning_domain_name = f"{gen_string('alpha').lower()}.foo" + broker_data_out = Broker().execute( + workflow='configure-install-sat-provisioning-rhv', + artifacts='last', + target_vlan_id=settings.provisioning.vlan_id, + target_host=module_capsule_configured.name, + provisioning_dns_zone=provisioning_domain_name, + sat_version='stream' if sat.is_stream else sat.version, + deploy_scenario='capsule', + ) + + broker_data_out = Box(**broker_data_out['data_out']) + provisioning_interface = ipaddress.ip_interface(broker_data_out.provisioning_addr_ipv4) + provisioning_network = provisioning_interface.network + # TODO: investigate DNS setup issue on Satellite, + # we might need to set up Sat's DNS server as the primary one on the Sat host + provisioning_upstream_dns_primary = ( + broker_data_out.provisioning_upstream_dns.pop() + ) # There should always be at least one upstream DNS + provisioning_upstream_dns_secondary = ( + broker_data_out.provisioning_upstream_dns.pop() + if len(broker_data_out.provisioning_upstream_dns) + else None + ) + domain = sat.api.Domain( + location=[module_location], + organization=[module_sca_manifest_org], + dns=capsule.id, + name=provisioning_domain_name, + ).create() + + subnet = sat.api.Subnet( + location=[module_location], + organization=[module_sca_manifest_org], + network=str(provisioning_network.network_address), + mask=str(provisioning_network.netmask), + gateway=broker_data_out.provisioning_gw_ipv4, + from_=broker_data_out.provisioning_host_range_start, + to=broker_data_out.provisioning_host_range_end, + dns_primary=provisioning_upstream_dns_primary, + dns_secondary=provisioning_upstream_dns_secondary, + boot_mode='DHCP', + ipam='DHCP', + dhcp=capsule.id, + tftp=capsule.id, + template=capsule.id, + dns=capsule.id, + httpboot=capsule.id, + # discovery=capsule.id, + remote_execution_proxy=[capsule.id], + domain=[domain.id], + ).create() + + return Box( + sat=sat, + domain=domain, + subnet=subnet, + provisioning_type=provisioning_type, + broker_data=broker_data_out, + ) + + +@pytest.fixture(scope='module') +def capsule_provisioning_lce_sync_setup(module_capsule_configured, module_lce_library): + """This fixture adds the lifecycle environment to the capsule and syncs the content""" + module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': module_lce_library.id} + ) + sync_status = module_capsule_configured.nailgun_capsule.content_sync(timeout='60m') + assert sync_status['result'] == 'success', 'Capsule sync task failed.' 
+ + +@pytest.fixture +def capsule_provisioning_hostgroup( + module_target_sat, + capsule_provisioning_sat, + module_sca_manifest_org, + module_location, + default_architecture, + capsule_provisioning_rhel_content, + module_lce_library, + default_partitiontable, + pxe_loader, + module_capsule_configured, +): + capsule = module_capsule_configured.nailgun_smart_proxy + provisioning_ip = capsule_provisioning_sat.broker_data.provisioning_addr_ipv4 + provisioning_ip = ipaddress.ip_interface(provisioning_ip).ip + return capsule_provisioning_sat.sat.api.HostGroup( + organization=[module_sca_manifest_org], + location=[module_location], + architecture=default_architecture, + domain=capsule_provisioning_sat.domain, + content_source=capsule.id, + content_view=capsule_provisioning_rhel_content.cv, + kickstart_repository=capsule_provisioning_rhel_content.ksrepo, + lifecycle_environment=module_lce_library, + root_pass=settings.provisioning.host_root_password, + operatingsystem=capsule_provisioning_rhel_content.os, + ptable=default_partitiontable, + subnet=capsule_provisioning_sat.subnet, + pxe_loader=pxe_loader.pxe_loader, + group_parameters_attributes=[ + { + 'name': 'remote_execution_ssh_keys', + 'parameter_type': 'string', + 'value': settings.provisioning.host_ssh_key_pub, + }, + # assign the AK in order for the hosts to be subscribed + { + 'name': 'kt_activation_keys', + 'parameter_type': 'string', + 'value': capsule_provisioning_rhel_content.ak.name, + }, + { + 'name': 'http_proxy', + 'parameter_type': 'string', + 'value': str(provisioning_ip), + }, + { + 'name': 'http_proxy_port', + 'parameter_type': 'string', + 'value': '80', + }, + ], + ).create() + + +@pytest.fixture(scope='module') +def capsule_provisioning_rhel_content( + request, + capsule_provisioning_sat, + module_sca_manifest_org, + module_lce_library, +): + """ + This fixture sets up kickstart repositories for a specific RHEL version + that is specified in `request.param`. + """ + sat = capsule_provisioning_sat.sat + rhel_ver = request.param['rhel_version'] + repo_names = [] + if int(rhel_ver) <= 7: + repo_names.append(f'rhel{rhel_ver}') + else: + repo_names.append(f'rhel{rhel_ver}_bos') + repo_names.append(f'rhel{rhel_ver}_aps') + rh_repos = [] + tasks = [] + rh_repo_id = "" + content_view = sat.api.ContentView(organization=module_sca_manifest_org).create() + + # Custom Content for Client repo + custom_product = sat.api.Product( + organization=module_sca_manifest_org, name=f'rhel{rhel_ver}_{gen_string("alpha")}' + ).create() + client_repo = sat.api.Repository( + organization=module_sca_manifest_org, + product=custom_product, + content_type='yum', + url=settings.repos.SATCLIENT_REPO[f'rhel{rhel_ver}'], + ).create() + task = client_repo.sync(synchronous=False) + tasks.append(task) + content_view.repository = [client_repo] + + for name in repo_names: + rh_kickstart_repo_id = sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=constants.REPOS['kickstart'][name]['product'], + repo=constants.REPOS['kickstart'][name]['name'], + reposet=constants.REPOS['kickstart'][name]['reposet'], + releasever=constants.REPOS['kickstart'][name]['version'], + ) + # do not sync content repos for discovery-based provisioning.
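+ # (the discovery workflow boots hosts from the discovery image first, so + # those tests need only the kickstart repos; skipping the large content + # syncs keeps module setup time down)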
+ if capsule_provisioning_sat.provisioning_type != 'discovery': + rh_repo_id = sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=constants.REPOS[name]['product'], + repo=constants.REPOS[name]['name'], + reposet=constants.REPOS[name]['reposet'], + releasever=constants.REPOS[name]['releasever'], + ) + + # Sync step because repo is not synced by default + for repo_id in [rh_kickstart_repo_id, rh_repo_id]: + if repo_id: + rh_repo = sat.api.Repository(id=repo_id).read() + task = rh_repo.sync(synchronous=False) + tasks.append(task) + rh_repos.append(rh_repo) + content_view.repository.append(rh_repo) + content_view.update(['repository']) + for task in tasks: + sat.wait_for_tasks( + search_query=(f'id = {task["id"]}'), + poll_timeout=2500, + ) + task_status = sat.api.ForemanTask(id=task['id']).poll() + assert task_status['result'] == 'success' + rhel_xy = Version( + constants.REPOS['kickstart'][f'rhel{rhel_ver}']['version'] + if rhel_ver == 7 + else constants.REPOS['kickstart'][f'rhel{rhel_ver}_bos']['version'] + ) + o_systems = sat.api.OperatingSystem().search( + query={'search': f'family=Redhat and major={rhel_xy.major} and minor={rhel_xy.minor}'} + ) + assert o_systems, f'Operating system RHEL {rhel_xy} was not found' + os = o_systems[0].read() + # return only the first kickstart repo - RHEL X KS or RHEL X BaseOS KS + ksrepo = rh_repos[0] + publish = content_view.publish() + task_status = sat.wait_for_tasks( + search_query=(f'Actions::Katello::ContentView::Publish and id = {publish["id"]}'), + search_rate=15, + max_tries=10, + ) + assert task_status[0].result == 'success' + content_view = sat.api.ContentView( + organization=module_sca_manifest_org, name=content_view.name + ).search()[0] + ak = sat.api.ActivationKey( + organization=module_sca_manifest_org, + content_view=content_view, + environment=module_lce_library, + ).create() + + # Ensure client repo is enabled in the activation key + content = ak.product_content(data={'content_access_mode_all': '1'})['results'] + client_repo_label = [repo['label'] for repo in content if repo['name'] == client_repo.name][0] + ak.content_override( + data={'content_overrides': [{'content_label': client_repo_label, 'value': '1'}]} + ) + return Box(os=os, ak=ak, ksrepo=ksrepo, cv=content_view) diff --git a/pytest_fixtures/component/provision_gce.py b/pytest_fixtures/component/provision_gce.py index 7006313ea51..64b706d772e 100644 --- a/pytest_fixtures/component/provision_gce.py +++ b/pytest_fixtures/component/provision_gce.py @@ -2,36 +2,40 @@ import json from tempfile import mkstemp -import pytest from fauxfactory import gen_string +import pytest from wrapanapi.systems.google import GoogleCloudSystem from robottelo.config import settings -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_PTABLE -from robottelo.constants import FOREMAN_PROVIDERS +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + DEFAULT_PTABLE, + FOREMAN_PROVIDERS, + GCE_RHEL_CLOUD_PROJECTS, + GCE_TARGET_RHEL_IMAGE_NAME, +) from robottelo.exceptions import GCECertNotFoundError @pytest.fixture(scope='session') def sat_gce(request, session_puppet_enabled_sat, session_target_sat): hosts = {'sat': session_target_sat, 'puppet_sat': session_puppet_enabled_sat} - yield hosts[getattr(request, 'param', 'sat')] + return hosts[getattr(request, 'param', 'sat')] @pytest.fixture(scope='module') def sat_gce_org(sat_gce): - yield sat_gce.api.Organization().create() + return 
sat_gce.api.Organization().create() @pytest.fixture(scope='module') def sat_gce_loc(sat_gce): - yield sat_gce.api.Location().create() + return sat_gce.api.Location().create() @pytest.fixture(scope='module') def sat_gce_domain(sat_gce, sat_gce_loc, sat_gce_org): - yield sat_gce.api.Domain(location=[sat_gce_loc], organization=[sat_gce_org]).create() + return sat_gce.api.Domain(location=[sat_gce_loc], organization=[sat_gce_org]).create() @pytest.fixture(scope='module') @@ -49,12 +53,11 @@ def sat_gce_default_partition_table(sat_gce): @pytest.fixture(scope='module') def sat_gce_default_architecture(sat_gce): - arch = ( + return ( sat_gce.api.Architecture() .search(query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0] .read() ) - return arch @pytest.fixture(scope='session') @@ -86,16 +89,18 @@ def googleclient(gce_cert): @pytest.fixture(scope='session') def gce_latest_rhel_uuid(googleclient): - template_names = [img.name for img in googleclient.list_templates(True)] - latest_rhel7_template = max(name for name in template_names if name.startswith('rhel-7')) - latest_rhel7_uuid = googleclient.get_template(latest_rhel7_template, project='rhel-cloud').uuid - return latest_rhel7_uuid + templates = googleclient.find_templates( + include_public=True, + public_projects=GCE_RHEL_CLOUD_PROJECTS, + filter_expr=f'name:{GCE_TARGET_RHEL_IMAGE_NAME}*', + ) + latest_template_name = max(tpl.name for tpl in templates) + return next(tpl for tpl in templates if tpl.name == latest_template_name).uuid @pytest.fixture(scope='session') def gce_custom_cloudinit_uuid(googleclient, gce_cert): - cloudinit_uuid = googleclient.get_template('customcinit', project=gce_cert['project_id']).uuid - return cloudinit_uuid + return googleclient.get_template('customcinit', project=gce_cert['project_id']).uuid @pytest.fixture(scope='session') @@ -109,7 +114,7 @@ def session_default_os(session_target_sat): @pytest.fixture(scope='module') def module_gce_compute(sat_gce, sat_gce_org, sat_gce_loc, gce_cert): - gce_cr = sat_gce.api.GCEComputeResource( + return sat_gce.api.GCEComputeResource( name=gen_string('alphanumeric'), provider='GCE', key_path=settings.gce.cert_path, @@ -117,20 +122,6 @@ def module_gce_compute(sat_gce, sat_gce_org, sat_gce_loc, gce_cert): organization=[sat_gce_org], location=[sat_gce_loc], ).create() - return gce_cr - - -@pytest.fixture(scope='module') -def gce_template(googleclient): - max_rhel7_template = max( - img.name for img in googleclient.list_templates(True) if str(img.name).startswith('rhel-7') - ) - return googleclient.get_template(max_rhel7_template, project='rhel-cloud').uuid - - -@pytest.fixture(scope='module') -def gce_cloudinit_template(googleclient, gce_cert): - return googleclient.get_template('customcinit', project=gce_cert['project_id']).uuid @pytest.fixture(scope='module') @@ -151,8 +142,8 @@ def gce_domain(sat_gce_org, sat_gce_loc, gce_cert, sat_gce): @pytest.fixture(scope='module') def gce_resource_with_image( - gce_template, - gce_cloudinit_template, + gce_latest_rhel_uuid, + gce_custom_cloudinit_uuid, gce_cert, sat_gce_default_architecture, sat_gce_default_os, @@ -184,7 +175,7 @@ def gce_resource_with_image( name='autogce_img', operatingsystem=sat_gce_default_os, username=vm_user, - uuid=gce_template, + uuid=gce_latest_rhel_uuid, ).create() # Cloud-Init Image sat_gce.api.Image( @@ -193,7 +184,7 @@ def gce_resource_with_image( name='autogce_img_cinit', operatingsystem=sat_gce_default_os, username=vm_user, - uuid=gce_cloudinit_template, + uuid=gce_custom_cloudinit_uuid, user_data=True, 
).create() return gce_cr @@ -212,7 +203,7 @@ def gce_hostgroup( googleclient, ): """Sets Hostgroup for GCE Host Provisioning""" - hgroup = sat_gce.api.HostGroup( + return sat_gce.api.HostGroup( architecture=sat_gce_default_architecture, compute_resource=module_gce_compute, domain=sat_gce_domain, @@ -222,7 +213,6 @@ def gce_hostgroup( organization=[sat_gce_org], ptable=sat_gce_default_partition_table, ).create() - return hgroup @pytest.fixture(scope='module') @@ -256,7 +246,7 @@ def module_gce_cloudimg( sat_gce, ): """Creates cloudinit image on GCE Compute Resource""" - cloud_image = sat_gce.api.Image( + return sat_gce.api.Image( architecture=sat_gce_default_architecture, compute_resource=module_gce_compute, name=gen_string('alpha'), @@ -265,7 +255,6 @@ def module_gce_cloudimg( uuid=gce_custom_cloudinit_uuid, user_data=True, ).create() - return cloud_image @pytest.fixture(scope='module') @@ -277,7 +266,7 @@ def module_gce_finishimg( sat_gce, ): """Creates finish image on GCE Compute Resource""" - finish_image = sat_gce.api.Image( + return sat_gce.api.Image( architecture=sat_gce_default_architecture, compute_resource=module_gce_compute, name=gen_string('alpha'), @@ -285,10 +274,9 @@ def module_gce_finishimg( username=gen_string('alpha'), uuid=gce_latest_rhel_uuid, ).create() - return finish_image -@pytest.fixture() +@pytest.fixture def gce_setting_update(sat_gce): sat_gce.update_setting('destroy_vm_on_host_delete', True) yield diff --git a/pytest_fixtures/component/provision_libvirt.py b/pytest_fixtures/component/provision_libvirt.py index 130f837454b..b294ca27a29 100644 --- a/pytest_fixtures/component/provision_libvirt.py +++ b/pytest_fixtures/component/provision_libvirt.py @@ -1,15 +1,21 @@ # Compute resource - Libvirt entities import pytest -from nailgun import entities -@pytest.fixture(scope="module") -def module_cr_libvirt(module_org, module_location): - return entities.LibvirtComputeResource( +@pytest.fixture(scope='module') +def module_cr_libvirt(module_target_sat, module_org, module_location): + return module_target_sat.api.LibvirtComputeResource( organization=[module_org], location=[module_location] ).create() -@pytest.fixture(scope="module") -def module_libvirt_image(module_cr_libvirt): - return entities.Image(compute_resource=module_cr_libvirt).create() +@pytest.fixture(scope='module') +def module_libvirt_image(module_target_sat, module_cr_libvirt): + return module_target_sat.api.Image(compute_resource=module_cr_libvirt).create() + + +@pytest.fixture(scope='module') +def module_libvirt_provisioning_sat(module_provisioning_sat): + # Configure Libvirt CR for provisioning + module_provisioning_sat.sat.configure_libvirt_cr() + return module_provisioning_sat diff --git a/pytest_fixtures/component/provision_pxe.py b/pytest_fixtures/component/provision_pxe.py index 5913000f051..beef717d94d 100644 --- a/pytest_fixtures/component/provision_pxe.py +++ b/pytest_fixtures/component/provision_pxe.py @@ -3,11 +3,11 @@ import re from tempfile import mkstemp -import pytest from box import Box from broker import Broker from fauxfactory import gen_string from packaging.version import Version +import pytest from robottelo import constants from robottelo.config import settings @@ -70,7 +70,7 @@ def module_provisioning_rhel_content( releasever=constants.REPOS['kickstart'][name]['version'], ) # do not sync content repos for discovery based provisioning. 
- if not module_provisioning_sat.provisioning_type == 'discovery': + if module_provisioning_sat.provisioning_type != 'discovery': rh_repo_id = sat.api_factory.enable_rhrepo_and_fetchid( basearch=constants.DEFAULT_ARCHITECTURE, org_id=module_sca_manifest_org.id, @@ -124,6 +124,12 @@ def module_provisioning_rhel_content( environment=module_lce_library, ).create() + # Ensure client repo is enabled in the activation key + content = ak.product_content(data={'content_access_mode_all': '1'})['results'] + client_repo_label = [repo['label'] for repo in content if repo['name'] == client_repo.name][0] + ak.content_override( + data={'content_overrides': [{'content_label': client_repo_label, 'value': '1'}]} + ) return Box(os=os, ak=ak, ksrepo=ksrepo, cv=content_view) @@ -146,12 +152,12 @@ def module_provisioning_sat( provisioning_domain_name = f"{gen_string('alpha').lower()}.foo" broker_data_out = Broker().execute( - workflow="configure-install-sat-provisioning-rhv", - artifacts="last", + workflow='configure-install-sat-provisioning-rhv', + artifacts='last', target_vlan_id=settings.provisioning.vlan_id, target_host=sat.name, provisioning_dns_zone=provisioning_domain_name, - sat_version=sat.version, + sat_version='stream' if sat.is_stream else sat.version, ) broker_data_out = Box(**broker_data_out['data_out']) @@ -231,6 +237,30 @@ def provisioning_host(module_ssh_key_file, pxe_loader): prov_host.blank = getattr(prov_host, 'blank', False) +@pytest.fixture +def provision_multiple_hosts(module_ssh_key_file, pxe_loader, request): + """Fixture to check out blank VMs (two by default; override via request.param)""" + vlan_id = settings.provisioning.vlan_id + cd_iso = ( + "" # TODO: Make this an optional fixture parameter (update vm_firmware when adding this) + ) + with Broker( + workflow="deploy-configure-pxe-provisioning-host-rhv", + host_class=ContentHost, + _count=getattr(request, 'param', 2), + target_vlan_id=vlan_id, + target_vm_firmware=pxe_loader.vm_firmware, + target_vm_cd_iso=cd_iso, + blank=True, + target_memory='6GiB', + auth=module_ssh_key_file, + ) as hosts: + yield hosts + + for prov_host in hosts: + prov_host.blank = getattr(prov_host, 'blank', False) + + @pytest.fixture def provisioning_hostgroup( module_provisioning_sat, @@ -279,6 +309,8 @@ def pxe_loader(request): PXE_LOADER_MAP = { 'bios': {'vm_firmware': 'bios', 'pxe_loader': 'PXELinux BIOS'}, 'uefi': {'vm_firmware': 'uefi', 'pxe_loader': 'Grub2 UEFI'}, + 'ipxe': {'vm_firmware': 'bios', 'pxe_loader': 'iPXE Embedded'}, + 'http_uefi': {'vm_firmware': 'uefi', 'pxe_loader': 'Grub2 UEFI HTTP'}, } return Box(PXE_LOADER_MAP[getattr(request, 'param', 'bios')]) diff --git a/pytest_fixtures/component/provision_vmware.py b/pytest_fixtures/component/provision_vmware.py new file mode 100644 index 00000000000..385f49bae38 --- /dev/null +++ b/pytest_fixtures/component/provision_vmware.py @@ -0,0 +1,67 @@ +from fauxfactory import gen_string +import pytest + +from robottelo.config import settings + + +@pytest.fixture(scope='module') +def vmware(request): + versions = { + 'vmware7': settings.vmware.vcenter7, + 'vmware8': settings.vmware.vcenter8, + } + return versions[getattr(request, 'param', 'vmware8')] + + +@pytest.fixture(scope='module') +def module_vmware_cr(module_provisioning_sat, module_sca_manifest_org, module_location, vmware): + return module_provisioning_sat.sat.api.VMWareComputeResource( + name=gen_string('alpha'), + provider='Vmware', + url=vmware.hostname, + user=settings.vmware.username, + password=settings.vmware.password, + datacenter=settings.vmware.datacenter, +
organization=[module_sca_manifest_org], + location=[module_location], + ).create() + + +@pytest.fixture +def module_vmware_hostgroup( + module_vmware_cr, + module_provisioning_sat, + module_sca_manifest_org, + module_location, + default_architecture, + module_provisioning_rhel_content, + module_lce_library, + default_partitiontable, + module_provisioning_capsule, + pxe_loader, +): + return module_provisioning_sat.sat.api.HostGroup( + name=gen_string('alpha'), + organization=[module_sca_manifest_org], + location=[module_location], + architecture=default_architecture, + domain=module_provisioning_sat.domain, + content_source=module_provisioning_capsule.id, + content_view=module_provisioning_rhel_content.cv, + compute_resource=module_vmware_cr, + kickstart_repository=module_provisioning_rhel_content.ksrepo, + lifecycle_environment=module_lce_library, + root_pass=settings.provisioning.host_root_password, + operatingsystem=module_provisioning_rhel_content.os, + ptable=default_partitiontable, + subnet=module_provisioning_sat.subnet, + pxe_loader=pxe_loader.pxe_loader, + group_parameters_attributes=[ + # assign the AK in order for the hosts to be subscribed + { + 'name': 'kt_activation_keys', + 'parameter_type': 'string', + 'value': module_provisioning_rhel_content.ak.name, + }, + ], + ).create() diff --git a/pytest_fixtures/component/provisioning_template.py b/pytest_fixtures/component/provisioning_template.py index 6b8e96de7e9..7b797e1da7c 100644 --- a/pytest_fixtures/component/provisioning_template.py +++ b/pytest_fixtures/component/provisioning_template.py @@ -1,13 +1,11 @@ # Provisioning Template Fixtures -import pytest from box import Box from nailgun import entities from packaging.version import Version +import pytest from robottelo import constants -from robottelo.constants import DEFAULT_PTABLE -from robottelo.constants import DEFAULT_PXE_TEMPLATE -from robottelo.constants import DEFAULT_TEMPLATE +from robottelo.constants import DEFAULT_PTABLE, DEFAULT_PXE_TEMPLATE, DEFAULT_TEMPLATE @pytest.fixture(scope='module') @@ -19,8 +17,7 @@ def module_provisioningtemplate_default(module_org, module_location): provisioning_template.organization.append(module_org) provisioning_template.location.append(module_location) provisioning_template.update(['organization', 'location']) - provisioning_template = entities.ProvisioningTemplate(id=provisioning_template.id).read() - return provisioning_template + return entities.ProvisioningTemplate(id=provisioning_template.id).read() @pytest.fixture(scope='module') @@ -32,8 +29,7 @@ def module_provisioningtemplate_pxe(module_org, module_location): pxe_template.organization.append(module_org) pxe_template.location.append(module_location) pxe_template.update(['organization', 'location']) - pxe_template = entities.ProvisioningTemplate(id=pxe_template.id).read() - return pxe_template + return entities.ProvisioningTemplate(id=pxe_template.id).read() @pytest.fixture(scope='session') @@ -41,6 +37,7 @@ def default_partitiontable(): ptables = entities.PartitionTable().search(query={'search': f'name="{DEFAULT_PTABLE}"'}) if ptables: return ptables[0].read() + return None @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/puppet.py b/pytest_fixtures/component/puppet.py index d0f9b267263..c531c7888ac 100644 --- a/pytest_fixtures/component/puppet.py +++ b/pytest_fixtures/component/puppet.py @@ -1,36 +1,38 @@ # Puppet Environment fixtures import pytest -from robottelo.config import settings from robottelo.constants import ENVIRONMENT
@pytest.fixture(scope='session') def session_puppet_enabled_sat(session_satellite_host): """Satellite with enabled puppet plugin""" - yield session_satellite_host.enable_puppet_satellite() + if session_satellite_host: + yield session_satellite_host.enable_puppet_satellite() + else: + yield @pytest.fixture(scope='session') def session_puppet_enabled_capsule(session_capsule_host, session_puppet_enabled_sat): """Capsule with enabled puppet plugin""" session_capsule_host.capsule_setup(sat_host=session_puppet_enabled_sat) - yield session_capsule_host.enable_puppet_capsule(satellite=session_puppet_enabled_sat) + return session_capsule_host.enable_puppet_capsule(satellite=session_puppet_enabled_sat) @pytest.fixture(scope='module') def module_puppet_org(session_puppet_enabled_sat): - yield session_puppet_enabled_sat.api.Organization().create() + return session_puppet_enabled_sat.api.Organization().create() @pytest.fixture(scope='module') def module_puppet_loc(session_puppet_enabled_sat): - yield session_puppet_enabled_sat.api.Location().create() + return session_puppet_enabled_sat.api.Location().create() @pytest.fixture(scope='module') def module_puppet_domain(session_puppet_enabled_sat, module_puppet_loc, module_puppet_org): - yield session_puppet_enabled_sat.api.Domain( + return session_puppet_enabled_sat.api.Domain( location=[module_puppet_loc], organization=[module_puppet_org] ).create() @@ -42,6 +44,7 @@ def default_puppet_environment(module_puppet_org, session_puppet_enabled_sat): ) if environments: return environments[0].read() + return None @pytest.fixture(scope='module') @@ -52,7 +55,6 @@ def module_puppet_environment(module_puppet_org, module_puppet_loc, session_pupp return session_puppet_enabled_sat.api.Environment(id=environment.id).read() -@pytest.mark.skipif((not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url') @pytest.fixture(scope='module') def module_import_puppet_module(session_puppet_enabled_sat): """Returns custom puppet environment name that contains imported puppet module @@ -100,10 +102,7 @@ def module_puppet_classes( @pytest.fixture(scope='session', params=[True, False], ids=["puppet_enabled", "puppet_disabled"]) def parametrized_puppet_sat(request, session_target_sat, session_puppet_enabled_sat): - if request.param: - sat = session_puppet_enabled_sat - else: - sat = session_target_sat + sat = session_puppet_enabled_sat if request.param else session_target_sat return {'sat': sat, 'enabled': request.param} diff --git a/pytest_fixtures/component/repository.py b/pytest_fixtures/component/repository.py index fa7f479aafe..ce49a1d7f88 100644 --- a/pytest_fixtures/component/repository.py +++ b/pytest_fixtures/component/repository.py @@ -1,13 +1,11 @@ # Repository Fixtures -import pytest from fauxfactory import gen_string from nailgun import entities from nailgun.entity_mixins import call_entity_method_with_timeout +import pytest -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.config import settings +from robottelo.constants import DEFAULT_ARCHITECTURE, DEFAULT_ORG, PRDS, REPOS, REPOSET @pytest.fixture(scope='module') @@ -25,7 +23,7 @@ def module_repo(module_repo_options, module_target_sat): return module_target_sat.api.Repository(**module_repo_options).create() -@pytest.fixture(scope='function') +@pytest.fixture def function_product(function_org): return entities.Product(organization=function_org).create() @@ -35,24 
+33,6 @@ def module_product(module_org, module_target_sat): return module_target_sat.api.Product(organization=module_org).create() -@pytest.fixture(scope='module') -def rh_repo_gt_manifest(module_gt_manifest_org, module_target_sat): - """Use GT manifest org, creates RH tools repo, syncs and returns RH repo.""" - # enable rhel repo and return its ID - rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch=DEFAULT_ARCHITECTURE, - org_id=module_gt_manifest_org.id, - product=PRDS['rhel'], - repo=REPOS['rhst7']['name'], - reposet=REPOSET['rhst7'], - releasever=None, - ) - # Sync step because repo is not synced by default - rh_repo = entities.Repository(id=rh_repo_id).read() - rh_repo.sync() - return rh_repo - - @pytest.fixture(scope='module') def module_rhst_repo(module_target_sat, module_org_with_manifest, module_promoted_cv, module_lce): """Use module org with manifest, creates RH tools repo, syncs and returns RH repo id.""" @@ -76,7 +56,7 @@ def module_rhst_repo(module_target_sat, module_org_with_manifest, module_promote return REPOS['rhst7']['id'] -@pytest.fixture(scope="function") +@pytest.fixture def repo_setup(): """ This fixture is used to create an organization, product, repository, and lifecycle environment @@ -87,8 +67,31 @@ def repo_setup(): product = entities.Product(organization=org).create() repo = entities.Repository(name=repo_name, product=product).create() lce = entities.LifecycleEnvironment(organization=org).create() - details = {'org': org, 'product': product, 'repo': repo, 'lce': lce} - yield details + return {'org': org, 'product': product, 'repo': repo, 'lce': lce} + + +@pytest.fixture(scope='module') +def setup_content(module_org): + """This fixture is used to set up an activation key with a custom product attached.
Used for + registering a host. + """ + org = module_org + custom_repo = entities.Repository( + product=entities.Product(organization=org).create(), + ).create() + custom_repo.sync() + lce = entities.LifecycleEnvironment(organization=org).create() + cv = entities.ContentView( + organization=org, + repository=[custom_repo.id], + ).create() + cv.publish() + cvv = cv.read().version[0].read() + cvv.promote(data={'environment_ids': lce.id, 'force': False}) + ak = entities.ActivationKey( + content_view=cv, max_hosts=100, organization=org, environment=lce, auto_attach=True + ).create() + return ak, org, custom_repo @pytest.fixture(scope='module') @@ -98,6 +101,16 @@ def module_repository(os_path, module_product, module_target_sat): return repo +@pytest.fixture +def custom_synced_repo(target_sat): + custom_repo = target_sat.api.Repository( + product=target_sat.api.Product(organization=DEFAULT_ORG).create(), + url=settings.repos.yum_0.url, + ).create() + custom_repo.sync() + return custom_repo + + def _simplify_repos(request, repos): """This is a helper function that transforms repos_collection related fixture parameters into a list that can be passed to robottelo.host_helpers.RepositoryMixins.RepositoryCollection @@ -175,7 +188,7 @@ def repos_collection(request, target_sat): """ repos = getattr(request, 'param', []) repo_distro, repos = _simplify_repos(request, repos) - _repos_collection = target_sat.cli_factory.RepositoryCollection( + return target_sat.cli_factory.RepositoryCollection( distro=repo_distro or request.getfixturevalue('distro'), repositories=[ getattr(target_sat.cli_factory, repo_name)(**repo_params) @@ -183,7 +196,6 @@ def repos_collection(request, target_sat): for repo_name, repo_params in repo.items() ], ) - return _repos_collection @pytest.fixture(scope='module') @@ -211,7 +223,9 @@ def module_repos_collection_with_setup(request, module_target_sat, module_org, m @pytest.fixture(scope='module') -def module_repos_collection_with_manifest(request, module_target_sat, module_org, module_lce): +def module_repos_collection_with_manifest( + request, module_target_sat, module_entitlement_manifest_org, module_lce +): """This fixture and its usage is very similar to repos_collection fixture above with extra setup_content and uploaded manifest capabilities using module_org and module_lce fixtures @@ -229,5 +243,27 @@ def module_repos_collection_with_manifest(request, module_target_sat, module_org for repo_name, repo_params in repo.items() ], ) - _repos_collection.setup_content(module_org.id, module_lce.id, upload_manifest=True) + _repos_collection.setup_content(module_entitlement_manifest_org.id, module_lce.id) + return _repos_collection + + +@pytest.fixture +def function_repos_collection_with_manifest( + request, target_sat, function_sca_manifest_org, function_lce +): + """This fixture and its usage are very similar to the repos_collection fixture above, with + extra setup_content and uploaded-manifest capabilities using the function_lce and + function_sca_manifest_org fixtures. + """ + repos = getattr(request, 'param', []) + repo_distro, repos = _simplify_repos(request, repos) + _repos_collection = target_sat.cli_factory.RepositoryCollection( + distro=repo_distro, + repositories=[ + getattr(target_sat.cli_factory, repo_name)(**repo_params) + for repo in repos + for repo_name, repo_params in repo.items() + ], + ) + _repos_collection.setup_content(function_sca_manifest_org.id, function_lce.id) return _repos_collection diff --git a/pytest_fixtures/component/rh_cloud.py
b/pytest_fixtures/component/rh_cloud.py index 8efc0044ae0..fd035f653c3 100644 --- a/pytest_fixtures/component/rh_cloud.py +++ b/pytest_fixtures/component/rh_cloud.py @@ -1,51 +1,42 @@ import pytest -from broker import Broker - -@pytest.fixture(scope='module') -def rhcloud_sat_host(satellite_factory): - """A module level fixture that provides a Satellite based on config settings""" - new_sat = satellite_factory() - yield new_sat - new_sat.teardown() - Broker(hosts=[new_sat]).checkin() +from robottelo.constants import CAPSULE_REGISTRATION_OPTS @pytest.fixture(scope='module') -def rhcloud_manifest_org(rhcloud_sat_host, module_sca_manifest): +def rhcloud_manifest_org(module_target_sat, module_extra_rhel_entitlement_manifest): """A module level fixture to get organization with manifest.""" - org = rhcloud_sat_host.api.Organization().create() - rhcloud_sat_host.upload_manifest(org.id, module_sca_manifest.content) + org = module_target_sat.api.Organization().create() + module_target_sat.upload_manifest(org.id, module_extra_rhel_entitlement_manifest.content) return org @pytest.fixture(scope='module') -def organization_ak_setup(rhcloud_sat_host, rhcloud_manifest_org): +def rhcloud_activation_key(module_target_sat, rhcloud_manifest_org): """A module-level fixture to create an Activation key in module_org""" purpose_addons = "test-addon1, test-addon2" - ak = rhcloud_sat_host.api.ActivationKey( + return module_target_sat.api.ActivationKey( content_view=rhcloud_manifest_org.default_content_view, organization=rhcloud_manifest_org, - environment=rhcloud_sat_host.api.LifecycleEnvironment(id=rhcloud_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment(id=rhcloud_manifest_org.library.id), purpose_addons=[purpose_addons], service_level='Self-Support', purpose_usage='test-usage', purpose_role='test-role', - auto_attach=True, + auto_attach=False, ).create() - yield rhcloud_manifest_org, ak - ak.delete() @pytest.fixture(scope='module') -def rhcloud_registered_hosts(organization_ak_setup, mod_content_hosts, rhcloud_sat_host): +def rhcloud_registered_hosts( + rhcloud_activation_key, rhcloud_manifest_org, mod_content_hosts, module_target_sat +): """Fixture that registers content hosts to Satellite and Insights.""" - org, ak = organization_ak_setup for vm in mod_content_hosts: vm.configure_rhai_client( - satellite=rhcloud_sat_host, - activation_key=ak.name, - org=org.label, + satellite=module_target_sat, + activation_key=rhcloud_activation_key.name, + org=rhcloud_manifest_org.label, rhel_distro=f"rhel{vm.os_version.major}", ) assert vm.subscribed @@ -53,35 +44,49 @@ def rhcloud_registered_hosts(organization_ak_setup, mod_content_hosts, rhcloud_s @pytest.fixture -def rhel_insights_vm(rhcloud_sat_host, organization_ak_setup, rhel_contenthost): - """A module-level fixture to create rhel content host registered with insights.""" - # settings.supportability.content_hosts.rhel.versions - org, ak = organization_ak_setup - rhel_contenthost.configure_rex(satellite=rhcloud_sat_host, org=org, register=False) +def rhel_insights_vm( + module_target_sat, rhcloud_activation_key, rhcloud_manifest_org, rhel_contenthost +): + """A function-level fixture to create a RHEL content host registered with Insights.""" + rhel_contenthost.configure_rex( + satellite=module_target_sat, org=rhcloud_manifest_org, register=False + ) rhel_contenthost.configure_rhai_client( - satellite=rhcloud_sat_host, - activation_key=ak.name, - org=org.label, + satellite=module_target_sat, + activation_key=rhcloud_activation_key.name,
+ org=rhcloud_manifest_org.label, rhel_distro=f"rhel{rhel_contenthost.os_version.major}", ) - yield rhel_contenthost + # Generate report + module_target_sat.generate_inventory_report(rhcloud_manifest_org) + # Sync inventory status + module_target_sat.sync_inventory_status(rhcloud_manifest_org) + return rhel_contenthost @pytest.fixture -def inventory_settings(rhcloud_sat_host): - hostnames_setting = rhcloud_sat_host.update_setting('obfuscate_inventory_hostnames', False) - ip_setting = rhcloud_sat_host.update_setting('obfuscate_inventory_ips', False) - packages_setting = rhcloud_sat_host.update_setting('exclude_installed_packages', False) - parameter_tags_setting = rhcloud_sat_host.update_setting('include_parameter_tags', False) +def inventory_settings(module_target_sat): + hostnames_setting = module_target_sat.update_setting('obfuscate_inventory_hostnames', False) + ip_setting = module_target_sat.update_setting('obfuscate_inventory_ips', False) + packages_setting = module_target_sat.update_setting('exclude_installed_packages', False) + parameter_tags_setting = module_target_sat.update_setting('include_parameter_tags', False) yield - rhcloud_sat_host.update_setting('obfuscate_inventory_hostnames', hostnames_setting) - rhcloud_sat_host.update_setting('obfuscate_inventory_ips', ip_setting) - rhcloud_sat_host.update_setting('exclude_installed_packages', packages_setting) - rhcloud_sat_host.update_setting('include_parameter_tags', parameter_tags_setting) + module_target_sat.update_setting('obfuscate_inventory_hostnames', hostnames_setting) + module_target_sat.update_setting('obfuscate_inventory_ips', ip_setting) + module_target_sat.update_setting('exclude_installed_packages', packages_setting) + module_target_sat.update_setting('include_parameter_tags', parameter_tags_setting) -@pytest.fixture -def rhcloud_capsule(capsule_host, rhcloud_sat_host): +@pytest.fixture(scope='module') +def rhcloud_capsule(module_capsule_host, module_target_sat, rhcloud_manifest_org, default_location): """Configure the capsule instance with the satellite from settings.server.hostname""" - capsule_host.capsule_setup(sat_host=rhcloud_sat_host) - yield capsule_host + org = rhcloud_manifest_org + module_capsule_host.capsule_setup(sat_host=module_target_sat, **CAPSULE_REGISTRATION_OPTS) + module_target_sat.cli.Capsule.update( + { + 'name': module_capsule_host.hostname, + 'organization-ids': org.id, + 'location-ids': default_location.id, + } + ) + return module_capsule_host diff --git a/pytest_fixtures/component/satellite_auth.py b/pytest_fixtures/component/satellite_auth.py index 73522eb4c33..c6df8df1897 100644 --- a/pytest_fixtures/component/satellite_auth.py +++ b/pytest_fixtures/component/satellite_auth.py @@ -1,20 +1,21 @@ import copy import socket -import pytest from box import Box from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import AUDIENCE_MAPPER -from robottelo.constants import CERT_PATH -from robottelo.constants import GROUP_MEMBERSHIP_MAPPER -from robottelo.constants import HAMMER_CONFIG -from robottelo.constants import HAMMER_SESSIONS -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE -from robottelo.hosts import IPAHost -from robottelo.hosts import SSOHost +from robottelo.constants import ( + AUDIENCE_MAPPER, + CERT_PATH, + GROUP_MEMBERSHIP_MAPPER, + HAMMER_CONFIG, + HAMMER_SESSIONS, + LDAP_ATTR, + LDAP_SERVER_TYPE, +) +from robottelo.hosts import IPAHost, SSOHost from robottelo.utils.datafactory import 
gen_string from robottelo.utils.installer import InstallerCommand from robottelo.utils.issue_handlers import is_open @@ -34,7 +35,7 @@ def default_ipa_host(module_target_sat): return IPAHost(module_target_sat) -@pytest.fixture() +@pytest.fixture def ldap_cleanup(): """this is an extra step taken to clean any existing ldap source""" ldap_auth_sources = entities.AuthSourceLDAP().search() @@ -43,7 +44,7 @@ def ldap_cleanup(): for user in users: user.delete() ldap_auth.delete() - yield + return @pytest.fixture(scope='session') @@ -103,7 +104,7 @@ def open_ldap_data(): } -@pytest.fixture(scope='function') +@pytest.fixture def auth_source(ldap_cleanup, module_org, module_location, ad_data): ad_data = ad_data() return entities.AuthSourceLDAP( @@ -126,7 +127,7 @@ def auth_source(ldap_cleanup, module_org, module_location, ad_data): ).create() -@pytest.fixture(scope='function') +@pytest.fixture def auth_source_ipa(ldap_cleanup, default_ipa_host, module_org, module_location): return entities.AuthSourceLDAP( onthefly_register=True, @@ -148,7 +149,7 @@ def auth_source_ipa(ldap_cleanup, default_ipa_host, module_org, module_location) ).create() -@pytest.fixture(scope='function') +@pytest.fixture def auth_source_open_ldap(ldap_cleanup, module_org, module_location, open_ldap_data): return entities.AuthSourceLDAP( onthefly_register=True, @@ -258,7 +259,7 @@ def ldap_auth_source( else: ldap_data['server_type'] = LDAP_SERVER_TYPE['UI']['posix'] ldap_data['attr_login'] = LDAP_ATTR['login'] - yield ldap_data, auth_source + return ldap_data, auth_source @pytest.fixture @@ -270,11 +271,12 @@ def auth_data(request, ad_data, ipa_data): ad_data['attr_login'] = LDAP_ATTR['login_ad'] ad_data['auth_type'] = auth_type return ad_data - elif auth_type == 'ipa': + if auth_type == 'ipa': ipa_data['server_type'] = LDAP_SERVER_TYPE['UI']['ipa'] ipa_data['attr_login'] = LDAP_ATTR['login'] ipa_data['auth_type'] = auth_type return ipa_data + return None @pytest.fixture(scope='module') @@ -333,7 +335,6 @@ def enable_external_auth_rhsso( default_sso_host.set_the_redirect_uri() -@pytest.mark.external_auth @pytest.fixture(scope='module') def module_enroll_idm_and_configure_external_auth(module_target_sat): ipa_host = IPAHost(module_target_sat) @@ -342,7 +343,6 @@ def module_enroll_idm_and_configure_external_auth(module_target_sat): ipa_host.disenroll_idm() -@pytest.mark.external_auth @pytest.fixture def func_enroll_idm_and_configure_external_auth(target_sat): ipa_host = IPAHost(target_sat) @@ -374,9 +374,9 @@ def rhsso_setting_setup(module_target_sat): rhhso_settings = { 'authorize_login_delegation': True, 'authorize_login_delegation_auth_source_user_autocreate': 'External', - 'login_delegation_logout_url': f'https://{settings.server.hostname}/users/extlogout', + 'login_delegation_logout_url': f'https://{module_target_sat.hostname}/users/extlogout', 'oidc_algorithm': 'RS256', - 'oidc_audience': [f'{settings.server.hostname}-foreman-openidc'], + 'oidc_audience': [f'{module_target_sat.hostname}-foreman-openidc'], 'oidc_issuer': f'{settings.rhsso.host_url}/auth/realms/{settings.rhsso.realm}', 'oidc_jwks_url': f'{settings.rhsso.host_url}/auth/realms' f'/{settings.rhsso.realm}/protocol/openid-connect/certs', @@ -409,19 +409,16 @@ def rhsso_setting_setup_with_timeout(module_target_sat, rhsso_setting_setup): setting_entity.update({'value'}) -@pytest.mark.external_auth @pytest.fixture(scope='module') def module_enroll_ad_and_configure_external_auth(ad_data, module_target_sat): module_target_sat.enroll_ad_and_configure_external_auth(ad_data) 
-@pytest.mark.external_auth @pytest.fixture def func_enroll_ad_and_configure_external_auth(ad_data, target_sat): target_sat.enroll_ad_and_configure_external_auth(ad_data) -@pytest.mark.external_auth @pytest.fixture def configure_hammer_no_creds(parametrized_enrolled_sat): """Configures hammer to use sessions and negotiate auth.""" @@ -432,7 +429,6 @@ def configure_hammer_no_creds(parametrized_enrolled_sat): parametrized_enrolled_sat.execute(f'mv -f {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') -@pytest.mark.external_auth @pytest.fixture def configure_hammer_negotiate(parametrized_enrolled_sat, configure_hammer_no_creds): """Configures hammer to use sessions and negotiate auth.""" @@ -447,7 +443,6 @@ def configure_hammer_negotiate(parametrized_enrolled_sat, configure_hammer_no_cr parametrized_enrolled_sat.execute(f'mv -f {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') -@pytest.mark.external_auth @pytest.fixture def configure_hammer_no_negotiate(parametrized_enrolled_sat): """Configures hammer not to use automatic negotiation.""" @@ -457,8 +452,7 @@ def configure_hammer_no_negotiate(parametrized_enrolled_sat): parametrized_enrolled_sat.execute(f'mv -f {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') -@pytest.mark.external_auth -@pytest.fixture(scope='function') +@pytest.fixture def hammer_logout(parametrized_enrolled_sat): """Logout in Hammer.""" result = parametrized_enrolled_sat.cli.Auth.logout() diff --git a/pytest_fixtures/component/settings.py b/pytest_fixtures/component/settings.py index 8f5b340bbc7..b541e703733 100644 --- a/pytest_fixtures/component/settings.py +++ b/pytest_fixtures/component/settings.py @@ -1,19 +1,21 @@ # Settings Fixtures import pytest -from nailgun import entities -@pytest.fixture(scope="function") -def setting_update(request): +@pytest.fixture +def setting_update(request, target_sat): """ This fixture is used to create an object of the provided settings parameter that we use in each test case to update their attributes and once the test case gets completed it helps to restore their default value """ - setting_object = entities.Setting().search(query={'search': f'name={request.param}'})[0] - default_setting_value = setting_object.value - if default_setting_value is None: - default_setting_value = '' + key_val = request.param + # split only on the first '=' so the new value itself may contain '=' + setting, new_value = tuple(key_val.split('=', 1)) if '=' in key_val else (key_val, None) + setting_object = target_sat.api.Setting().search(query={'search': f'name={setting}'})[0] + default_setting_value = '' if setting_object.value is None else setting_object.value + if new_value is not None: + setting_object.value = new_value + setting_object.update({'value'}) yield setting_object setting_object.value = default_setting_value setting_object.update({'value'}) diff --git a/pytest_fixtures/component/smartproxy.py b/pytest_fixtures/component/smartproxy.py index 3b711dfbeff..d879eff5d3a 100644 --- a/pytest_fixtures/component/smartproxy.py +++ b/pytest_fixtures/component/smartproxy.py @@ -13,11 +13,6 @@ def default_smart_proxy(session_target_sat): return session_target_sat.api.SmartProxy(id=smart_proxy.id).read() -@pytest.fixture(scope='session') -def import_puppet_classes(default_smart_proxy): - default_smart_proxy.import_puppetclasses(environment='production') - - @pytest.fixture(scope='module') def module_fake_proxy(request, module_target_sat): """Create a Proxy and register the cleanup function""" diff --git a/pytest_fixtures/component/subnet.py b/pytest_fixtures/component/subnet.py index e43f9952652..0e1ddb1b782 100644 --- a/pytest_fixtures/component/subnet.py +++
b/pytest_fixtures/component/subnet.py @@ -1,6 +1,6 @@ # Subnet Fixtures -import pytest from nailgun import entities +import pytest @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/taxonomy.py b/pytest_fixtures/component/taxonomy.py index 2276b3c3b88..e6ac87357cd 100644 --- a/pytest_fixtures/component/taxonomy.py +++ b/pytest_fixtures/component/taxonomy.py @@ -1,10 +1,9 @@ # Content Component fixtures -import pytest from manifester import Manifester +import pytest from robottelo.config import settings -from robottelo.constants import DEFAULT_LOC -from robottelo.constants import DEFAULT_ORG +from robottelo.constants import DEFAULT_LOC, DEFAULT_ORG from robottelo.utils.manifest import clone @@ -104,6 +103,13 @@ def module_sca_manifest_org(module_org, module_sca_manifest, module_target_sat): return module_org +@pytest.fixture(scope='class') +def class_sca_manifest_org(class_org, class_sca_manifest, class_target_sat): + """Creates an organization and uploads an SCA mode manifest generated with manifester""" + class_target_sat.upload_manifest(class_org.id, class_sca_manifest.content) + return class_org + + @pytest.fixture(scope='module') def module_extra_rhel_entitlement_manifest_org( module_target_sat, @@ -190,6 +196,16 @@ def module_extra_rhel_entitlement_manifest(): yield manifest +@pytest.fixture(scope='module') +def module_extra_rhel_sca_manifest(): + """Yields a manifest in SCA mode with subscriptions determined by the + `manifest_category.extra_rhel_entitlement` setting in conf/manifest.yaml.""" + with Manifester( + manifest_category=settings.manifest.extra_rhel_entitlement, simple_content_access="enabled" + ) as manifest: + yield manifest + + @pytest.fixture(scope='module') def module_sca_manifest(): """Yields a manifest in Simple Content Access mode with subscriptions determined by the @@ -198,7 +214,15 @@ def module_sca_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture(scope='class') +def class_sca_manifest(): + """Yields a manifest in Simple Content Access mode with subscriptions determined by the + `manifest_category.golden_ticket` setting in conf/manifest.yaml.""" + with Manifester(manifest_category=settings.manifest.golden_ticket) as manifest: + yield manifest + + +@pytest.fixture def function_entitlement_manifest(): """Yields a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml.""" @@ -206,7 +230,7 @@ def function_entitlement_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture def function_secondary_entitlement_manifest(): """Yields a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml. @@ -215,7 +239,7 @@ def function_secondary_entitlement_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture def function_sca_manifest(): """Yields a manifest in Simple Content Access mode with subscriptions determined by the `manifest_category.golden_ticket` setting in conf/manifest.yaml.""" @@ -231,7 +255,7 @@ def smart_proxy_location(module_org, module_target_sat, default_smart_proxy): return location -@pytest.fixture(scope='function') +@pytest.fixture def upgrade_entitlement_manifest(): """Returns a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml.
used only for diff --git a/pytest_fixtures/component/templatesync.py b/pytest_fixtures/component/templatesync.py index 6b5881a95fc..e6c93096a8b 100644 --- a/pytest_fixtures/component/templatesync.py +++ b/pytest_fixtures/component/templatesync.py @@ -1,13 +1,13 @@ +from fauxfactory import gen_string import pytest import requests -from fauxfactory import gen_string from robottelo.config import settings from robottelo.constants import FOREMAN_TEMPLATE_ROOT_DIR from robottelo.logging import logger -@pytest.fixture() +@pytest.fixture def create_import_export_local_dir(target_sat): """Creates a local directory inside root_dir on satellite from where the templates will be imported from or exported to. @@ -24,7 +24,7 @@ def create_import_export_local_dir(target_sat): f'mkdir -p {dir_path} && ' f'chown foreman -R {root_dir} && ' f'restorecon -R -v {root_dir} && ' - f'chcon -t httpd_sys_rw_content_t {dir_path} -R' + f'chcon -t foreman_lib_t {dir_path} -R' ) if result.status != 0: logger.debug(result.stdout) @@ -66,7 +66,7 @@ def git_pub_key(session_target_sat, git_port): id = res.json()['id'] # add ssh key to known host session_target_sat.execute( - f'ssh-keyscan -t rsa -p {git.ssh_port} {git.hostname} > {key_path}/known_hosts' + f'ssh-keyscan -4 -t rsa -p {git.ssh_port} {git.hostname} > {key_path}/known_hosts' ) yield res = requests.delete( @@ -76,7 +76,7 @@ def git_pub_key(session_target_sat, git_port): res.raise_for_status() -@pytest.fixture(scope='function') +@pytest.fixture def git_repository(git_port, git_pub_key, request): """Creates a new repository on git provider for exporting templates. @@ -96,7 +96,7 @@ def git_repository(git_port, git_pub_key, request): res.raise_for_status() -@pytest.fixture() +@pytest.fixture def git_branch(git_repository): """Creates a new branch in the git repository for exporting templates. diff --git a/pytest_fixtures/component/user.py b/pytest_fixtures/component/user.py index fea405520e4..a1b9b7a1a9f 100644 --- a/pytest_fixtures/component/user.py +++ b/pytest_fixtures/component/user.py @@ -1,5 +1,5 @@ -import pytest from nailgun import entities +import pytest @pytest.fixture @@ -9,5 +9,4 @@ def user_not_exists(request): if users: users[0].delete() return True - else: - return False + return False diff --git a/pytest_fixtures/component/user_role.py b/pytest_fixtures/component/user_role.py index 8e4f3f00bea..92b742e6966 100644 --- a/pytest_fixtures/component/user_role.py +++ b/pytest_fixtures/component/user_role.py @@ -1,7 +1,6 @@ -import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string +from fauxfactory import gen_alphanumeric, gen_string from nailgun import entities +import pytest @pytest.fixture(scope='class') diff --git a/pytest_fixtures/component/virtwho_config.py b/pytest_fixtures/component/virtwho_config.py new file mode 100644 index 00000000000..b8970232136 --- /dev/null +++ b/pytest_fixtures/component/virtwho_config.py @@ -0,0 +1,356 @@ +import pytest + +from robottelo.config import settings +from robottelo.utils.datafactory import gen_string +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_command_option, + get_guest_info, +) + +LOGGEDOUT = 'Logged out.' 
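The fixtures in this new module key off the test module's file name to select the hypervisor-specific settings and the matching org/session. A minimal sketch of that naming convention, assuming a hypothetical test module tests/foreman/virtwho/cli/test_esx_sca.py:

# For a hypothetical test module tests/foreman/virtwho/cli/test_esx_sca.py:
module_name = 'tests.foreman.virtwho.cli.test_esx_sca'.split('.')[-1]  # -> 'test_esx_sca'
hypervisor_type = module_name.split('_', 1)[-1]  # -> 'esx_sca', so the 'esx' branch matches
print('sca' in module_name)  # True -> org_module resolves to module_sca_manifest_org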
+ + +@pytest.fixture +def org_module(request, default_org, module_sca_manifest_org): + if 'sca' in request.module.__name__.split('.')[-1]: + org_module = module_sca_manifest_org + else: + org_module = default_org + return org_module + + +@pytest.fixture +def org_session(request, session, session_sca): + if 'sca' in request.module.__name__.split('.')[-1]: + org_session = session_sca + else: + org_session = session + return org_session + + +@pytest.fixture +def form_data_cli(request, target_sat, org_module): + hypervisor_type = request.module.__name__.split('.')[-1].split('_', 1)[-1] + if 'esx' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor-id': 'hostname', + 'hypervisor-type': settings.virtwho.esx.hypervisor_type, + 'hypervisor-server': settings.virtwho.esx.hypervisor_server, + 'organization-id': org_module.id, + 'filtering-mode': 'none', + 'satellite-url': target_sat.hostname, + 'hypervisor-username': settings.virtwho.esx.hypervisor_username, + 'hypervisor-password': settings.virtwho.esx.hypervisor_password, + } + elif 'hyperv' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor-id': 'hostname', + 'hypervisor-type': settings.virtwho.hyperv.hypervisor_type, + 'hypervisor-server': settings.virtwho.hyperv.hypervisor_server, + 'organization-id': org_module.id, + 'filtering-mode': 'none', + 'satellite-url': target_sat.hostname, + 'hypervisor-username': settings.virtwho.hyperv.hypervisor_username, + 'hypervisor-password': settings.virtwho.hyperv.hypervisor_password, + } + elif 'kubevirt' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor-id': 'hostname', + 'hypervisor-type': settings.virtwho.kubevirt.hypervisor_type, + 'organization-id': org_module.id, + 'filtering-mode': 'none', + 'satellite-url': target_sat.hostname, + 'kubeconfig-path': settings.virtwho.kubevirt.hypervisor_config_file, + } + elif 'libvirt' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor-id': 'hostname', + 'hypervisor-type': settings.virtwho.libvirt.hypervisor_type, + 'hypervisor-server': settings.virtwho.libvirt.hypervisor_server, + 'organization-id': org_module.id, + 'filtering-mode': 'none', + 'satellite-url': target_sat.hostname, + 'hypervisor-username': settings.virtwho.libvirt.hypervisor_username, + } + elif 'nutanix' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor-id': 'hostname', + 'hypervisor-type': settings.virtwho.ahv.hypervisor_type, + 'hypervisor-server': settings.virtwho.ahv.hypervisor_server, + 'organization-id': org_module.id, + 'filtering-mode': 'none', + 'satellite-url': target_sat.hostname, + 'hypervisor-username': settings.virtwho.ahv.hypervisor_username, + 'hypervisor-password': settings.virtwho.ahv.hypervisor_password, + 'prism-flavor': settings.virtwho.ahv.prism_flavor, + 'ahv-internal-debug': 'false', + } + return form + + +@pytest.fixture +def form_data_api(request, target_sat, org_module): + hypervisor_type = request.module.__name__.split('.')[-1].split('_', 1)[-1] + if 'esx' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.esx.hypervisor_type, + 'hypervisor_server': settings.virtwho.esx.hypervisor_server, + 'organization_id': org_module.id, + 'filtering_mode': 'none', + 'satellite_url': 
target_sat.hostname, + 'hypervisor_username': settings.virtwho.esx.hypervisor_username, + 'hypervisor_password': settings.virtwho.esx.hypervisor_password, + } + elif 'hyperv' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, + 'hypervisor_server': settings.virtwho.hyperv.hypervisor_server, + 'organization_id': org_module.id, + 'filtering_mode': 'none', + 'satellite_url': target_sat.hostname, + 'hypervisor_username': settings.virtwho.hyperv.hypervisor_username, + 'hypervisor_password': settings.virtwho.hyperv.hypervisor_password, + } + elif 'kubevirt' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, + 'organization_id': org_module.id, + 'filtering_mode': 'none', + 'satellite_url': target_sat.hostname, + 'kubeconfig_path': settings.virtwho.kubevirt.hypervisor_config_file, + } + elif 'libvirt' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, + 'hypervisor_server': settings.virtwho.libvirt.hypervisor_server, + 'organization_id': org_module.id, + 'filtering_mode': 'none', + 'satellite_url': target_sat.hostname, + 'hypervisor_username': settings.virtwho.libvirt.hypervisor_username, + } + elif 'nutanix' in hypervisor_type: + form = { + 'name': gen_string('alpha'), + 'debug': 1, + 'interval': '60', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, + 'hypervisor_server': settings.virtwho.ahv.hypervisor_server, + 'organization_id': org_module.id, + 'filtering_mode': 'none', + 'satellite_url': target_sat.hostname, + 'hypervisor_username': settings.virtwho.ahv.hypervisor_username, + 'hypervisor_password': settings.virtwho.ahv.hypervisor_password, + 'prism_flavor': settings.virtwho.ahv.prism_flavor, + 'ahv_internal_debug': 'false', + } + return form + + +@pytest.fixture +def form_data_ui(request, target_sat, org_module): + hypervisor_type = request.module.__name__.split('.')[-1].split('_', 1)[-1] + if 'esx' in hypervisor_type: + form = { + 'debug': True, + 'interval': 'Every hour', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.esx.hypervisor_type, + 'hypervisor_content.server': settings.virtwho.esx.hypervisor_server, + 'hypervisor_content.username': settings.virtwho.esx.hypervisor_username, + 'hypervisor_content.password': settings.virtwho.esx.hypervisor_password, + } + elif 'hyperv' in hypervisor_type: + form = { + 'debug': True, + 'interval': 'Every hour', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, + 'hypervisor_content.server': settings.virtwho.hyperv.hypervisor_server, + 'hypervisor_content.username': settings.virtwho.hyperv.hypervisor_username, + 'hypervisor_content.password': settings.virtwho.hyperv.hypervisor_password, + } + elif 'kubevirt' in hypervisor_type: + form = { + 'debug': True, + 'interval': 'Every hour', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, + 'hypervisor_content.kubeconfig': settings.virtwho.kubevirt.hypervisor_config_file, + } + elif 'libvirt' in hypervisor_type: + form = { + 'debug': True, + 'interval': 'Every hour', + 'hypervisor_id': 'hostname', + 'hypervisor_type': 
settings.virtwho.libvirt.hypervisor_type, + 'hypervisor_content.server': settings.virtwho.libvirt.hypervisor_server, + 'hypervisor_content.username': settings.virtwho.libvirt.hypervisor_username, + } + elif 'nutanix' in hypervisor_type: + form = { + 'debug': True, + 'interval': 'Every hour', + 'hypervisor_id': 'hostname', + 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, + 'hypervisor_content.server': settings.virtwho.ahv.hypervisor_server, + 'hypervisor_content.username': settings.virtwho.ahv.hypervisor_username, + 'hypervisor_content.password': settings.virtwho.ahv.hypervisor_password, + 'hypervisor_content.prism_flavor': "Prism Element", + 'ahv_internal_debug': False, + } + return form + + +@pytest.fixture +def virtwho_config_cli(form_data_cli, target_sat): + virtwho_config_cli = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] + yield virtwho_config_cli + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) + + +@pytest.fixture +def virtwho_config_api(form_data_api, target_sat): + virtwho_config_api = target_sat.api.VirtWhoConfig(**form_data_api).create() + yield virtwho_config_api + virtwho_config_api.delete() + assert not target_sat.api.VirtWhoConfig().search( + query={'search': f"name={form_data_api['name']}"} + ) + + +@pytest.fixture +def virtwho_config_ui(form_data_ui, target_sat, org_session): + name = gen_string('alpha') + form_data_ui['name'] = name + with org_session: + org_session.virtwho_configure.create(form_data_ui) + yield virtwho_config_ui + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) + + +@pytest.fixture +def deploy_type_cli( + request, + org_module, + form_data_cli, + virtwho_config_cli, + target_sat, + default_location, +): + deploy_type = request.param.lower() + assert virtwho_config_cli['status'] == 'No Report Yet' + if 'script' in deploy_type: + script = target_sat.cli.VirtWhoConfig.fetch( + {'id': virtwho_config_cli['id']}, output_format='base' + ) + hypervisor_name, guest_name = deploy_configure_by_script( + script, form_data_cli['hypervisor-type'], debug=True, org=org_module.label + ) + elif deploy_type == 'organization-title': + virtwho_config_cli['organization-title'] = org_module.title + elif deploy_type == 'location-id': + virtwho_config_cli['location-id'] = default_location.id + if deploy_type in ['id', 'name', 'organization-title', 'location-id']: + if 'id' in deploy_type: + command = get_configure_command(virtwho_config_cli['id'], org_module.name) + else: + command = get_configure_command_option(deploy_type, virtwho_config_cli, org_module.name) + hypervisor_name, guest_name = deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], debug=True, org=org_module.label + ) + return hypervisor_name, guest_name + + +@pytest.fixture +def deploy_type_api( + request, + org_module, + form_data_api, + virtwho_config_api, + target_sat, +): + deploy_type = request.param.lower() + assert virtwho_config_api.status == 'unknown' + if "id" in deploy_type: + command = get_configure_command(virtwho_config_api.id, org_module.name) + hypervisor_name, guest_name = deploy_configure_by_command( + command, form_data_api['hypervisor_type'], debug=True, org=org_module.label + ) + elif "script" in deploy_type: + script = virtwho_config_api.deploy_script() + hypervisor_name, guest_name = deploy_configure_by_script( + script['virt_who_config_script'], + 
form_data_api['hypervisor_type'], + debug=True, + org=org_module.label, + ) + return hypervisor_name, guest_name + + +@pytest.fixture +def deploy_type_ui( + request, + org_module, + form_data_ui, + org_session, + virtwho_config_ui, + target_sat, +): + deploy_type = request.param.lower() + values = org_session.virtwho_configure.read(form_data_ui['name']) + if "id" in deploy_type: + command = values['deploy']['command'] + hypervisor_name, guest_name = deploy_configure_by_command( + command, form_data_ui['hypervisor_type'], debug=True, org=org_module.label + ) + elif "script" in deploy_type: + script = values['deploy']['script'] + hypervisor_name, guest_name = deploy_configure_by_script( + script, form_data_ui['hypervisor_type'], debug=True, org=org_module.label + ) + return hypervisor_name, guest_name + + +@pytest.fixture +def delete_host(form_data_api, target_sat): + guest_name, _ = get_guest_info(form_data_api['hypervisor_type']) + results = target_sat.api.Host().search(query={'search': guest_name}) + if results: + target_sat.api.Host(id=results[0].read_json()['id']).delete() diff --git a/pytest_fixtures/core/broker.py b/pytest_fixtures/core/broker.py index 0fb863c23cb..f3356dbbef5 100644 --- a/pytest_fixtures/core/broker.py +++ b/pytest_fixtures/core/broker.py @@ -1,36 +1,22 @@ from contextlib import contextmanager -import pytest from box import Box from broker import Broker -from wait_for import wait_for +import pytest from robottelo.config import settings -from robottelo.hosts import Capsule -from robottelo.hosts import Satellite -from robottelo.logging import logger - - -def _resolve_deploy_args(args_dict): - # TODO: https://github.com/rochacbruno/dynaconf/issues/690 - for key, val in args_dict.copy().to_dict().items(): - if isinstance(val, str) and val.startswith('this.'): - # Args transformed into small letters and existing capital args removed - args_dict[key.lower()] = settings.get(args_dict.pop(key).replace('this.', '')) - return args_dict +from robottelo.hosts import ContentHostError, Satellite, lru_sat_ready_rhel @pytest.fixture(scope='session') def _default_sat(align_to_satellite): """Returns a Satellite object for settings.server.hostname""" if settings.server.hostname: - hosts = Broker(host_class=Satellite).from_inventory( - filter=f'@inv.hostname == "{settings.server.hostname}"' - ) - if hosts: - return hosts[0] - else: + try: + return Satellite.get_host_by_hostname(settings.server.hostname) + except ContentHostError: return Satellite() + return None @contextmanager @@ -41,6 +27,10 @@ def _target_sat_imp(request, _default_sat, satellite_factory): yield new_sat new_sat.teardown() Broker(hosts=[new_sat]).checkin() + elif 'sanity' in request.config.option.markexpr: + installer_sat = lru_sat_ready_rhel(settings.server.version.rhel_version) + settings.set('server.hostname', installer_sat.hostname) + yield installer_sat else: yield _default_sat @@ -69,54 +59,6 @@ def class_target_sat(request, _default_sat, satellite_factory): yield sat -@pytest.fixture(scope='session') -def satellite_factory(): - if settings.server.get('deploy_arguments'): - logger.debug(f'Original deploy arguments for sat: {settings.server.deploy_arguments}') - resolved = _resolve_deploy_args(settings.server.deploy_arguments) - settings.set('server.deploy_arguments', resolved) - logger.debug(f'Resolved deploy arguments for sat: {settings.server.deploy_arguments}') - - def factory(retry_limit=3, delay=300, workflow=None, **broker_args): - if settings.server.deploy_arguments: - 
broker_args.update(settings.server.deploy_arguments) - logger.debug(f'Updated broker args for sat: {broker_args}') - - vmb = Broker( - host_class=Satellite, - workflow=workflow or settings.server.deploy_workflow, - **broker_args, - ) - timeout = (1200 + delay) * retry_limit - sat = wait_for(vmb.checkout, timeout=timeout, delay=delay, fail_condition=[]) - return sat.out - - return factory - - -@pytest.fixture(scope='session') -def capsule_factory(): - if settings.capsule.get('deploy_arguments'): - logger.debug(f'Original deploy arguments for cap: {settings.capsule.deploy_arguments}') - resolved = _resolve_deploy_args(settings.capsule.deploy_arguments) - settings.set('capsule.deploy_arguments', resolved) - logger.debug(f'Resolved deploy arguments for cap: {settings.capsule.deploy_arguments}') - - def factory(retry_limit=3, delay=300, workflow=None, **broker_args): - if settings.capsule.deploy_arguments: - broker_args.update(settings.capsule.deploy_arguments) - vmb = Broker( - host_class=Capsule, - workflow=workflow or settings.capsule.deploy_workflow, - **broker_args, - ) - timeout = (1200 + delay) * retry_limit - cap = wait_for(vmb.checkout, timeout=timeout, delay=delay, fail_condition=[]) - return cap.out - - return factory - - @pytest.fixture(scope='module') def module_discovery_sat( module_provisioning_sat, diff --git a/pytest_fixtures/core/contenthosts.py b/pytest_fixtures/core/contenthosts.py index f743601852c..d2bbb4cce39 100644 --- a/pytest_fixtures/core/contenthosts.py +++ b/pytest_fixtures/core/contenthosts.py @@ -4,14 +4,12 @@ The functions in this module are read in the pytest_plugins/fixture_markers.py module All functions in this module will be treated as fixtures that apply the contenthost mark """ -import pytest from broker import Broker +import pytest from robottelo import constants from robottelo.config import settings -from robottelo.hosts import Capsule -from robottelo.hosts import ContentHost -from robottelo.hosts import Satellite +from robottelo.hosts import ContentHost, Satellite def host_conf(request): @@ -47,6 +45,15 @@ def rhel_contenthost(request): yield host +@pytest.fixture(scope='module') +def module_rhel_contenthost(request): + """A module-level fixture that provides a content host object parametrized""" + # Request should be parametrized through pytest_fixtures.fixture_markers + # unpack params dict + with Broker(**host_conf(request), host_class=ContentHost) as host: + yield host + + @pytest.fixture(params=[{'rhel_version': '7'}]) def rhel7_contenthost(request): """A function-level fixture that provides a rhel7 content host object""" @@ -89,7 +96,14 @@ def rhel6_contenthost(request): yield host -@pytest.fixture() +@pytest.fixture(params=[{'rhel_version': '9'}]) +def rhel9_contenthost(request): + """A fixture that provides a rhel9 content host object""" + with Broker(**host_conf(request), host_class=ContentHost) as host: + yield host + + +@pytest.fixture def content_hosts(request): """A function-level fixture that provides two rhel content hosts object""" with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: @@ -105,15 +119,13 @@ def mod_content_hosts(request): yield hosts -@pytest.fixture() -def registered_hosts(request, target_sat, module_org): +@pytest.fixture +def registered_hosts(request, target_sat, module_org, module_ak_with_cv): """Fixture that registers content hosts to Satellite, based on rh_cloud setup""" with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: for vm in hosts: repo = 
settings.repos['SATCLIENT_REPO'][f'RHEL{vm.os_version.major}'] - target_sat.register_host_custom_repo(module_org, vm, [repo]) - vm.install_katello_host_tools() - vm.add_rex_key(target_sat) + vm.register(module_org, None, module_ak_with_cv.name, target_sat, repo=repo) yield hosts @@ -121,9 +133,17 @@ def registered_hosts(request, target_sat, module_org): def katello_host_tools_host(target_sat, module_org, rhel_contenthost): """Register content host to Satellite and install katello-host-tools on the host.""" repo = settings.repos['SATCLIENT_REPO'][f'RHEL{rhel_contenthost.os_version.major}'] - target_sat.register_host_custom_repo(module_org, rhel_contenthost, [repo]) + ak = target_sat.api.ActivationKey( + content_view=module_org.default_content_view, + max_hosts=100, + organization=module_org, + environment=target_sat.api.LifecycleEnvironment(id=module_org.library.id), + auto_attach=True, + ).create() + + rhel_contenthost.register(module_org, None, ak.name, target_sat, repo=repo) rhel_contenthost.install_katello_host_tools() - yield rhel_contenthost + return rhel_contenthost @pytest.fixture @@ -138,21 +158,28 @@ def cockpit_host(class_target_sat, class_org, rhel_contenthost): rhel_contenthost.execute(f"hostnamectl set-hostname {rhel_contenthost.hostname} --static") rhel_contenthost.install_cockpit() rhel_contenthost.add_rex_key(satellite=class_target_sat) - yield rhel_contenthost + return rhel_contenthost @pytest.fixture -def rex_contenthost(request, module_org, target_sat): +def rex_contenthost(request, module_org, target_sat, module_ak_with_cv): request.param['no_containers'] = True with Broker(**host_conf(request), host_class=ContentHost) as host: - # Register content host to Satellite repo = settings.repos['SATCLIENT_REPO'][f'RHEL{host.os_version.major}'] - target_sat.register_host_custom_repo(module_org, host, [repo]) - # Enable remote execution on the host - host.add_rex_key(satellite=target_sat) + host.register(module_org, None, module_ak_with_cv.name, target_sat, repo=repo) yield host +@pytest.fixture +def rex_contenthosts(request, module_org, target_sat, module_ak_with_cv): + request.param['no_containers'] = True + with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: + for host in hosts: + repo = settings.repos['SATCLIENT_REPO'][f'RHEL{host.os_version.major}'] + host.register(module_org, None, module_ak_with_cv.name, target_sat, repo=repo) + yield hosts + + @pytest.fixture def katello_host_tools_tracer_host(rex_contenthost, target_sat): """Install katello-host-tools-tracer, create custom @@ -166,7 +193,7 @@ def katello_host_tools_tracer_host(rex_contenthost, target_sat): **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']} ) rex_contenthost.install_tracer() - yield rex_contenthost + return rex_contenthost @pytest.fixture @@ -213,33 +240,6 @@ def oracle_host(request, version): yield host -@pytest.fixture -def sat_ready_rhel(request): - deploy_args = { - 'deploy_rhel_version': request.param, - 'deploy_flavor': 'satqe-ssd.standard.std', - 'promtail_config_template_file': 'config_sat.j2', - 'workflow': 'deploy-rhel', - } - # if 'deploy_rhel_version' is not set, let's default to RHEL 8 - deploy_args['deploy_rhel_version'] = deploy_args.get('deploy_rhel_version', '8') - deploy_args['workflow'] = 'deploy-rhel' - with Broker(**deploy_args, host_class=Satellite) as host: - yield host - - -@pytest.fixture -def cap_ready_rhel(): - rhel8 = settings.content_host.rhel8.vm - deploy_args = { - 'deploy_rhel_version': rhel8.deploy_rhel_version, - 'deploy_flavor': 
'satqe-ssd.standard.std', - 'workflow': rhel8.workflow, - } - with Broker(**deploy_args, host_class=Capsule) as host: - yield host - - @pytest.fixture(scope='module', params=[{'rhel_version': 8, 'no_containers': True}]) def external_puppet_server(request): deploy_args = host_conf(request) @@ -287,8 +287,10 @@ def sat_upgrade_chost(): def custom_host(request): """A rhel content host that passes custom host config through request.param""" deploy_args = request.param - # if 'deploy_rhel_version' is not set, let's default to RHEL 8 - deploy_args['deploy_rhel_version'] = deploy_args.get('deploy_rhel_version', '8') + # if 'deploy_rhel_version' is not set, let's default to what's in content_host.yaml + deploy_args['deploy_rhel_version'] = deploy_args.get( + 'deploy_rhel_version', settings.content_host.default_rhel_version + ) deploy_args['workflow'] = 'deploy-rhel' with Broker(**deploy_args, host_class=Satellite) as host: yield host diff --git a/pytest_fixtures/core/reporting.py b/pytest_fixtures/core/reporting.py index 24f1124f4c6..c89c87d46da 100644 --- a/pytest_fixtures/core/reporting.py +++ b/pytest_fixtures/core/reporting.py @@ -1,12 +1,10 @@ import datetime -import pytest from _pytest.junitxml import xml_key +import pytest from xdist import get_xdist_worker_id -from robottelo.config import setting_is_set -from robottelo.config import settings - +from robottelo.config import setting_is_set, settings FMT_XUNIT_TIME = '%Y-%m-%dT%H:%M:%S' @@ -37,13 +35,14 @@ def pytest_sessionstart(session): remove if resolved and set autouse=True for record_testsuite_timestamp_xml fixture """ - if get_xdist_worker_id(session) == 'master': - if session.config.pluginmanager.hasplugin('junitxml'): - xml = session.config._store.get(xml_key, None) - if xml: - xml.add_global_property( - 'start_time', datetime.datetime.utcnow().strftime(FMT_XUNIT_TIME) - ) + if get_xdist_worker_id(session) == 'master' and session.config.pluginmanager.hasplugin( + 'junitxml' + ): + xml = session.config._store.get(xml_key, None) + if xml: + xml.add_global_property( + 'start_time', datetime.datetime.utcnow().strftime(FMT_XUNIT_TIME) + ) @pytest.fixture(autouse=False, scope='session') diff --git a/pytest_fixtures/core/sat_cap_factory.py b/pytest_fixtures/core/sat_cap_factory.py index d8a6891d06b..bc4c16b852f 100644 --- a/pytest_fixtures/core/sat_cap_factory.py +++ b/pytest_fixtures/core/sat_cap_factory.py @@ -1,90 +1,216 @@ -import pytest +from contextlib import contextmanager +from functools import lru_cache + from broker import Broker +from packaging.version import Version +import pytest from wait_for import wait_for -from pytest_fixtures.core.broker import _resolve_deploy_args -from robottelo.config import settings -from robottelo.hosts import Capsule -from robottelo.hosts import IPAHost +from robottelo.config import configure_airgun, configure_nailgun, settings +from robottelo.hosts import ( + Capsule, + IPAHost, + Satellite, + get_sat_rhel_version, + lru_sat_ready_rhel, +) +from robottelo.logging import logger +from robottelo.utils.installer import InstallerCommand + + +def resolve_deploy_args(args_dict): + # TODO: https://github.com/rochacbruno/dynaconf/issues/690 + for key, val in args_dict.copy().to_dict().items(): + if isinstance(val, str) and val.startswith('this.'): + # Args transformed into small letters and existing capital args removed + args_dict[key.lower()] = settings.get(args_dict.pop(key).replace('this.', '')) + return args_dict + + +@contextmanager +def _target_satellite_host(request, satellite_factory): + if 'sanity' 
not in request.config.option.markexpr: + new_sat = satellite_factory() + yield new_sat + new_sat.teardown() + Broker(hosts=[new_sat]).checkin() + else: + yield + + +@lru_cache +def cached_capsule_cdn_register(hostname=None): + cap = Capsule.get_host_by_hostname(hostname=hostname) + cap.enable_capsule_downstream_repos() + + +@contextmanager +def _target_capsule_host(request, capsule_factory): + if 'sanity' not in request.config.option.markexpr and not request.config.option.n_minus: + new_cap = capsule_factory() + yield new_cap + new_cap.teardown() + Broker(hosts=[new_cap]).checkin() + elif request.config.option.n_minus: + if not settings.capsule.hostname: + hosts = Capsule.get_hosts_from_inventory(filter="'cap' in @inv.name") + settings.capsule.hostname = hosts[0].hostname + cap = hosts[0] + else: + cap = Capsule.get_host_by_hostname(settings.capsule.hostname) + # Capsule needs RHEL contents for some tests + cached_capsule_cdn_register(hostname=settings.capsule.hostname) + yield cap + else: + yield + + +@pytest.fixture(scope='session') +def satellite_factory(): + if settings.server.get('deploy_arguments'): + logger.debug(f'Original deploy arguments for sat: {settings.server.deploy_arguments}') + resolved = resolve_deploy_args(settings.server.deploy_arguments) + settings.set('server.deploy_arguments', resolved) + logger.debug(f'Resolved deploy arguments for sat: {settings.server.deploy_arguments}') + + def factory(retry_limit=3, delay=300, workflow=None, **broker_args): + if settings.server.deploy_arguments: + broker_args.update(settings.server.deploy_arguments) + logger.debug(f'Updated broker args for sat: {broker_args}') + + vmb = Broker( + host_class=Satellite, + workflow=workflow or settings.server.deploy_workflows.product, + **broker_args, + ) + timeout = (1200 + delay) * retry_limit + sat = wait_for(vmb.checkout, timeout=timeout, delay=delay, fail_condition=[]) + return sat.out + + return factory @pytest.fixture -def satellite_host(satellite_factory): +def large_capsule_host(capsule_factory): + """A fixture that provides a Capsule based on config settings""" + new_cap = capsule_factory(deploy_flavor=settings.flavors.custom_db) + yield new_cap + new_cap.teardown() + Broker(hosts=[new_cap]).checkin() + + +@pytest.fixture(scope='session') +def capsule_factory(): + if settings.capsule.get('deploy_arguments'): + logger.debug(f'Original deploy arguments for cap: {settings.capsule.deploy_arguments}') + resolved = resolve_deploy_args(settings.capsule.deploy_arguments) + settings.set('capsule.deploy_arguments', resolved) + logger.debug(f'Resolved deploy arguments for cap: {settings.capsule.deploy_arguments}') + + def factory(retry_limit=3, delay=300, workflow=None, **broker_args): + if settings.capsule.deploy_arguments: + broker_args.update(settings.capsule.deploy_arguments) + vmb = Broker( + host_class=Capsule, + workflow=workflow or settings.capsule.deploy_workflows.product, + **broker_args, + ) + timeout = (1200 + delay) * retry_limit + cap = wait_for(vmb.checkout, timeout=timeout, delay=delay, fail_condition=[]) + return cap.out + + return factory + + +@pytest.fixture +def satellite_host(request, satellite_factory): """A fixture that provides a Satellite based on config settings""" - new_sat = satellite_factory() - yield new_sat - new_sat.teardown() - Broker(hosts=[new_sat]).checkin() + with _target_satellite_host(request, satellite_factory) as sat: + yield sat @pytest.fixture(scope='module') -def module_satellite_host(satellite_factory): +def module_satellite_host(request, 
satellite_factory): """A fixture that provides a Satellite based on config settings""" - new_sat = satellite_factory() - yield new_sat - new_sat.teardown() - Broker(hosts=[new_sat]).checkin() + with _target_satellite_host(request, satellite_factory) as sat: + yield sat @pytest.fixture(scope='session') -def session_satellite_host(satellite_factory): +def session_satellite_host(request, satellite_factory): """A fixture that provides a Satellite based on config settings""" - new_sat = satellite_factory() - yield new_sat - new_sat.teardown() - Broker(hosts=[new_sat]).checkin() + with _target_satellite_host(request, satellite_factory) as sat: + yield sat + + +@pytest.fixture(scope='module') +def module_satellite_mqtt(module_target_sat): + """Configure satellite with MQTT broker enabled""" + module_target_sat.set_rex_script_mode_provider('pull-mqtt') + # lower the mqtt_resend_interval + module_target_sat.set_mqtt_resend_interval('30') + result = module_target_sat.execute('systemctl status mosquitto') + assert result.status == 0, 'MQTT broker is not running' + result = module_target_sat.execute('firewall-cmd --permanent --add-port="1883/tcp"') + assert result.status == 0, 'Failed to open mqtt port on satellite' + module_target_sat.execute('firewall-cmd --reload') + return module_target_sat @pytest.fixture -def capsule_host(capsule_factory): +def capsule_host(request, capsule_factory): """A fixture that provides a Capsule based on config settings""" - new_cap = capsule_factory() - yield new_cap - new_cap.teardown() - Broker(hosts=[new_cap]).checkin() + with _target_capsule_host(request, capsule_factory) as cap: + yield cap @pytest.fixture(scope='module') -def module_capsule_host(capsule_factory): +def module_capsule_host(request, capsule_factory): """A fixture that provides a Capsule based on config settings""" - new_cap = capsule_factory() - yield new_cap - new_cap.teardown() - Broker(hosts=[new_cap]).checkin() + with _target_capsule_host(request, capsule_factory) as cap: + yield cap @pytest.fixture(scope='session') -def session_capsule_host(capsule_factory): +def session_capsule_host(request, capsule_factory): """A fixture that provides a Capsule based on config settings""" - new_cap = capsule_factory() - yield new_cap - new_cap.teardown() - Broker(hosts=[new_cap]).checkin() + with _target_capsule_host(request, capsule_factory) as cap: + yield cap @pytest.fixture -def capsule_configured(capsule_host, target_sat): +def capsule_configured(request, capsule_host, target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" - capsule_host.capsule_setup(sat_host=target_sat) - yield capsule_host + if not request.config.option.n_minus: + capsule_host.capsule_setup(sat_host=target_sat) + return capsule_host + + +@pytest.fixture +def large_capsule_configured(large_capsule_host, target_sat): + """Configure the capsule instance with the satellite from settings.server.hostname""" + large_capsule_host.capsule_setup(sat_host=target_sat) + return large_capsule_host @pytest.fixture(scope='module') -def module_capsule_configured(module_capsule_host, module_target_sat): +def module_capsule_configured(request, module_capsule_host, module_target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" - module_capsule_host.capsule_setup(sat_host=module_target_sat) - yield module_capsule_host + if not request.config.option.n_minus: + module_capsule_host.capsule_setup(sat_host=module_target_sat) + return module_capsule_host
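A hedged usage sketch for the module_satellite_mqtt fixture above; the test name and assertion are illustrative, not an existing test:

def test_pull_provider_ready(module_satellite_mqtt):
    # the fixture has already enabled the pull-mqtt rex provider and opened
    # port 1883, so the mosquitto broker should be active on the Satellite
    result = module_satellite_mqtt.execute('systemctl is-active mosquitto')
    assert result.stdout.strip() == 'active'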
@pytest.fixture(scope='session') -def session_capsule_configured(session_capsule_host, session_target_sat): +def session_capsule_configured(request, session_capsule_host, session_target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" - session_capsule_host.capsule_setup(sat_host=session_target_sat) - yield session_capsule_host + if not request.config.option.n_minus: + session_capsule_host.capsule_setup(sat_host=session_target_sat) + return session_capsule_host @pytest.fixture(scope='module') -def module_capsule_configured_mqtt(module_capsule_configured): +def module_capsule_configured_mqtt(request, module_capsule_configured): """Configure the capsule instance with the satellite from settings.server.hostname, enable MQTT broker""" module_capsule_configured.set_rex_script_mode_provider('pull-mqtt') @@ -96,6 +222,8 @@ def module_capsule_configured_mqtt(module_capsule_configured): assert result.status == 0, 'Failed to open mqtt port on capsule' module_capsule_configured.execute('firewall-cmd --reload') yield module_capsule_configured + if request.config.option.n_minus: + raise TypeError('Teardown to undo the MQTT configuration is missing for n-minus testing') @pytest.fixture(scope='module') @@ -104,13 +232,13 @@ def module_lb_capsule(retry_limit=3, delay=300, **broker_args): :return: List of capsules """ if settings.capsule.get('deploy_arguments'): - resolved = _resolve_deploy_args(settings.capsule.deploy_arguments) + resolved = resolve_deploy_args(settings.capsule.deploy_arguments) settings.set('capsule.deploy_arguments', resolved) broker_args.update(settings.capsule.deploy_arguments) timeout = (1200 + delay) * retry_limit hosts = Broker( host_class=Capsule, - workflow=settings.capsule.deploy_workflow, + workflow=settings.capsule.deploy_workflows.product, _count=2, **broker_args, ) @@ -127,16 +255,7 @@ def module_capsule_configured_async_ssh(module_capsule_configured): """Configure the capsule instance with the satellite from settings.server.hostname, set the ssh-async remote execution provider""" module_capsule_configured.set_rex_script_mode_provider('ssh-async') - yield module_capsule_configured - - -@pytest.fixture(scope='module') -def rhcloud_sat_host(satellite_factory): - """A module level fixture that provides a Satellite based on config settings""" - new_sat = satellite_factory() - yield new_sat - new_sat.teardown() - Broker(hosts=[new_sat]).checkin() + return module_capsule_configured @pytest.fixture(scope='module', params=['IDM', 'AD']) @@ -159,3 +278,93 @@ def parametrized_enrolled_sat( new_sat.unregister() new_sat.teardown() Broker(hosts=[new_sat]).checkin() + + +def get_deploy_args(request): + """Get deploy arguments for Satellite base OS deployment.
Should not be used for Capsule.""" + rhel_version = get_sat_rhel_version() + deploy_args = { + 'deploy_rhel_version': rhel_version.base_version, + 'deploy_flavor': settings.flavors.default, + 'promtail_config_template_file': 'config_sat.j2', + 'workflow': settings.server.deploy_workflows.os, + } + if hasattr(request, 'param'): + if isinstance(request.param, dict): + deploy_args.update(request.param) + else: + deploy_args['deploy_rhel_version'] = request.param + return deploy_args + + +@pytest.fixture +def sat_ready_rhel(request): + deploy_args = get_deploy_args(request) + with Broker(**deploy_args, host_class=Satellite) as host: + yield host + + +@pytest.fixture(scope='module') +def module_sat_ready_rhels(request): + deploy_args = get_deploy_args(request) + with Broker(**deploy_args, host_class=Satellite, _count=2) as hosts: + yield hosts + + +@pytest.fixture +def cap_ready_rhel(): + rhel_version = Version(settings.capsule.version.release) + deploy_args = { + 'deploy_rhel_version': rhel_version.base_version, + 'deploy_flavor': settings.flavors.default, + 'promtail_config_template_file': 'config_sat.j2', + 'workflow': settings.capsule.deploy_workflows.os, + } + with Broker(**deploy_args, host_class=Capsule) as host: + yield host + + +@pytest.fixture(scope='session') +def installer_satellite(request): + """A fixture to freshly install Satellite on a RHEL machine using the installer + + This is a fresh, non-template-based Satellite + + :param request: A pytest request object; this fixture looks for a + Broker object of class Satellite + """ + sat_version = settings.server.version.release + if 'sanity' in request.config.option.markexpr: + sat = Satellite(settings.server.hostname) + else: + sat = lru_sat_ready_rhel(getattr(request, 'param', None)) + sat.setup_firewall() + # Register for RHEL8 repos, get the Ohsnap repofile, and enable and download Satellite + sat.register_to_cdn() + sat.download_repofile( + product='satellite', + release=settings.server.version.release, + snap=settings.server.version.snap, + ) + sat.execute('dnf -y module enable satellite:el8 && dnf -y install satellite') + installed_version = sat.execute('rpm --query satellite').stdout + assert sat_version in installed_version + # Install Satellite + sat.execute( + InstallerCommand( + installer_args=[ + 'scenario satellite', + f'foreman-initial-admin-password {settings.server.admin_password}', + ] + ).get_command(), + timeout='30m', + ) + if 'sanity' in request.config.option.markexpr: + configure_nailgun() + configure_airgun() + yield sat + if 'sanity' not in request.config.option.markexpr: + sanity_sat = Satellite(sat.hostname) + sanity_sat.unregister() + broker_sat = Satellite.get_host_by_hostname(sanity_sat.hostname) + Broker(hosts=[broker_sat]).checkin() diff --git a/pytest_fixtures/core/sys.py b/pytest_fixtures/core/sys.py index 1684c0bb251..88768508ee4 100644 --- a/pytest_fixtures/core/sys.py +++ b/pytest_fixtures/core/sys.py @@ -22,9 +22,9 @@ def allow_repo_discovery(target_sat): @pytest.fixture(autouse=True, scope="session") -def relax_bfa(session_target_sat): +def relax_bfa(request, session_target_sat): """Relax BFA protection against failed login attempts""" - if session_target_sat: + if session_target_sat and 'sanity' not in request.config.option.markexpr: session_target_sat.cli.Settings.set({'name': 'failed_login_attempts_limit', 'value': '0'}) @@ -54,7 +54,7 @@ def puppet_proxy_port_range(session_puppet_enabled_sat): @pytest.fixture(scope='class') def class_cockpit_sat(class_subscribe_satellite):
class_subscribe_satellite.install_cockpit() - yield class_subscribe_satellite + return class_subscribe_satellite @pytest.fixture(scope='module') diff --git a/pytest_fixtures/core/ui.py b/pytest_fixtures/core/ui.py index 0addff500df..1c24d6f04a9 100644 --- a/pytest_fixtures/core/ui.py +++ b/pytest_fixtures/core/ui.py @@ -1,7 +1,8 @@ -import pytest from fauxfactory import gen_string +import pytest from requests.exceptions import HTTPError +from robottelo.hosts import Satellite from robottelo.logging import logger @@ -17,9 +18,12 @@ def ui_user(request, module_org, module_location, module_target_sat): test_module_name = request.module.__name__.split('.')[-1].split('_', 1)[-1] login = f"{test_module_name}_{gen_string('alphanumeric')}" password = gen_string('alphanumeric') + admin = request.param.get('admin', True) if hasattr(request, 'param') else True logger.debug('Creating session user %r', login) user = module_target_sat.api.User( - admin=True, + admin=admin, + organization=[module_org], + location=[module_location], default_organization=module_org, default_location=module_location, description=f'created automatically by airgun for module "{test_module_name}"', @@ -27,6 +31,10 @@ def ui_user(request, module_org, module_location, module_target_sat): password=password, ).create() user.password = password + if not admin: + # give all the permissions + user.role = module_target_sat.api.Role().search(query={'per_page': 'all'}) + user.update(['role']) yield user try: logger.debug('Deleting session user %r', user.login) @@ -49,7 +57,8 @@ def test_foo(session): session.architecture.create({'name': 'bar'}) """ - return target_sat.ui_session(test_name, ui_user.login, ui_user.password) + with target_sat.ui_session(test_name, ui_user.login, ui_user.password) as session: + yield session @pytest.fixture @@ -69,3 +78,32 @@ def test_foo(autosession): """ with target_sat.ui_session(test_name, ui_user.login, ui_user.password) as started_session: yield started_session + + +@pytest.fixture(autouse=True) +def ui_session_record_property(request, record_property): + """ + Autouse fixture to set the record_property attribute for Satellite instances in the test. + + This fixture iterates over all fixtures in the current test node + (excluding the current fixture) and sets the record_property attribute + for instances of the Satellite class. + + Args: + request: The pytest request object. + record_property: The value to set for the record_property attribute. + """ + test_directories = [ + 'tests/foreman/destructive', + 'tests/foreman/ui', + 'tests/foreman/sanity', + 'tests/foreman/virtwho', + ] + test_file_path = request.node.fspath.strpath + if any(directory in test_file_path for directory in test_directories): + for fixture in request.node.fixturenames: + if request.fixturename != fixture and isinstance( + request.getfixturevalue(fixture), Satellite + ): + sat = request.getfixturevalue(fixture) + sat.record_property = record_property diff --git a/pytest_fixtures/core/upgrade.py b/pytest_fixtures/core/upgrade.py index fd9c2fd364b..5726f9f80a9 100644 --- a/pytest_fixtures/core/upgrade.py +++ b/pytest_fixtures/core/upgrade.py @@ -1,21 +1,20 @@ -import pytest from broker import Broker +import pytest from robottelo.hosts import Capsule from robottelo.logging import logger -@pytest.fixture(scope="function") +@pytest.fixture def dependent_scenario_name(request): """ This fixture is used to collect the dependent test case name. 
""" - depend_test_name = [ + return [ mark.kwargs['depend_on'].__name__ for mark in request.node.own_markers if 'depend_on' in mark.kwargs ][0] - yield depend_test_name @pytest.fixture(scope="session") @@ -38,6 +37,6 @@ def pre_configured_capsule(worker_id, session_target_sat): logger.debug(f'Capsules found: {intersect}') assert len(intersect) == 1, "More than one Capsule found in the inventory" target_capsule = intersect.pop() - hosts = Broker(host_class=Capsule).from_inventory(filter=f'@inv.hostname == "{target_capsule}"') + host = Capsule.get_host_by_hostname(target_capsule) logger.info(f'xdist worker {worker_id} was assigned pre-configured Capsule {target_capsule}') - return hosts[0] + return host diff --git a/pytest_fixtures/core/xdist.py b/pytest_fixtures/core/xdist.py index 982f3554749..4d02fe026d0 100644 --- a/pytest_fixtures/core/xdist.py +++ b/pytest_fixtures/core/xdist.py @@ -1,52 +1,61 @@ """Fixtures specific to or relating to pytest's xdist plugin""" import random -import pytest from broker import Broker +import pytest -from robottelo.config import configure_airgun -from robottelo.config import configure_nailgun -from robottelo.config import settings +from robottelo.config import configure_airgun, configure_nailgun, settings from robottelo.hosts import Satellite from robottelo.logging import logger @pytest.fixture(scope="session", autouse=True) -def align_to_satellite(worker_id, satellite_factory): +def align_to_satellite(request, worker_id, satellite_factory): """Attempt to align a Satellite to the current xdist worker""" - # clear any hostname that may have been previously set - settings.set("server.hostname", None) - on_demand_sat = None - - if worker_id in ['master', 'local']: - worker_pos = 0 + if 'build_sanity' in request.config.option.markexpr: + yield + if settings.server.hostname: + sanity_sat = Satellite(settings.server.hostname) + sanity_sat.unregister() + broker_sat = Satellite.get_host_by_hostname(sanity_sat.hostname) + Broker(hosts=[broker_sat]).checkin() else: - worker_pos = int(worker_id.replace('gw', '')) + # clear any hostname that may have been previously set + settings.set("server.hostname", None) + on_demand_sat = None + + if worker_id in ['master', 'local']: + worker_pos = 0 + else: + worker_pos = int(worker_id.replace('gw', '')) - # attempt to add potential satellites from the broker inventory file - if settings.server.inventory_filter: - hosts = Broker(host_class=Satellite).from_inventory(filter=settings.server.inventory_filter) - settings.server.hostnames += [host.hostname for host in hosts] + # attempt to add potential satellites from the broker inventory file + if settings.server.inventory_filter: + hosts = Satellite.get_hosts_from_inventory(filter=settings.server.inventory_filter) + settings.server.hostnames += [host.hostname for host in hosts] - # attempt to align a worker to a satellite - if settings.server.xdist_behavior == 'run-on-one' and settings.server.hostnames: - settings.set("server.hostname", settings.server.hostnames[0]) - elif settings.server.hostnames and worker_pos < len(settings.server.hostnames): - settings.set("server.hostname", settings.server.hostnames[worker_pos]) - elif settings.server.xdist_behavior == 'balance' and settings.server.hostnames: - settings.set("server.hostname", random.choice(settings.server.hostnames)) - # get current satellite information - elif settings.server.xdist_behavior == 'on-demand': - on_demand_sat = satellite_factory() - if on_demand_sat.hostname: - settings.set("server.hostname", 
on_demand_sat.hostname) - # if no satellite was received, fallback to balance - if not settings.server.hostname: + # attempt to align a worker to a satellite + if settings.server.xdist_behavior == 'run-on-one' and settings.server.hostnames: + settings.set("server.hostname", settings.server.hostnames[0]) + elif settings.server.hostnames and worker_pos < len(settings.server.hostnames): + settings.set("server.hostname", settings.server.hostnames[worker_pos]) + elif settings.server.xdist_behavior == 'balance' and settings.server.hostnames: settings.set("server.hostname", random.choice(settings.server.hostnames)) - logger.info(f'xdist worker {worker_id} was assigned hostname {settings.server.hostname}') - configure_airgun() - configure_nailgun() - yield - if on_demand_sat and settings.server.auto_checkin: - on_demand_sat.teardown() - Broker(hosts=[on_demand_sat]).checkin() + # get current satellite information + elif settings.server.xdist_behavior == 'on-demand': + on_demand_sat = satellite_factory() + if on_demand_sat.hostname: + settings.set("server.hostname", on_demand_sat.hostname) + # if no satellite was received, fallback to balance + if not settings.server.hostname: + settings.set("server.hostname", random.choice(settings.server.hostnames)) + if settings.server.hostname: + logger.info( + f'xdist worker {worker_id} was assigned hostname {settings.server.hostname}' + ) + configure_airgun() + configure_nailgun() + yield + if on_demand_sat and settings.server.auto_checkin: + on_demand_sat.teardown() + Broker(hosts=[on_demand_sat]).checkin() diff --git a/pytest_plugins/auto_vault.py b/pytest_plugins/auto_vault.py new file mode 100644 index 00000000000..cb9e1f0c10a --- /dev/null +++ b/pytest_plugins/auto_vault.py @@ -0,0 +1,9 @@ +"""Plugin that automatically logs in to Vault when pytest starts""" + +from robottelo.utils.vault import Vault + + +def pytest_addoption(parser): + """Log in to Vault while pytest parses its options""" + with Vault() as vclient: + vclient.login() diff --git a/pytest_plugins/capsule_n-minus.py b/pytest_plugins/capsule_n-minus.py new file mode 100644 index 00000000000..f903e239757 --- /dev/null +++ b/pytest_plugins/capsule_n-minus.py @@ -0,0 +1,55 @@ +# Collection of Capsule Factory fixture tests +# No destructive tests +# Adjust capsule host and capsule_configured host behavior for n-minus testing +# Calculate capsule hostname from inventory just as we do in xdist.py +from robottelo.config import settings +from robottelo.hosts import Capsule + + +def pytest_addoption(parser): + """Add options for pytest to collect tests based on the fixtures they use""" + help_text = ''' + Collects tests based on capsule fixtures used by tests and deselects destructive tests + + Usage: --n-minus + + example: pytest --n-minus tests/foreman + ''' + parser.addoption("--n-minus", action='store_true', default=False, help=help_text) + + +def pytest_collection_modifyitems(items, config): + + if not config.getoption('n_minus', False): + return + + selected = [] + deselected = [] + + for item in items: + is_destructive = item.get_closest_marker('destructive') + # Deselect destructive tests and tests without the capsule_factory fixture + if 'capsule_factory' not in item.fixturenames or is_destructive: + deselected.append(item) + continue + # Ignoring all puppet tests as they are destructive in nature + # and need their own satellite for verification + if 'session_puppet_enabled_sat' in item.fixturenames: + deselected.append(item) + continue + # Ignoring all satellite maintain tests as they are
destructive in nature + # Also don't need them in n-minus testing as it's not integration testing + if 'sat_maintain' in item.fixturenames and 'satellite' in item.callspec.params.values(): + deselected.append(item) + continue + selected.append(item) + + config.hook.pytest_deselected(items=deselected) + items[:] = selected + + +def pytest_sessionfinish(session, exitstatus): + # Unregister the capsule from CDN after all tests + if session.config.option.n_minus and not session.config.option.collectonly: + caps = Capsule.get_host_by_hostname(hostname=settings.capsule.hostname) + caps.unregister() diff --git a/pytest_plugins/factory_collection.py b/pytest_plugins/factory_collection.py index f8933e45a7b..3237fe8465f 100644 --- a/pytest_plugins/factory_collection.py +++ b/pytest_plugins/factory_collection.py @@ -1,5 +1,4 @@ -from inspect import getmembers -from inspect import isfunction +from inspect import getmembers, isfunction def pytest_configure(config): @@ -13,6 +12,9 @@ def pytest_collection_modifyitems(session, items, config): factory_fixture_names = [m[0] for m in getmembers(sat_cap_factory, isfunction)] for item in items: - has_factoryfixture = set(item.fixturenames).intersection(set(factory_fixture_names)) + itemfixtures = [ + itm for itm in item.fixturenames if itm not in ('satellite_factory', 'capsule_factory') + ] + has_factoryfixture = set(itemfixtures).intersection(set(factory_fixture_names)) if has_factoryfixture: item.add_marker('factory_instance') diff --git a/pytest_plugins/fixture_collection.py b/pytest_plugins/fixture_collection.py deleted file mode 100644 index 934efa5f56d..00000000000 --- a/pytest_plugins/fixture_collection.py +++ /dev/null @@ -1,38 +0,0 @@ -# Pytest Plugin to modify collection of test cases based on fixtures used by tests.
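A minimal sketch of why the factory_collection.py change above excludes the factory fixtures themselves before intersecting, assuming (plausibly, given the autouse align_to_satellite fixture in xdist.py depends on satellite_factory) that the session factory lands in every test's fixture closure; the fixture lists here are illustrative:

# align_to_satellite is session-scoped and autouse, so its satellite_factory
# dependency appears in every item's fixturenames; without the exclusion,
# every test would be marked 'factory_instance'.
factory_fixture_names = {'satellite_factory', 'capsule_factory', 'satellite_host'}
fixturenames = ['align_to_satellite', 'satellite_factory', 'target_sat']  # a typical test
itemfixtures = [f for f in fixturenames if f not in ('satellite_factory', 'capsule_factory')]
print(set(itemfixtures) & factory_fixture_names)  # set() -> marker is not added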
-from robottelo.logging import collection_logger as logger - - -def pytest_addoption(parser): - """Add options for pytest to collect tests based on fixtures its using""" - help_text = ''' - Collects tests based on fixtures used by tests - - Usage: --uses-fixtures [options] - - Options: [ specific_fixture_name | list_of fixture names ] - - example: pytest tests/foreman --uses-fixtures target_sat module_target_sat - ''' - parser.addoption("--uses-fixtures", nargs='+', help=help_text) - - -def pytest_collection_modifyitems(items, config): - - if not config.getoption('uses_fixtures', False): - return - - filter_fixtures = config.getvalue('uses_fixtures') - selected = [] - deselected = [] - - for item in items: - if set(item.fixturenames).intersection(set(filter_fixtures)): - selected.append(item) - else: - deselected.append(item) - logger.debug( - f'Selected {len(selected)} and deselected {len(deselected)} ' - f'tests based on given fixtures {filter_fixtures} used by tests' - ) - config.hook.pytest_deselected(items=deselected) - items[:] = selected diff --git a/pytest_plugins/fixture_markers.py b/pytest_plugins/fixture_markers.py index e2126d86dad..795397bec5d 100644 --- a/pytest_plugins/fixture_markers.py +++ b/pytest_plugins/fixture_markers.py @@ -1,15 +1,16 @@ +from inspect import getmembers, isfunction import re -from inspect import getmembers -from inspect import isfunction from robottelo.config import settings - TARGET_FIXTURES = [ 'rhel_contenthost', + 'module_rhel_contenthost', 'content_hosts', 'module_provisioning_rhel_content', + 'capsule_provisioning_rhel_content', 'rex_contenthost', + 'rex_contenthosts', ] @@ -17,7 +18,7 @@ def pytest_generate_tests(metafunc): content_host_fixture = ''.join([i for i in TARGET_FIXTURES if i in metafunc.fixturenames]) if content_host_fixture in metafunc.fixturenames: function_marks = getattr(metafunc.function, 'pytestmark', []) - no_containers = any('no_containers' == mark.name for mark in function_marks) + no_containers = any(mark.name == 'no_containers' for mark in function_marks) # process eventual rhel_version_list markers matchers = [i.args for i in function_marks if i.name == 'rhel_ver_list'] list_params = [] @@ -87,6 +88,7 @@ def chost_rhelver(params): for param in params: if 'contenthost' in param: return params[param].get('rhel_version') + return None content_host_fixture_names = [m[0] for m in getmembers(contenthosts, isfunction)] for item in items: diff --git a/pytest_plugins/fspath_plugins.py b/pytest_plugins/fspath_plugins.py index 316d066a6b0..c6241cac9cc 100644 --- a/pytest_plugins/fspath_plugins.py +++ b/pytest_plugins/fspath_plugins.py @@ -12,5 +12,5 @@ def pytest_collection_modifyitems(session, items, config): if item.nodeid.startswith('tests/robottelo/') or item.nodeid.startswith('tests/upgrades/'): continue - endpoint = endpoint_regex.findall(item.location[0])[0] - item.user_properties.append(('endpoint', endpoint)) + if endpoints := endpoint_regex.findall(item.location[0]): + item.user_properties.append(('endpoint', endpoints[0])) diff --git a/pytest_plugins/issue_handlers.py b/pytest_plugins/issue_handlers.py index d2dfb3be55e..614c75a45be 100644 --- a/pytest_plugins/issue_handlers.py +++ b/pytest_plugins/issue_handlers.py @@ -1,20 +1,21 @@ +from collections import defaultdict +from datetime import datetime import inspect import json import re -from collections import defaultdict -from datetime import datetime import pytest from robottelo.config import settings from robottelo.logging import collection_logger as logger from 
robottelo.utils import slugify_component -from robottelo.utils.issue_handlers import add_workaround -from robottelo.utils.issue_handlers import bugzilla -from robottelo.utils.issue_handlers import is_open -from robottelo.utils.issue_handlers import should_deselect -from robottelo.utils.version import search_version_key -from robottelo.utils.version import VersionEncoder +from robottelo.utils.issue_handlers import ( + add_workaround, + bugzilla, + is_open, + should_deselect, +) +from robottelo.utils.version import VersionEncoder, search_version_key DEFAULT_BZ_CACHE_FILE = 'bz_cache.json' @@ -213,7 +214,9 @@ def generate_issue_collection(items, config): # pragma: no cover filepath, lineno, testcase = item.location # Component and importance marks are determined by testimony tokens # Testimony.yaml as of writing has both as required, so any - component_mark = item.get_closest_marker('component').args[0] + if not (components := item.get_closest_marker('component')): + continue + component_mark = components.args[0] component_slug = slugify_component(component_mark, False) importance_mark = item.get_closest_marker('importance').args[0] for marker in item.iter_markers(): diff --git a/pytest_plugins/logging_hooks.py b/pytest_plugins/logging_hooks.py index f7bcaf43620..c15136a647a 100644 --- a/pytest_plugins/logging_hooks.py +++ b/pytest_plugins/logging_hooks.py @@ -1,21 +1,21 @@ +import contextlib import logging import logzero import pytest from xdist import is_xdist_worker -from robottelo.logging import broker_log_setup -from robottelo.logging import DEFAULT_DATE_FORMAT -from robottelo.logging import logger -from robottelo.logging import logging_yaml -from robottelo.logging import robottelo_log_dir -from robottelo.logging import robottelo_log_file +from robottelo.logging import ( + DEFAULT_DATE_FORMAT, + broker_log_setup, + logger, + logging_yaml, + robottelo_log_dir, + robottelo_log_file, +) -try: - from pytest_reportportal import RPLogger - from pytest_reportportal import RPLogHandler -except ImportError: - pass +with contextlib.suppress(ImportError): + from pytest_reportportal import RPLogger, RPLogHandler @pytest.fixture(autouse=True, scope='session') @@ -36,42 +36,43 @@ def configure_logging(request, worker_id): if use_rp_logger: logging.setLoggerClass(RPLogger) - if is_xdist_worker(request): - if f'{worker_id}' not in [h.get_name() for h in logger.handlers]: - # Track the core logger's file handler level, set it in case core logger wasn't set - worker_log_level = 'INFO' - handlers_to_remove = [ - h - for h in logger.handlers - if isinstance(h, logging.FileHandler) - and getattr(h, 'baseFilename', None) == str(robottelo_log_file) - ] - for handler in handlers_to_remove: - logger.removeHandler(handler) - worker_log_level = handler.level - worker_handler = logging.FileHandler( - robottelo_log_dir.joinpath(f'robottelo_{worker_id}.log') - ) - worker_handler.set_name(f'{worker_id}') - worker_handler.setFormatter(worker_formatter) - worker_handler.setLevel(worker_log_level) - logger.addHandler(worker_handler) - broker_log_setup( - level=logging_yaml.broker.level, - file_level=logging_yaml.broker.fileLevel, - formatter=worker_formatter, - path=robottelo_log_dir.joinpath(f'robottelo_{worker_id}.log'), - ) + if is_xdist_worker(request) and f'{worker_id}' not in [h.get_name() for h in logger.handlers]: + # Track the core logger's file handler level, set it in case core logger wasn't set + worker_log_level = 'INFO' + handlers_to_remove = [ + h + for h in logger.handlers + if isinstance(h, 
logging.FileHandler) + and getattr(h, 'baseFilename', None) == str(robottelo_log_file) + ] + for handler in handlers_to_remove: + logger.removeHandler(handler) + worker_log_level = handler.level + worker_handler = logging.FileHandler( + robottelo_log_dir.joinpath(f'robottelo_{worker_id}.log') + ) + worker_handler.set_name(f'{worker_id}') + worker_handler.setFormatter(worker_formatter) + worker_handler.setLevel(worker_log_level) + logger.addHandler(worker_handler) + broker_log_setup( + level=logging_yaml.broker.level, + file_level=logging_yaml.broker.fileLevel, + formatter=worker_formatter, + path=robottelo_log_dir.joinpath(f'robottelo_{worker_id}.log'), + ) - if use_rp_logger: - rp_handler = RPLogHandler(request.node.config.py_test_service) - rp_handler.setFormatter(worker_formatter) - # logger.addHandler(rp_handler) + if use_rp_logger: + rp_handler = RPLogHandler(request.node.config.py_test_service) + rp_handler.setFormatter(worker_formatter) + # logger.addHandler(rp_handler) def pytest_runtest_logstart(nodeid, location): logger.info(f'Started Test: {nodeid}') -def pytest_runtest_logfinish(nodeid, location): - logger.info(f'Finished Test: {nodeid}') +def pytest_runtest_logreport(report): + """Process the TestReport produced for each of the setup, + call and teardown runtest phases of an item.""" + logger.info('Finished %s for test: %s, result: %s', report.when, report.nodeid, report.outcome) diff --git a/pytest_plugins/marker_deselection.py b/pytest_plugins/marker_deselection.py index ca68ead0165..ee2cd4c1485 100644 --- a/pytest_plugins/marker_deselection.py +++ b/pytest_plugins/marker_deselection.py @@ -2,7 +2,6 @@ from robottelo.logging import collection_logger as logger - non_satCI_components = ['Virt-whoConfigurePlugin'] diff --git a/pytest_plugins/markers.py b/pytest_plugins/markers.py index abf54997bd8..e5d9855a179 100644 --- a/pytest_plugins/markers.py +++ b/pytest_plugins/markers.py @@ -24,6 +24,8 @@ def pytest_configure(config): "no_containers: Disable container hosts from being used in favor of VMs", "include_capsule: For satellite-maintain tests to run on Satellite and Capsule both", "capsule_only: For satellite-maintain tests to run only on Capsules", + "manifester: Tests that require manifester", + "ldap: Tests related to ldap authentication", ] markers.extend(module_markers()) for marker in markers: diff --git a/pytest_plugins/metadata_markers.py b/pytest_plugins/metadata_markers.py index f409dd804c7..57b12aa5c1f 100644 --- a/pytest_plugins/metadata_markers.py +++ b/pytest_plugins/metadata_markers.py @@ -6,7 +6,6 @@ from robottelo.config import settings from robottelo.hosts import get_sat_rhel_version -from robottelo.hosts import get_sat_version from robottelo.logging import collection_logger as logger FMT_XUNIT_TIME = '%Y-%m-%dT%H:%M:%S' @@ -59,7 +58,7 @@ def pytest_configure(config): @pytest.hookimpl(tryfirst=True) -def pytest_collection_modifyitems(session, items, config): +def pytest_collection_modifyitems(items, config): """Add markers and user_properties for testimony token metadata user_properties is used by the junit plugin, and thus by many test report systems @@ -74,7 +73,7 @@ def pytest_collection_modifyitems(session, items, config): """ # get RHEL version of the satellite rhel_version = get_sat_rhel_version().base_version - sat_version = get_sat_version().base_version + sat_version = settings.server.version.get('release') snap_version = settings.server.version.get('snap', '') # split the option string and handle no option, single option, multiple @@ -117,8 
+116,21 @@ def pytest_collection_modifyitems(session, items, config):

         # add markers as user_properties so they are recorded in XML properties of the report
         # pytest-ibutsu will include user_properties dict in testresult metadata
+        markers_prop_data = []
+        exclude_markers = ['parametrize', 'skipif', 'usefixtures', 'skip_if_not_set']
         for marker in item.iter_markers():
-            item.user_properties.append((marker.name, next(iter(marker.args), None)))
+            prop = marker.name
+            if prop in exclude_markers:
+                continue
+            if marker_val := next(iter(marker.args), None):
+                prop = '='.join([prop, str(marker_val)])
+            markers_prop_data.append(prop)
+            # Adding each marker independently as a property
+            item.user_properties.append((marker.name, marker_val))
+        # Adding all markers as a single property
+        item.user_properties.append(("markers", ", ".join(markers_prop_data)))
+
+        # Version specific user properties
         item.user_properties.append(("BaseOS", rhel_version))
         item.user_properties.append(("SatelliteVersion", sat_version))
         item.user_properties.append(("SnapVersion", snap_version))
diff --git a/pytest_plugins/requirements/req_updater.py b/pytest_plugins/requirements/req_updater.py
index 52b8a822bfa..664a9bb92a5 100644
--- a/pytest_plugins/requirements/req_updater.py
+++ b/pytest_plugins/requirements/req_updater.py
@@ -1,5 +1,5 @@
-import subprocess
 from functools import cached_property
+import subprocess


 class ReqUpdater:
@@ -9,6 +9,8 @@ class ReqUpdater:
         'Betelgeuse': 'betelgeuse',
         'broker': 'broker[docker]',
         'dynaconf': 'dynaconf[vault]',
+        'Jinja2': 'jinja2',
+        'Sphinx': 'sphinx',
     }

     @cached_property
diff --git a/pytest_plugins/requirements/update_requirements.py b/pytest_plugins/requirements/update_requirements.py
index df16c4694a2..e2ad8840185 100644
--- a/pytest_plugins/requirements/update_requirements.py
+++ b/pytest_plugins/requirements/update_requirements.py
@@ -1,6 +1,5 @@
 """Plugin enables pytest to notify and update the requirements"""
 from .req_updater import ReqUpdater
-from robottelo.constants import Colored

 updater = ReqUpdater()

@@ -23,25 +22,29 @@ def pytest_report_header(config):
     e.g:
     # Following will update the mandatory requirements
-    # pytest tests/foreman --collect-only --upgrade-required-reqs
+    # pytest tests/foreman --collect-only --update-required-reqs

     # Following will update the mandatory and optional requirements
-    # pytest tests/foreman --collect-only --upgrade-all-reqs
+    # pytest tests/foreman --collect-only --update-all-reqs
     """
     if updater.req_deviation:
-        print(
-            f"{Colored.REDLIGHT}Mandatory Requirements Available: "
-            f"{' '.join(updater.req_deviation)}{Colored.RESET}"
-        )
+        print(f"Mandatory Requirements Mismatch: {' '.join(updater.req_deviation)}")
         if config.getoption('update_required_reqs') or config.getoption('update_all_reqs'):
-            print('Updating the mandatory requirements on demand ....')
             updater.install_req_deviations()
+            print('Mandatory requirements were installed and are now up-to-date.')
+    else:
+        print('Mandatory requirements are up-to-date.')

     if updater.opt_deviation:
-        print(
-            f"{Colored.REDLIGHT}Optional Requirements Available: "
-            f"{' '.join(updater.opt_deviation)}{Colored.RESET}"
-        )
+        print(f"Optional Requirements Mismatch: {' '.join(updater.opt_deviation)}")
         if config.getoption('update_all_reqs'):
-            print('Updating the optional requirements on demand ....')
             updater.install_opt_deviations()
+            print('Optional requirements were installed and are now up-to-date.')
+    else:
+        print('Optional requirements are up-to-date.')
+
+    if updater.req_deviation or updater.opt_deviation:
+        print(
+            "To update mismatched requirements, run the pytest command with "
+            "'--update-required-reqs' OR '--update-all-reqs' option."
+        )
diff --git a/pytest_plugins/rerun_rp/rerun_rp.py b/pytest_plugins/rerun_rp/rerun_rp.py
index b2d92951419..7007880ce4b 100644
--- a/pytest_plugins/rerun_rp/rerun_rp.py
+++ b/pytest_plugins/rerun_rp/rerun_rp.py
@@ -73,7 +73,7 @@ def pytest_collection_modifyitems(items, config):
     failed/skipped and user-specific tests in Report Portal
     """
     rp_url = settings.report_portal.portal_url or config.getini('rp_endpoint')
-    rp_uuid = config.getini('rp_uuid') or settings.report_portal.api_key
+    rp_api_key = config.getini('rp_api_key') or settings.report_portal.api_key
     # prefer dynaconf setting before ini config as pytest-reportportal plugin uses default value
     # for `rp_launch` if none is set there
     rp_launch_name = settings.report_portal.launch_name or config.getini('rp_launch')
@@ -87,7 +87,7 @@ def pytest_collection_modifyitems(items, config):
     tests = []
     if not any([fail_args, skip_arg, user_arg]):
         return
-    rp = ReportPortal(rp_url=rp_url, rp_api_key=rp_uuid, rp_project=rp_project)
+    rp = ReportPortal(rp_url=rp_url, rp_api_key=rp_api_key, rp_project=rp_project)

     if ref_launch_uuid:
         logger.info(f'Fetching A reference Report Portal launch {ref_launch_uuid}')
@@ -123,7 +123,7 @@ def pytest_collection_modifyitems(items, config):
             test_args['status'].append('SKIPPED')
         if fail_args:
             test_args['status'].append('FAILED')
-            if not fail_args == 'all':
+            if fail_args != 'all':
                 defect_types = fail_args.split(',')
                 allowed_args = [*rp.defect_types.keys()]
                 if not set(defect_types).issubset(set(allowed_args)):
diff --git a/pytest_plugins/sanity_plugin.py b/pytest_plugins/sanity_plugin.py
new file mode 100644
index 00000000000..1d93a4b45f3
--- /dev/null
+++ b/pytest_plugins/sanity_plugin.py
@@ -0,0 +1,62 @@
+"""A sanity testing plugin to assist in executing robottelo tests as sanity tests

+1. Make the installer test run first, since it sets the hostname; all other tests
+   should then run after it
+"""
+
+
+class ConfigurationException(Exception):
+    """Raised when required pytest configuration is missing"""
+
+    pass
+
+
+def pytest_configure(config):
+    """Register markers related to testimony tokens"""
+    config.addinivalue_line(
+        "markers", "first_sanity: An installer test to run first in sanity testing"
+    )
+
+
+def pytest_collection_modifyitems(session, items, config):
+    if 'sanity' not in config.option.markexpr:
+        return
+
+    selected = []
+    deselected = []
+    installer_test = None
+
+    for item in items:
+        if item.get_closest_marker('build_sanity'):
+            # Identify the installer sanity test to run first
+            if not installer_test and item.get_closest_marker('first_sanity'):
+                installer_test = item
+                deselected.append(item)
+                continue
+            # Disable test parametrization for sanity
+            # Remove Puppet-based tests
+            if 'session_puppet_enabled_sat' in item.fixturenames and 'puppet' in item.name:
+                deselected.append(item)
+                continue
+            # Remove capsule tests
+            if 'sat_maintain' in item.fixturenames and 'capsule' in item.name:
+                deselected.append(item)
+                continue
+            # Remove parametrization from organization test
+            if (
+                'test_positive_create_with_name_and_description' in item.name
+                and 'alphanumeric' not in item.name
+            ):
+                deselected.append(item)
+                continue
+            # Else select
+            selected.append(item)
+
+    # Prepend the installer test so it runs first
+    if not installer_test:
+        raise ConfigurationException(
+            'The installer test is not configured to base the sanity testing on!'
+        )
+    selected.insert(0, installer_test)
+    config.hook.pytest_deselected(items=deselected)
+    items[:] = selected
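A hedged usage sketch for the plugin above: its collection hook only engages when the -m mark expression contains the substring 'sanity', and it expects a test marked first_sanity to be collected; a typical invocation (path illustrative) is:

example: pytest tests/foreman -m build_sanity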
diff --git a/pytest_plugins/settings_skip.py b/pytest_plugins/settings_skip.py
index 7a211cd32ae..e345338d5d8 100644
--- a/pytest_plugins/settings_skip.py
+++ b/pytest_plugins/settings_skip.py
@@ -1,7 +1,6 @@
 import pytest

-from robottelo.config import setting_is_set
-from robottelo.config import settings
+from robottelo.config import setting_is_set, settings


 def pytest_configure(config):
@@ -23,7 +22,7 @@ def pytest_runtest_setup(item):
     skip_marker = item.get_closest_marker('skip_if_not_set', None)
     if skip_marker and skip_marker.args:
         options_set = {arg.upper() for arg in skip_marker.args}
-        settings_set = {key for key in settings.keys() if not key.endswith('_FOR_DYNACONF')}
+        settings_set = {key for key in settings if not key.endswith('_FOR_DYNACONF')}
         if not options_set.issubset(settings_set):
             invalid = options_set.difference(settings_set)
             raise ValueError(
diff --git a/pytest_plugins/upgrade/scenario_workers.py b/pytest_plugins/upgrade/scenario_workers.py
index 87d84b97f86..9deeb0ddf25 100644
--- a/pytest_plugins/upgrade/scenario_workers.py
+++ b/pytest_plugins/upgrade/scenario_workers.py
@@ -3,10 +3,7 @@

 import pytest

-from robottelo.config import configure_airgun
-from robottelo.config import configure_nailgun
-from robottelo.config import settings
-
+from robottelo.config import configure_airgun, configure_nailgun, settings

 _json_file = 'upgrade_workers.json'
 json_file = Path(_json_file)
@@ -26,6 +23,7 @@ def save_worker_hostname(test_name, target_sat):
 def shared_workers():
     if json_file.exists():
         return json.loads(json_file.read_text())
+    return None


 def get_worker_hostname_from_testname(test_name, shared_workers):
diff --git a/pytest_plugins/video_cleanup.py b/pytest_plugins/video_cleanup.py
new file mode 100644
index 00000000000..320864b7060
--- /dev/null
+++ b/pytest_plugins/video_cleanup.py
@@ -0,0 +1,77 @@
+from urllib.parse import urlparse
+
+from box import Box
+from broker.hosts import Host
+import pytest
+
+from robottelo.config import settings
+from robottelo.logging import logger
+
+test_results = {}
+test_directories = [
+    'tests/foreman/destructive',
+    'tests/foreman/ui',
+    'tests/foreman/sanity',
+    'tests/foreman/virtwho',
+]
+
+
+def _clean_video(session_id, test):
+    if settings.ui.record_video:
+        logger.info(f"cleaning up video files for session: {session_id} and test: {test}")
+
+        if settings.ui.grid_url and session_id:
+            grid = urlparse(url=settings.ui.grid_url)
+            infra_grid = Host(hostname=grid.hostname)
+            infra_grid.execute(command=f'rm -rf /var/www/html/videos/{session_id}')
+            logger.info(f"video cleanup for session {session_id} is complete")
+        else:
+            logger.warning("missing grid_url or session_id; unable to clean video files")
+
+
+def pytest_addoption(parser):
+    """Custom pytest option to skip video cleanup on test success.
+    Args:
+        parser (object): The pytest command-line option parser.
+    Options:
+        --skip-video-cleanup: Skip video cleanup on test success (default: False).
+    """
+    parser.addoption(
+        "--skip-video-cleanup",
+        action="store_true",
+        default=False,
+        help="Skip video cleanup on test success",
+    )
+
+
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_makereport(item):
+    """Custom pytest hook to capture test outcomes and perform video cleanup.
+    Note:
+        This hook captures test results during 'call' and performs video cleanup
+        during 'teardown' if the test passed and the '--skip-video-cleanup' option is not set.
+    """
+    outcome = yield
+    report = outcome.get_result()
+    skip_video_cleanup = item.config.getoption("--skip-video-cleanup", False)
+
+    if not skip_video_cleanup and any(directory in report.fspath for directory in test_directories):
+        if report.when == "call":
+            test_results[item.nodeid] = Box(
+                {
+                    'outcome': report.outcome,
+                    'duration': report.duration,
+                    'longrepr': str(report.longrepr),
+                }
+            )
+        if report.when == "teardown" and item.nodeid in test_results:
+            result_info = test_results[item.nodeid]
+            if result_info.outcome == 'passed':
+                report.user_properties = [
+                    (key, value) for key, value in report.user_properties if key != 'video_url'
+                ]
+                session_id_tuple = next(
+                    (t for t in report.user_properties if t[0] == 'session_id'), None
+                )
+                session_id = session_id_tuple[1] if session_id_tuple else None
+                _clean_video(session_id, item.nodeid)
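A hedged usage sketch: grid-side recordings are only removed when settings.ui.record_video and settings.ui.grid_url are configured and the test lives under one of the directories listed above; the new flag preserves recordings of passing tests as well (path illustrative):

example: pytest tests/foreman/ui --skip-video-cleanup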
diff --git a/requirements-optional.txt b/requirements-optional.txt
index 59cb7353114..9cc9c0300f1 100644
--- a/requirements-optional.txt
+++ b/requirements-optional.txt
@@ -1,12 +1,12 @@
 # For running tests and checking code quality using these modules.
-flake8==6.0.0
-pytest-cov==3.0.0
-redis==4.5.5
-pre-commit==3.3.2
+flake8==7.0.0
+pytest-cov==5.0.0
+redis==5.0.3
+pre-commit==3.7.0

 # For generating documentation.
-sphinx==7.0.1
-sphinx-autoapi==2.1.0
+sphinx==7.2.6
+sphinx-autoapi==3.0.0

 # For 'manage' interactive shell
-manage>=0.1.13
+manage==0.1.15
diff --git a/requirements.txt b/requirements.txt
index 1488d8f89aa..c06725073a3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,31 +1,33 @@
 # Version updates managed by dependabot
-betelgeuse==1.10.0
-broker[docker]==0.3.2
-cryptography==40.0.2
-deepdiff==6.3.0
-dynaconf[vault]==3.1.12
-fauxfactory==3.1.0
-jinja2==3.1.2
-manifester==0.0.13
-navmazing==1.1.6
-productmd==1.35
-pyotp==2.8.0
-python-box==7.0.1
-pytest==7.3.1
+betelgeuse==1.11.0
+broker[docker]==0.4.9
+cryptography==42.0.5
+deepdiff==6.7.1
+dynaconf[vault]==3.2.5
+fauxfactory==3.1.1
+jinja2==3.1.3
+manifester==0.0.14
+navmazing==1.2.2
+productmd==1.38
+pyotp==2.9.0
+python-box==7.1.1
+pytest==8.1.1
+pytest-order==1.2.1
 pytest-services==2.2.1
-pytest-mock==3.10.0
-pytest-reportportal==5.1.8
-pytest-xdist==3.3.1
+pytest-mock==3.14.0
+pytest-reportportal==5.4.1
+pytest-xdist==3.5.0
+pytest-fixturecollection==0.1.2
 pytest-ibutsu==2.2.4
-PyYAML==6.0
+PyYAML==6.0.1
 requests==2.31.0
-tenacity==8.2.2
-testimony==2.2.0
+tenacity==8.2.3
+testimony==2.4.0
 wait-for==1.2.0
-wrapanapi==3.5.15
+wrapanapi==3.6.0

-# Get airgun, nailgun and upgrade from master
-git+https://github.com/SatelliteQE/airgun.git@master#egg=airgun
-git+https://github.com/SatelliteQE/nailgun.git@master#egg=nailgun
+# Get airgun and nailgun from 6.14.z
+airgun @ git+https://github.com/SatelliteQE/airgun.git@6.14.z#egg=airgun
+nailgun @ git+https://github.com/SatelliteQE/nailgun.git@6.14.z#egg=nailgun
 --editable .
diff --git a/robottelo/api/__init__.py b/robottelo/api/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/robottelo/api/utils.py b/robottelo/api/utils.py
deleted file mode 100644
index 42597a9c6e0..00000000000
--- a/robottelo/api/utils.py
+++ /dev/null
@@ -1,799 +0,0 @@
-"""Module containing convenience functions for working with the API."""
-import time
-
-from fauxfactory import gen_ipaddr
-from fauxfactory import gen_mac
-from fauxfactory import gen_string
-from nailgun import entities
-from nailgun import entity_mixins
-from nailgun.client import request
-from nailgun.entity_mixins import call_entity_method_with_timeout
-from requests import HTTPError
-
-from robottelo import ssh
-from robottelo.config import get_url
-from robottelo.config import settings
-from robottelo.constants import DEFAULT_ARCHITECTURE
-from robottelo.constants import DEFAULT_PTABLE
-from robottelo.constants import DEFAULT_PXE_TEMPLATE
-from robottelo.constants import DEFAULT_TEMPLATE
-from robottelo.constants import REPO_TYPE
-from robottelo.exceptions import ImproperlyConfigured
-
-
-def enable_rhrepo_and_fetchid(
-    basearch, org_id, product, repo, reposet, releasever=None, strict=False
-):
-    """Enable a RedHat Repository and fetches it's Id.
-
-    :param str org_id: The organization Id.
-    :param str product: The product name in which repository exists.
-    :param str reposet: The reposet name in which repository exists.
-    :param str repo: The repository name who's Id is to be fetched.
-    :param str basearch: The architecture of the repository.
-    :param str optional releasever: The releasever of the repository.
-    :param bool optional strict: Raise exception if the reposet was already enabled.
-    :return: Returns the repository Id.
- :rtype: str - - """ - product = entities.Product(name=product, organization=org_id).search()[0] - r_set = entities.RepositorySet(name=reposet, product=product).search()[0] - payload = {} - if basearch is not None: - payload['basearch'] = basearch - if releasever is not None: - payload['releasever'] = releasever - payload['product_id'] = product.id - try: - r_set.enable(data=payload) - except HTTPError as e: - if ( - not strict - and e.response.status_code == 409 - and 'repository is already enabled' in e.response.json()['displayMessage'] - ): - pass - else: - raise - result = entities.Repository(name=repo).search(query={'organization_id': org_id}) - return result[0].id - - -def create_sync_custom_repo( - org_id=None, - product_name=None, - repo_name=None, - repo_url=None, - repo_type=None, - repo_unprotected=True, - docker_upstream_name=None, -): - """Create product/repo, sync it and returns repo_id""" - if org_id is None: - org_id = entities.Organization().create().id - product_name = product_name or gen_string('alpha') - repo_name = repo_name or gen_string('alpha') - # Creates new product and repository via API's - product = entities.Product(name=product_name, organization=org_id).create() - repo = entities.Repository( - name=repo_name, - url=repo_url or settings.repos.yum_1.url, - content_type=repo_type or REPO_TYPE['yum'], - product=product, - unprotected=repo_unprotected, - docker_upstream_name=docker_upstream_name, - ).create() - # Sync repository - entities.Repository(id=repo.id).sync() - return repo.id - - -def enable_sync_redhat_repo(rh_repo, org_id, timeout=1500): - """Enable the RedHat repo, sync it and returns repo_id""" - # Enable RH repo and fetch repository_id - repo_id = enable_rhrepo_and_fetchid( - basearch=rh_repo['basearch'], - org_id=org_id, - product=rh_repo['product'], - repo=rh_repo['name'], - reposet=rh_repo['reposet'], - releasever=rh_repo['releasever'], - ) - # Sync repository - call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=timeout) - return repo_id - - -def cv_publish_promote(name=None, env_name=None, repo_id=None, org_id=None): - """Create, publish and promote CV to selected environment""" - if org_id is None: - org_id = entities.Organization().create().id - # Create Life-Cycle content environment - kwargs = {'name': env_name} if env_name is not None else {} - lce = entities.LifecycleEnvironment(organization=org_id, **kwargs).create() - # Create content view(CV) - kwargs = {'name': name} if name is not None else {} - content_view = entities.ContentView(organization=org_id, **kwargs).create() - # Associate YUM repo to created CV - if repo_id is not None: - content_view.repository = [entities.Repository(id=repo_id)] - content_view = content_view.update(['repository']) - # Publish content view - content_view.publish() - # Promote the content view version. - content_view.read().version[0].promote(data={'environment_ids': lce.id}) - return content_view.read() - - -def one_to_one_names(name): - """Generate the names Satellite might use for a one to one field. - - Example of usage:: - - >>> one_to_one_names('person') == {'person_name', 'person_id'} - True - - :param name: A field name. - :returns: A set including both ``name`` and variations on ``name``. - - """ - return {name + '_name', name + '_id'} - - -def configure_provisioning(org=None, loc=None, compute=False, os=None): - """Create and configure org, loc, product, repo, cv, env. 
Update proxy, - domain, subnet, compute resource, provision templates and medium with - previously created entities and create a hostgroup using all mentioned - entities. - - :param str org: Default Organization that should be used in both host - discovering and host provisioning procedures - :param str loc: Default Location that should be used in both host - discovering and host provisioning procedures - :param bool compute: If False creates a default Libvirt compute resource - :param str os: Specify the os to be used while provisioning and to - associate related entities to the specified os. - :return: List of created entities that can be re-used further in - provisioning or validation procedure (e.g. hostgroup or domain) - """ - # Create new organization and location in case they were not passed - if org is None: - org = entities.Organization().create() - if loc is None: - loc = entities.Location(organization=[org]).create() - if settings.repos.rhel7_os is None: - raise ImproperlyConfigured('settings file is not configured for rhel os') - # Create a new Life-Cycle environment - lc_env = entities.LifecycleEnvironment(organization=org).create() - # Create a Product, Repository for custom RHEL7 contents - product = entities.Product(organization=org).create() - repo = entities.Repository( - product=product, url=settings.repos.rhel7_os, download_policy='immediate' - ).create() - - # Increased timeout value for repo sync and CV publishing and promotion - try: - old_task_timeout = entity_mixins.TASK_TIMEOUT - entity_mixins.TASK_TIMEOUT = 3600 - repo.sync() - # Create, Publish and promote CV - content_view = entities.ContentView(organization=org).create() - content_view.repository = [repo] - content_view = content_view.update(['repository']) - content_view.publish() - content_view = content_view.read() - content_view.read().version[0].promote(data={'environment_ids': lc_env.id}) - finally: - entity_mixins.TASK_TIMEOUT = old_task_timeout - # Search for existing organization puppet environment, otherwise create a - # new one, associate organization and location where it is appropriate. - environments = entities.Environment().search(query=dict(search=f'organization_id={org.id}')) - if len(environments) > 0: - environment = environments[0].read() - environment.location.append(loc) - environment = environment.update(['location']) - else: - environment = entities.Environment(organization=[org], location=[loc]).create() - - # Search for SmartProxy, and associate location - proxy = entities.SmartProxy().search(query={'search': f'name={settings.server.hostname}'}) - proxy = proxy[0].read() - if loc.id not in [location.id for location in proxy.location]: - proxy.location.append(loc) - if org.id not in [organization.id for organization in proxy.organization]: - proxy.organization.append(org) - proxy = proxy.update(['location', 'organization']) - - # Search for existing domain or create new otherwise. Associate org, - # location and dns to it - _, _, domain = settings.server.hostname.partition('.') - domain = entities.Domain().search(query={'search': f'name="{domain}"'}) - if len(domain) == 1: - domain = domain[0].read() - domain.location.append(loc) - domain.organization.append(org) - domain.dns = proxy - domain = domain.update(['dns', 'location', 'organization']) - else: - domain = entities.Domain(dns=proxy, location=[loc], organization=[org]).create() - - # Search if subnet is defined with given network. 
- # If so, just update its relevant fields otherwise, - # Create new subnet - network = settings.vlan_networking.subnet - subnet = entities.Subnet().search(query={'search': f'network={network}'}) - if len(subnet) == 1: - subnet = subnet[0].read() - subnet.domain = [domain] - subnet.location.append(loc) - subnet.organization.append(org) - subnet.dns = proxy - subnet.dhcp = proxy - subnet.tftp = proxy - subnet.discovery = proxy - subnet.ipam = 'DHCP' - subnet = subnet.update( - ['domain', 'discovery', 'dhcp', 'dns', 'location', 'organization', 'tftp', 'ipam'] - ) - else: - # Create new subnet - subnet = entities.Subnet( - network=network, - mask=settings.vlan_networking.netmask, - domain=[domain], - location=[loc], - organization=[org], - dns=proxy, - dhcp=proxy, - tftp=proxy, - discovery=proxy, - ipam='DHCP', - ).create() - - # Search if Libvirt compute-resource already exists - # If so, just update its relevant fields otherwise, - # Create new compute-resource with 'libvirt' provider. - # compute boolean is added to not block existing test's that depend on - # Libvirt resource and use this same functionality to all CR's. - if compute is False: - resource_url = f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system' - comp_res = [ - res - for res in entities.LibvirtComputeResource().search() - if res.provider == 'Libvirt' and res.url == resource_url - ] - if len(comp_res) > 0: - computeresource = entities.LibvirtComputeResource(id=comp_res[0].id).read() - computeresource.location.append(loc) - computeresource.organization.append(org) - computeresource.update(['location', 'organization']) - else: - # Create Libvirt compute-resource - entities.LibvirtComputeResource( - provider='libvirt', - url=resource_url, - set_console_password=False, - display_type='VNC', - location=[loc.id], - organization=[org.id], - ).create() - - # Get the Partition table ID - ptable = ( - entities.PartitionTable().search(query={'search': f'name="{DEFAULT_PTABLE}"'})[0].read() - ) - if loc.id not in [location.id for location in ptable.location]: - ptable.location.append(loc) - if org.id not in [organization.id for organization in ptable.organization]: - ptable.organization.append(org) - ptable = ptable.update(['location', 'organization']) - - # Get the OS ID - if os is None: - os = ( - entities.OperatingSystem() - .search(query={'search': 'name="RedHat" AND (major="6" OR major="7")'})[0] - .read() - ) - else: - os_ver = os.split(' ')[1].split('.') - os = ( - entities.OperatingSystem() - .search( - query={ - 'search': f'family="Redhat" AND major="{os_ver[0]}" AND minor="{os_ver[1]}")' - } - )[0] - .read() - ) - - # Get the Provisioning template_ID and update with OS, Org, Location - provisioning_template = entities.ProvisioningTemplate().search( - query={'search': f'name="{DEFAULT_TEMPLATE}"'} - ) - provisioning_template = provisioning_template[0].read() - provisioning_template.operatingsystem.append(os) - if org.id not in [organization.id for organization in provisioning_template.organization]: - provisioning_template.organization.append(org) - if loc.id not in [location.id for location in provisioning_template.location]: - provisioning_template.location.append(loc) - provisioning_template = provisioning_template.update( - ['location', 'operatingsystem', 'organization'] - ) - - # Get the PXE template ID and update with OS, Org, location - pxe_template = entities.ProvisioningTemplate().search( - query={'search': f'name="{DEFAULT_PXE_TEMPLATE}"'} - ) - pxe_template = pxe_template[0].read() - 
pxe_template.operatingsystem.append(os) - if org.id not in [organization.id for organization in pxe_template.organization]: - pxe_template.organization.append(org) - if loc.id not in [location.id for location in pxe_template.location]: - pxe_template.location.append(loc) - pxe_template = pxe_template.update(['location', 'operatingsystem', 'organization']) - - # Get the arch ID - arch = ( - entities.Architecture().search(query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0].read() - ) - - # Update the OS to associate arch, ptable, templates - os.architecture.append(arch) - os.ptable.append(ptable) - os.provisioning_template.append(provisioning_template) - os.provisioning_template.append(pxe_template) - os = os.update(['architecture', 'provisioning_template', 'ptable']) - # kickstart_repository is the content view and lce bind repo - kickstart_repository = entities.Repository().search( - query=dict(content_view_id=content_view.id, environment_id=lc_env.id, name=repo.name) - )[0] - # Create Hostgroup - host_group = entities.HostGroup( - architecture=arch, - domain=domain.id, - subnet=subnet.id, - lifecycle_environment=lc_env.id, - content_view=content_view.id, - location=[loc.id], - environment=environment.id, - puppet_proxy=proxy, - puppet_ca_proxy=proxy, - content_source=proxy, - kickstart_repository=kickstart_repository, - root_pass=gen_string('alphanumeric'), - operatingsystem=os.id, - organization=[org.id], - ptable=ptable.id, - ).create() - - return { - 'host_group': host_group.name, - 'domain': domain.name, - 'environment': environment.name, - 'ptable': ptable.name, - 'subnet': subnet.name, - 'os': os.title, - } - - -def create_role_permissions(role, permissions_types_names, search=None): # pragma: no cover - """Create role permissions found in dict permissions_types_names. - - :param role: nailgun.entities.Role - :param permissions_types_names: a dict containing resource types - and permission names to add to the role. - :param search: string that contains search criteria that should be applied - to the filter - - example usage:: - - permissions_types_names = { - None: ['access_dashboard'], - 'Organization': ['view_organizations'], - 'Location': ['view_locations'], - 'Katello::KTEnvironment': [ - 'view_lifecycle_environments', - 'edit_lifecycle_environments', - 'promote_or_remove_content_views_to_environments' - ] - } - role = entities.Role(name='example_role_name').create() - create_role_permissions( - role, - permissions_types_names, - 'name = {0}'.format(lce.name) - ) - """ - for resource_type, permissions_name in permissions_types_names.items(): - if resource_type is None: - permissions_entities = [] - for name in permissions_name: - result = entities.Permission().search(query={'search': f'name="{name}"'}) - if not result: - raise entities.APIResponseError(f'permission "{name}" not found') - if len(result) > 1: - raise entities.APIResponseError( - f'found more than one entity for permission "{name}"' - ) - entity_permission = result[0] - if entity_permission.name != name: - raise entities.APIResponseError( - 'the returned permission is different from the' - ' requested one "{} != {}"'.format(entity_permission.name, name) - ) - permissions_entities.append(entity_permission) - else: - if not permissions_name: - raise ValueError( - 'resource type "{}" empty. 
You must select at' - ' least one permission'.format(resource_type) - ) - - resource_type_permissions_entities = entities.Permission().search( - query={'per_page': '350', 'search': f'resource_type="{resource_type}"'} - ) - if not resource_type_permissions_entities: - raise entities.APIResponseError( - f'resource type "{resource_type}" permissions not found' - ) - - permissions_entities = [ - entity - for entity in resource_type_permissions_entities - if entity.name in permissions_name - ] - # ensure that all the requested permissions entities where - # retrieved - permissions_entities_names = {entity.name for entity in permissions_entities} - not_found_names = set(permissions_name).difference(permissions_entities_names) - if not_found_names: - raise entities.APIResponseError( - f'permissions names entities not found "{not_found_names}"' - ) - entities.Filter(permission=permissions_entities, role=role, search=search).create() - - -def wait_for_tasks( - search_query, search_rate=1, max_tries=10, poll_rate=None, poll_timeout=None, must_succeed=True -): - """Search for tasks by specified search query and poll them to ensure that - task has finished. - - :param search_query: Search query that will be passed to API call. - :param search_rate: Delay between searches. - :param max_tries: How many times search should be executed. - :param poll_rate: Delay between the end of one task check-up and - the start of the next check-up. Parameter for - ``nailgun.entities.ForemanTask.poll()`` method. - :param poll_timeout: Maximum number of seconds to wait until timing out. - Parameter for ``nailgun.entities.ForemanTask.poll()`` method. - :param must_succeed: Assert success result on finished task. - :return: List of ``nailgun.entities.ForemanTasks`` entities. - :raises: ``AssertionError``. If not tasks were found until timeout. 
- """ - for _ in range(max_tries): - tasks = entities.ForemanTask().search(query={'search': search_query}) - if len(tasks) > 0: - for task in tasks: - task.poll(poll_rate=poll_rate, timeout=poll_timeout, must_succeed=must_succeed) - break - else: - time.sleep(search_rate) - else: - raise AssertionError(f"No task was found using query '{search_query}'") - return tasks - - -def wait_for_syncplan_tasks(repo_backend_id=None, timeout=10, repo_name=None): - """Search the pulp tasks and identify repositories sync tasks with - specified name or backend_identifier - - :param repo_backend_id: The Backend ID for the repository to identify the - repo in Pulp environment - :param timeout: Value to decided how long to check for the Sync task - :param repo_name: If repo_backend_id can not be passed, pass the repo_name - """ - if repo_name: - repo_backend_id = ( - entities.Repository() - .search(query={'search': f'name="{repo_name}"', 'per_page': '1000'})[0] - .backend_identifier - ) - # Fetch the Pulp password - pulp_pass = ssh.command( - 'grep "^default_password" /etc/pulp/server.conf | awk \'{print $2}\'' - ).stdout.splitlines()[0] - # Set the Timeout value - timeup = time.time() + int(timeout) * 60 - # Search Filter to filter out the task based on backend-id and sync action - filtered_req = { - 'criteria': { - 'filters': { - 'tags': {'$in': [f"pulp:repository:{repo_backend_id}"]}, - 'task_type': {'$in': ["pulp.server.managers.repo.sync.sync"]}, - } - } - } - while True: - if time.time() > timeup: - raise entities.APIResponseError(f'Pulp task with repo_id {repo_backend_id} not found') - # Send request to pulp API to get the task info - req = request( - 'POST', - f'{get_url()}/pulp/api/v2/tasks/search/', - verify=False, - auth=('admin', f'{pulp_pass}'), - headers={'content-type': 'application/json'}, - data=filtered_req, - ) - # Check Status code of response - if req.status_code != 200: - raise entities.APIResponseError(f'Pulp task with repo_id {repo_backend_id} not found') - # Check content of response - # It is '[]' string for empty content when backend_identifier is wrong - if len(req.content) > 2: - if req.json()[0].get('state') in ['finished']: - return True - elif req.json()[0].get('error'): - raise AssertionError( - f"Pulp task with repo_id {repo_backend_id} error or not found: " - f"'{req.json().get('error')}'" - ) - time.sleep(2) - - -def wait_for_errata_applicability_task( - host_id, from_when, search_rate=1, max_tries=10, poll_rate=None, poll_timeout=15 -): - """Search the generate applicability task for given host and make sure it finishes - - :param int host_id: Content host ID of the host where we are regenerating applicability. - :param int from_when: Timestamp (in UTC) to limit number of returned tasks to investigate. - :param int search_rate: Delay between searches. - :param int max_tries: How many times search should be executed. - :param int poll_rate: Delay between the end of one task check-up and - the start of the next check-up. Parameter for - ``nailgun.entities.ForemanTask.poll()`` method. - :param int poll_timeout: Maximum number of seconds to wait until timing out. - Parameter for ``nailgun.entities.ForemanTask.poll()`` method. - :return: Relevant errata applicability task. - :raises: ``AssertionError``. If not tasks were found for given host until timeout. 
- """ - assert isinstance(host_id, int), 'Param host_id have to be int' - assert isinstance(from_when, int), 'Param from_when have to be int' - now = int(time.time()) - assert from_when <= now, 'Param from_when have to be timestamp in the past' - for _ in range(max_tries): - now = int(time.time()) - max_age = now - from_when + 1 - search_query = ( - '( label = Actions::Katello::Host::GenerateApplicability OR label = ' - 'Actions::Katello::Host::UploadPackageProfile ) AND started_at > "%s seconds ago"' - % max_age - ) - tasks = entities.ForemanTask().search(query={'search': search_query}) - tasks_finished = 0 - for task in tasks: - if ( - task.label == 'Actions::Katello::Host::GenerateApplicability' - and host_id in task.input['host_ids'] - ): - task.poll(poll_rate=poll_rate, timeout=poll_timeout) - tasks_finished += 1 - elif ( - task.label == 'Actions::Katello::Host::UploadPackageProfile' - and host_id == task.input['host']['id'] - ): - task.poll(poll_rate=poll_rate, timeout=poll_timeout) - tasks_finished += 1 - if tasks_finished > 0: - break - time.sleep(search_rate) - else: - raise AssertionError(f"No task was found using query '{search_query}' for host '{host_id}'") - - -def create_discovered_host(name=None, ip_address=None, mac_address=None, options=None): - """Creates a discovered host. - - :param str name: Name of discovered host. - :param str ip_address: A valid ip address. - :param str mac_address: A valid mac address. - :param dict options: additional facts to add to discovered host - :return: dict of ``entities.DiscoveredHost`` facts. - """ - if name is None: - name = gen_string('alpha') - if ip_address is None: - ip_address = gen_ipaddr() - if mac_address is None: - mac_address = gen_mac(multicast=False) - if options is None: - options = {} - facts = { - 'name': name, - 'discovery_bootip': ip_address, - 'discovery_bootif': mac_address, - 'interfaces': 'eth0', - 'ipaddress': ip_address, - 'ipaddress_eth0': ip_address, - 'macaddress': mac_address, - 'macaddress_eth0': mac_address, - } - facts.update(options) - return entities.DiscoveredHost().facts(json={'facts': facts}) - - -def update_vm_host_location(vm_client, location_id): - """Update vm client host location. - - :param vm_client: A subscribed Virtual Machine client instance. - :param location_id: The location id to update the vm_client host with. 
- """ - host = entities.Host().search(query={'search': f'name={vm_client.hostname}'})[0] - host.location = entities.Location(id=location_id) - host.update(['location']) - - -def check_create_os_with_title(os_title): - """Check if the OS is present, if not create the required OS - - :param os_title: OS title to check, and create (like: RedHat 7.5) - :return: Created or found OS - """ - # Check if OS that image needs is present or no, If not create the OS - result = entities.OperatingSystem().search(query={'search': f'title="{os_title}"'}) - if result: - os = result[0] - else: - os_name, _, os_version = os_title.partition(' ') - os_version_major, os_version_minor = os_version.split('.') - os = entities.OperatingSystem( - name=os_name, major=os_version_major, minor=os_version_minor - ).create() - return os - - -def attach_custom_product_subscription(prod_name=None, host_name=None): - """Attach custom product subscription to client host - :param str prod_name: custom product name - :param str host_name: client host name - """ - host = entities.Host().search(query={'search': f'{host_name}'})[0] - product_subscription = entities.Subscription().search(query={'search': f'name={prod_name}'})[0] - entities.HostSubscription(host=host.id).add_subscriptions( - data={'subscriptions': [{'id': product_subscription.id, 'quantity': 1}]} - ) - - -class templateupdate: - """Context Manager to unlock lock template for updating""" - - def __init__(self, temp): - """Context manager that takes entities.ProvisioningTemplate's object - - :param entities.ProvisioningTemplate temp: entities.ProvisioningTemplate's object - """ - self.temp = temp - if not isinstance(self.temp, entities.ProvisioningTemplate): - raise TypeError( - 'The template should be of type entities.ProvisioningTemplate, {} given'.format( - type(temp) - ) - ) - - def __enter__(self): - """Unlocks template for update""" - if self.temp.locked: - self.temp.locked = False - self.temp.update(['locked']) - - def __exit__(self, exc_type, exc_val, exc_tb): - """Locks template after update""" - if not self.temp.locked: - self.temp.locked = True - self.temp.update(['locked']) - - -def update_provisioning_template(name=None, old=None, new=None): - """Update provisioning template content - - :param str name: template provisioning name - :param str old: current content - :param str new: replace content - - :return bool: True/False - """ - temp = ( - entities.ProvisioningTemplate() - .search(query={'per_page': '1000', 'search': f'name="{name}"'})[0] - .read() - ) - if old in temp.template: - with templateupdate(temp): - temp.template = temp.template.replace(old, new, 1) - update = temp.update(['template']) - return new in update.template - elif new in temp.template: - return True - else: - raise ValueError(f'{old} does not exists in template {name}') - - -def apply_package_filter(content_view, repo, package, inclusion=True): - """Apply package filter on content view - - :param content_view: entity content view - :param repo: entity repository - :param str package: package name to filter - :param bool inclusion: True/False based on include or exclude filter - - :return list : list of content view versions - """ - cv_filter = entities.RPMContentViewFilter( - content_view=content_view, inclusion=inclusion, repository=[repo] - ).create() - cv_filter_rule = entities.ContentViewFilterRule( - content_view_filter=cv_filter, name=package - ).create() - assert cv_filter.id == cv_filter_rule.content_view_filter.id - content_view.publish() - content_view = 
content_view.read() - content_view_version_info = content_view.version[0].read() - return content_view_version_info - - -def create_org_admin_role(orgs, locs, name=None): - """Helper function to create org admin role for particular - organizations and locations by cloning 'Organization admin' role. - - :param list orgs: The list of organizations for which the org admin is - being created - :param list locs: The list of locations for which the org admin is - being created - :param str name: The name of cloned Org Admin role, autogenerates if None provided - :return dict: The object of ```nailgun.Role``` of Org Admin role. - """ - name = gen_string('alpha') if not name else name - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( - data={'role': {'name': name, 'organization_ids': orgs or [], 'location_ids': locs or []}} - ) - return entities.Role(id=org_admin['id']).read() - - -def create_org_admin_user(orgs, locs): - """Helper function to create an Org Admin user by assigning org admin role and assign - taxonomies to Role and User - - The taxonomies for role and user will be assigned based on parameters of this function - - :return User: Returns the ```nailgun.entities.User``` object with passwd attr - """ - # Create Org Admin Role - org_admin = create_org_admin_role(orgs=orgs, locs=locs) - # Create Org Admin User - user_login = gen_string('alpha') - user_passwd = gen_string('alphanumeric') - user = entities.User( - login=user_login, - password=user_passwd, - organization=orgs, - location=locs, - role=[org_admin.id], - ).create() - user.passwd = user_passwd - return user - - -def disable_syncplan(sync_plan): - """ - Disable sync plans after a test to reduce distracting task events, logs, and load on Satellite. - Note that only a Sync Plan with a repo would create a noticeable load. - You can also create sync plans in a disabled state where it is unlikely to impact the test. - """ - sync_plan.enabled = False - sync_plan = sync_plan.update(['enabled']) - assert sync_plan.enabled is False diff --git a/robottelo/cli/base.py b/robottelo/cli/base.py index 4f66a05a089..8d258500b27 100644 --- a/robottelo/cli/base.py +++ b/robottelo/cli/base.py @@ -6,59 +6,11 @@ from robottelo import ssh from robottelo.cli import hammer from robottelo.config import settings +from robottelo.exceptions import CLIDataBaseError, CLIError, CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.ssh import get_client -class CLIError(Exception): - """Indicates that a CLI command could not be run.""" - - -class CLIBaseError(Exception): - """Indicates that a CLI command has finished with return code different - from zero. 
- - :param status: CLI command return code - :param stderr: contents of the ``stderr`` - :param msg: explanation of the error - - """ - - def __init__(self, status, stderr, msg): - self.status = status - self.stderr = stderr - self.msg = msg - super().__init__(msg) - self.message = msg - - def __str__(self): - """Include class name, status, stderr and msg to string repr so - assertRaisesRegexp can be used to assert error present on any - attribute - """ - return repr(self) - - def __repr__(self): - """Include class name status, stderr and msg to improve logging""" - return '{}(status={!r}, stderr={!r}, msg={!r}'.format( - type(self).__name__, self.status, self.stderr, self.msg - ) - - -class CLIReturnCodeError(CLIBaseError): - """Error to be raised when an error occurs due to some validation error - when execution hammer cli. - See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details - """ - - -class CLIDataBaseError(CLIBaseError): - """Error to be raised when an error occurs due to some missing parameter - which cause a data base error on hammer - See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details - """ - - class Base: """Base class for hammer CLI interaction @@ -84,7 +36,7 @@ def _handle_response(cls, response, ignore_stderr=None): :param ignore_stderr: indicates whether to throw a warning in logs if ``stderr`` is not empty. :returns: contents of ``stdout``. - :raises robottelo.cli.base.CLIReturnCodeError: If return code is + :raises robottelo.exceptions.CLIReturnCodeError: If return code is different from zero. """ if isinstance(response.stderr, tuple): @@ -119,6 +71,16 @@ def add_operating_system(cls, options=None): return cls.execute(cls._construct_command(options)) + @classmethod + def ping(cls, options=None): + """ + Display status of Satellite. + """ + + cls.command_sub = 'ping' + + return cls.execute(cls._construct_command(options)) + @classmethod def create(cls, options=None, timeout=None): """ @@ -170,28 +132,24 @@ def delete(cls, options=None, timeout=None): return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) @classmethod - def delete_parameter(cls, options=None): + def delete_parameter(cls, options=None, timeout=None): """ Deletes parameter from record. """ cls.command_sub = 'delete-parameter' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options), ignore_stderr=False, timeout=timeout) @classmethod - def dump(cls, options=None): + def dump(cls, options=None, timeout=None): """ Displays the content for existing partition table. 
""" cls.command_sub = 'dump' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options), ignore_stderr=False, timeout=timeout) @classmethod def _get_username_password(cls, username=None, password=None): @@ -207,15 +165,9 @@ def _get_username_password(cls, username=None, password=None): """ if username is None: - try: - username = getattr(cls, 'foreman_admin_username') - except AttributeError: - username = settings.server.admin_username + username = getattr(cls, 'foreman_admin_username', settings.server.admin_username) if password is None: - try: - password = getattr(cls, 'foreman_admin_password') - except AttributeError: - password = settings.server.admin_password + password = getattr(cls, 'foreman_admin_password', settings.server.admin_password) return (username, password) @@ -255,16 +207,14 @@ def execute( ) if return_raw_response: return response - else: - return cls._handle_response(response, ignore_stderr=ignore_stderr) + return cls._handle_response(response, ignore_stderr=ignore_stderr) @classmethod def sm_execute(cls, command, hostname=None, timeout=None, **kwargs): """Executes the satellite-maintain cli commands on the server via ssh""" env_var = kwargs.get('env_var') or '' client = get_client(hostname=hostname or cls.hostname) - result = client.execute(f'{env_var} satellite-maintain {command}', timeout=timeout) - return result + return client.execute(f'{env_var} satellite-maintain {command}', timeout=timeout) @classmethod def exists(cls, options=None, search=None): @@ -419,6 +369,4 @@ def _construct_command(cls, options=None): if isinstance(val, list): val = ','.join(str(el) for el in val) tail += f' --{key}="{val}"' - cmd = f"{cls.command_base} {cls.command_sub or ''} {tail.strip()} {cls.command_end or ''}" - - return cmd + return f"{cls.command_base or ''} {cls.command_sub or ''} {tail.strip()} {cls.command_end or ''}" diff --git a/robottelo/cli/bootdisk.py b/robottelo/cli/bootdisk.py new file mode 100644 index 00000000000..50df399f7f3 --- /dev/null +++ b/robottelo/cli/bootdisk.py @@ -0,0 +1,41 @@ +""" +Usage:: + + hammer bootdisk [OPTIONS] SUBCOMMAND [ARG] ... + +Parameters:: + + SUBCOMMAND Subcommand + [ARG] ... 
Subcommand arguments + +Subcommands:: + + generic Download generic image + host Download host image + subnet Download subnet generic image +""" +from robottelo.cli.base import Base + + +class Bootdisk(Base): + """Manipulates Bootdisk.""" + + command_base = 'bootdisk' + + @classmethod + def generic(cls, options=None): + """Download generic image""" + cls.command_sub = 'generic' + return cls.execute(cls._construct_command(options), output_format='json') + + @classmethod + def host(cls, options=None): + """Download host image""" + cls.command_sub = 'host' + return cls.execute(cls._construct_command(options), output_format='json') + + @classmethod + def subnet(cls, options=None): + """Download subnet generic image""" + cls.command_sub = 'subnet' + return cls.execute(cls._construct_command(options), output_format='json') diff --git a/robottelo/cli/capsule.py b/robottelo/cli/capsule.py index 9f126e5a2a4..1e4e4a20b36 100644 --- a/robottelo/cli/capsule.py +++ b/robottelo/cli/capsule.py @@ -35,9 +35,7 @@ def content_add_lifecycle_environment(cls, options): cls.command_sub = 'content add-lifecycle-environment' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def content_available_lifecycle_environments(cls, options): @@ -45,9 +43,7 @@ def content_available_lifecycle_environments(cls, options): cls.command_sub = 'content available-lifecycle-environments' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def content_info(cls, options): @@ -55,9 +51,7 @@ def content_info(cls, options): cls.command_sub = 'content info' - result = cls.execute(cls._construct_command(options), output_format='json') - - return result + return cls.execute(cls._construct_command(options), output_format='json') @classmethod def content_lifecycle_environments(cls, options): @@ -65,9 +59,7 @@ def content_lifecycle_environments(cls, options): cls.command_sub = 'content lifecycle-environments' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def content_remove_lifecycle_environment(cls, options): @@ -75,9 +67,7 @@ def content_remove_lifecycle_environment(cls, options): cls.command_sub = 'content remove-lifecycle-environment' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def content_synchronization_status(cls, options): @@ -85,9 +75,7 @@ def content_synchronization_status(cls, options): cls.command_sub = 'content synchronization-status' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def content_synchronize(cls, options, return_raw_response=None, timeout=3600000): @@ -95,7 +83,7 @@ def content_synchronize(cls, options, return_raw_response=None, timeout=3600000) cls.command_sub = 'content synchronize' - result = cls.execute( + return cls.execute( cls._construct_command(options), output_format='csv', ignore_stderr=True, @@ -103,17 +91,13 @@ def content_synchronize(cls, options, return_raw_response=None, timeout=3600000) timeout=timeout, ) - return result - @classmethod def 
import_classes(cls, options): """Import puppet classes from puppet Capsule.""" cls.command_sub = 'import-classes' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def refresh_features(cls, options): @@ -121,6 +105,4 @@ def refresh_features(cls, options): cls.command_sub = 'refresh-features' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') diff --git a/robottelo/cli/content_export.py b/robottelo/cli/content_export.py index 064f17a720d..8d67ddfc032 100644 --- a/robottelo/cli/content_export.py +++ b/robottelo/cli/content_export.py @@ -31,7 +31,7 @@ class ContentExport(Base): command_requires_org = True @classmethod - def list(cls, output_format='json', options=None): + def list(cls, options=None, output_format='json'): """ List previous exports """ diff --git a/robottelo/cli/contentview.py b/robottelo/cli/contentview.py index 8677d7de655..fb8001f89d7 100644 --- a/robottelo/cli/contentview.py +++ b/robottelo/cli/contentview.py @@ -34,8 +34,7 @@ -h, --help print help """ from robottelo.cli import hammer -from robottelo.cli.base import Base -from robottelo.cli.base import CLIError +from robottelo.cli.base import Base, CLIError class ContentViewFilterRule(Base): @@ -166,6 +165,12 @@ def version_delete(cls, options): cls.command_sub = 'version delete' return cls.execute(cls._construct_command(options), ignore_stderr=True) + @classmethod + def version_republish_repositories(cls, options): + """Republish the repositories of a content-view version.""" + cls.command_sub = 'version republish-repositories' + return cls.execute(cls._construct_command(options), ignore_stderr=True) + @classmethod def remove_from_environment(cls, options=None): """Remove content-view from an environment""" diff --git a/robottelo/cli/discoveredhost.py b/robottelo/cli/discoveredhost.py index e2fb2bf65c9..6404dfc89ec 100644 --- a/robottelo/cli/discoveredhost.py +++ b/robottelo/cli/discoveredhost.py @@ -31,10 +31,28 @@ class DiscoveredHost(Base): def provision(cls, options=None): """Manually provision discovered host""" cls.command_sub = 'provision' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def facts(cls, options=None): """Get all the facts associated with discovered host""" cls.command_sub = 'facts' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), output_format='csv') + + @classmethod + def auto_provision(cls, options=None): + """Auto provision discovered host""" + cls.command_sub = 'auto-provision' + return cls.execute(cls._construct_command(options), output_format='csv') + + @classmethod + def reboot(cls, options=None): + """Reboot discovered host""" + cls.command_sub = 'reboot' + return cls.execute(cls._construct_command(options), output_format='csv') + + @classmethod + def refresh_facts(cls, options=None): + """Refresh facts associated with discovered host""" + cls.command_sub = 'refresh-facts' + return cls.execute(cls._construct_command(options), output_format='csv') diff --git a/robottelo/cli/factory.py b/robottelo/cli/factory.py deleted file mode 100644 index fb111fdbd6e..00000000000 --- a/robottelo/cli/factory.py +++ /dev/null @@ -1,2253 +0,0 @@ -""" -Factory object creation for all CLI methods -""" -import datetime -import os -import 
pprint -import random -from os import chmod -from tempfile import mkstemp -from time import sleep - -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr -from fauxfactory import gen_mac -from fauxfactory import gen_netmask -from fauxfactory import gen_string -from fauxfactory import gen_url - -from robottelo import constants -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.architecture import Architecture -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.content_credentials import ContentCredential -from robottelo.cli.contentview import ContentView -from robottelo.cli.contentview import ContentViewFilter -from robottelo.cli.contentview import ContentViewFilterRule -from robottelo.cli.discoveryrule import DiscoveryRule -from robottelo.cli.domain import Domain -from robottelo.cli.environment import Environment -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.http_proxy import HttpProxy -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.job_template import JobTemplate -from robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.location import Location -from robottelo.cli.medium import Medium -from robottelo.cli.model import Model -from robottelo.cli.operatingsys import OperatingSys -from robottelo.cli.org import Org -from robottelo.cli.partitiontable import PartitionTable -from robottelo.cli.product import Product -from robottelo.cli.realm import Realm -from robottelo.cli.report_template import ReportTemplate -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.role import Role -from robottelo.cli.scap_policy import Scappolicy -from robottelo.cli.scap_tailoring_files import TailoringFiles -from robottelo.cli.scapcontent import Scapcontent -from robottelo.cli.subnet import Subnet -from robottelo.cli.subscription import Subscription -from robottelo.cli.syncplan import SyncPlan -from robottelo.cli.template import Template -from robottelo.cli.template_input import TemplateInput -from robottelo.cli.user import User -from robottelo.cli.usergroup import UserGroup -from robottelo.cli.usergroup import UserGroupExternal -from robottelo.cli.virt_who_config import VirtWhoConfig -from robottelo.config import settings -from robottelo.logging import logger -from robottelo.utils import ssh -from robottelo.utils.datafactory import valid_cron_expressions -from robottelo.utils.decorators import cacheable -from robottelo.utils.manifest import clone - - -ORG_KEYS = ['organization', 'organization-id', 'organization-label'] -CONTENT_VIEW_KEYS = ['content-view', 'content-view-id'] -LIFECYCLE_KEYS = ['lifecycle-environment', 'lifecycle-environment-id'] - - -class CLIFactoryError(Exception): - """Indicates an error occurred while creating an entity using hammer""" - - -def create_object(cli_object, options, values): - """ - Creates with dictionary of arguments. - - :param cli_object: A valid CLI object. - :param dict options: The default options accepted by the cli_object - create - :param dict values: Custom values to override default ones. 
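The defaults-plus-overrides merge described here is small enough to show in isolation; a minimal sketch with hypothetical option names (none of these keys is significant)::

    defaults = {'name': 'auto-generated', 'organization-id': None, 'description': None}
    overrides = {'organization-id': '42', 'typo-key': 'ignored'}

    # keys unknown to the factory are only logged, never applied
    unknown = set(overrides) - set(defaults)          # {'typo-key'}
    for key in set(defaults) & set(overrides):
        defaults[key] = overrides[key]
    # defaults -> {'name': 'auto-generated', 'organization-id': '42', 'description': None}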
- :raise robottelo.cli.factory.CLIFactoryError: Raise an exception if object - cannot be created. - :rtype: dict - :return: A dictionary representing the newly created resource. - - """ - if values: - diff = set(values.keys()).difference(set(options.keys())) - if diff: - logger.debug( - "Option(s) {} not supported by CLI factory. Please check for " - "a typo or update default options".format(diff) - ) - for key in set(options.keys()).intersection(set(values.keys())): - options[key] = values[key] - - try: - result = cli_object.create(options) - except CLIReturnCodeError as err: - # If the object is not created, raise exception, stop the show. - raise CLIFactoryError( - 'Failed to create {} with data:\n{}\n{}'.format( - cli_object.__name__, pprint.pformat(options, indent=2), err.msg - ) - ) - - # Sometimes we get a list with a dictionary and not - # a dictionary. - if type(result) is list and len(result) > 0: - result = result[0] - - return result - - -def _entity_with_credentials(credentials, cli_entity_cls): - """Create entity class using credentials. If credentials is None will - return cli_entity_cls itself - - :param credentials: tuple (login, password) - :param cli_entity_cls: Cli Entity Class - :return: Cli Entity Class - """ - if credentials is not None: - cli_entity_cls = cli_entity_cls.with_user(*credentials) - return cli_entity_cls - - -@cacheable -def make_activation_key(options=None): - """Creates an Activation Key - - :param options: Check options using `hammer activation-key create --help` on satellite. - - :returns ActivationKey object - """ - # Organization Name, Label or ID is a required field. - if ( - not options - or not options.get('organization') - and not options.get('organization-label') - and not options.get('organization-id') - ): - raise CLIFactoryError('Please provide a valid Organization.') - - args = { - 'content-view': None, - 'content-view-id': None, - 'description': None, - 'lifecycle-environment': None, - 'lifecycle-environment-id': None, - 'max-hosts': None, - 'name': gen_alphanumeric(), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'unlimited-hosts': None, - 'service-level': None, - 'purpose-role': None, - 'purpose-usage': None, - 'purpose-addons': None, - } - - return create_object(ActivationKey, args, options) - - -@cacheable -def make_architecture(options=None): - """Creates an Architecture - - :param options: Check options using `hammer architecture create --help` on satellite. - - :returns Architecture object - """ - args = {'name': gen_alphanumeric(), 'operatingsystem-ids': None} - - return create_object(Architecture, args, options) - - -@cacheable -def make_content_view(options=None): - """Creates a Content View - - :param options: Check options using `hammer content-view create --help` on satellite. - - :returns ContentView object - """ - return make_content_view_with_credentials(options) - - -def make_content_view_with_credentials(options=None, credentials=None): - """Helper function to create CV with credentials - - If credentials is None, the default credentials in robottelo.properties will be used. - """ - # Organization ID is a required field. 
- if not options or not options.get('organization-id'): - raise CLIFactoryError('Please provide a valid ORG ID.') - - args = { - 'component-ids': None, - 'composite': False, - 'description': None, - 'import-only': False, - 'label': None, - 'name': gen_string('alpha', 10), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'product': None, - 'product-id': None, - 'repositories': None, - 'repository-ids': None, - } - - cv_cls = _entity_with_credentials(credentials, ContentView) - return create_object(cv_cls, args, options) - - -@cacheable -def make_content_view_filter(options=None): - """Creates a Content View Filter - - :param options: Check options using `hammer content-view filter create --help` on satellite. - - :returns ContentViewFilter object - """ - - args = { - 'content-view': None, - 'content-view-id': None, - 'description': None, - 'inclusion': None, - 'name': gen_string('alpha', 10), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'original-packages': None, - 'repositories': None, - 'repository-ids': None, - 'type': None, - } - - return create_object(ContentViewFilter, args, options) - - -@cacheable -def make_content_view_filter_rule(options=None): - """Creates a Content View Filter Rule - - :param options: Check options using `hammer content-view filter rule create --help` on - satellite. - - :returns ContentViewFilterRule object - """ - - args = { - 'content-view': None, - 'content-view-filter': None, - 'content-view-filter-id': None, - 'content-view-id': None, - 'date-type': None, - 'end-date': None, - 'errata-id': None, - 'errata-ids': None, - 'max-version': None, - 'min-version': None, - 'name': None, - 'names': None, - 'start-date': None, - 'types': None, - 'version': None, - } - - return create_object(ContentViewFilterRule, args, options) - - -@cacheable -def make_discoveryrule(options=None): - """Creates a Discovery Rule - - :param options: Check options using `hammer discovery-rule create --help` on satellite. - - :returns DiscoveryRule object - """ - - # Organizations, Locations, search query, hostgroup are required fields. - if not options: - raise CLIFactoryError('Please provide required parameters') - # Organizations field is required - if not any(options.get(key) for key in ['organizations', 'organization-ids']): - raise CLIFactoryError('Please provide a valid organization field.') - # Locations field is required - if not any(options.get(key) for key in ['locations', 'location-ids']): - raise CLIFactoryError('Please provide a valid location field.') - # search query is required - if not options.get('search'): - raise CLIFactoryError('Please provide a valid search query') - # hostgroup is required - if not any(options.get(key) for key in ['hostgroup', 'hostgroup-id']): - raise CLIFactoryError('Please provide a valid hostgroup') - - args = { - 'enabled': None, - 'hostgroup': None, - 'hostgroup-id': None, - 'hostgroup-title': None, - 'hostname': None, - 'hosts-limit': None, - 'location-ids': None, - 'locations': None, - 'max-count': None, - 'name': gen_alphanumeric(), - 'organizations': None, - 'organization-ids': None, - 'priority': None, - 'search': None, - } - - return create_object(DiscoveryRule, args, options) - - -@cacheable -def make_content_credential(options=None): - """Creates a content credential. - - In Satellite 6.8, only gpg_key option is supported. - - :param options: Check options using `hammer content-credential create --help` on satellite. 
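The factories validate alternative option keys (as in ``make_discoveryrule`` above) with an ``any()`` idiom; a standalone sketch, raising ``ValueError`` only to keep it self-contained where the factory raises ``CLIFactoryError``::

    def require_one_of(options, keys):
        # the factory raises CLIFactoryError here; ValueError keeps the sketch standalone
        if not options or not any(options.get(key) for key in keys):
            raise ValueError(f'Please provide one of: {", ".join(keys)}')

    require_one_of({'hostgroup-id': '7'}, ('hostgroup', 'hostgroup-id'))  # passes silently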
- - :returns ContentCredential object - """ - # Organization ID is a required field. - if not options or not options.get('organization-id'): - raise CLIFactoryError('Please provide a valid ORG ID.') - - # Create a fake gpg key file if none was provided - if not options.get('path'): - (_, key_filename) = mkstemp(text=True) - os.chmod(key_filename, 0o700) - with open(key_filename, 'w') as gpg_key_file: - gpg_key_file.write(gen_alphanumeric(gen_integer(20, 50))) - else: - # If the key is provided get its local path and remove it from options - # to not override the remote path - key_filename = options.pop('path') - - args = { - 'path': f'/tmp/{gen_alphanumeric()}', - 'content-type': 'gpg_key', - 'name': gen_alphanumeric(), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - } - - # Upload file to server - ssh.get_client().put(key_filename, args['path']) - - return create_object(ContentCredential, args, options) - - -@cacheable -def make_location(options=None): - """Creates a Location - - :param options: Check options using `hammer location create --help` on satellite. - - :returns Location object - """ - args = { - 'compute-resource-ids': None, - 'compute-resources': None, - 'description': None, - 'domain-ids': None, - 'domains': None, - 'environment-ids': None, - 'environments': None, - 'puppet-environment-ids': None, - 'puppet-environments': None, - 'hostgroup-ids': None, - 'hostgroups': None, - 'medium-ids': None, - 'name': gen_alphanumeric(), - 'parent-id': None, - 'provisioning-template-ids': None, - 'provisioning-templates': None, - 'realm-ids': None, - 'realms': None, - 'smart-proxy-ids': None, - 'smart-proxies': None, - 'subnet-ids': None, - 'subnets': None, - 'user-ids': None, - 'users': None, - } - - return create_object(Location, args, options) - - -@cacheable -def make_model(options=None): - """Creates a Hardware Model - - :param options: Check options using `hammer model create --help` on satellite. - - :returns Model object - """ - args = {'hardware-model': None, 'info': None, 'name': gen_alphanumeric(), 'vendor-class': None} - - return create_object(Model, args, options) - - -@cacheable -def make_partition_table(options=None): - """Creates a Partition Table - - :param options: Check options using `hammer partition-table create --help` on satellite. - - :returns PartitionTable object - """ - if options is None: - options = {} - (_, layout) = mkstemp(text=True) - os.chmod(layout, 0o700) - with open(layout, 'w') as ptable: - ptable.write(options.get('content', 'default ptable content')) - - args = { - 'file': f'/tmp/{gen_alphanumeric()}', - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric(), - 'operatingsystem-ids': None, - 'operatingsystems': None, - 'organization-ids': None, - 'organizations': None, - 'os-family': random.choice(constants.OPERATING_SYSTEMS), - } - - # Upload file to server - ssh.get_client().put(layout, args['file']) - - return create_object(PartitionTable, args, options) - - -@cacheable -def make_product(options=None): - """Creates a Product - - :param options: Check options using `hammer product create --help` on satellite. - - :returns Product object - """ - return make_product_with_credentials(options) - - -def make_product_with_credentials(options=None, credentials=None): - """Helper function to create product with credentials""" - # Organization ID is a required field. 
- if not options or not options.get('organization-id'): - raise CLIFactoryError('Please provide a valid ORG ID.') - - args = { - 'description': gen_string('alpha', 20), - 'gpg-key': None, - 'gpg-key-id': None, - 'label': gen_string('alpha', 20), - 'name': gen_string('alpha', 20), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'sync-plan': None, - 'sync-plan-id': None, - } - product_cls = _entity_with_credentials(credentials, Product) - return create_object(product_cls, args, options) - - -def make_product_wait(options=None, wait_for=5): - """Wrapper function for make_product to make it wait before erroring out. - - This is a temporary workaround for BZ#1332650: Sometimes cli product - create errors for no reason when there are multiple product creation - requests at the same time although the product entities are created. This - workaround will attempt to wait for 5 seconds and query the - product again to make sure it is actually created. If it is not found, - it will fail and stop. - - Note: This wrapper method is created instead of patching make_product - because this issue does not happen for all entities and this workaround - should be removed once the root cause is identified/fixed. - """ - # Organization ID is a required field. - if not options or not options.get('organization-id'): - raise CLIFactoryError('Please provide a valid ORG ID.') - options['name'] = options.get('name', gen_string('alpha')) - try: - product = make_product(options) - except CLIFactoryError as err: - sleep(wait_for) - try: - product = Product.info( - {'name': options.get('name'), 'organization-id': options.get('organization-id')} - ) - except CLIReturnCodeError: - raise err - if not product: - raise err - return product - - -@cacheable -def make_repository(options=None): - """Creates a Repository - - :param options: Check options using `hammer repository create --help` on satellite. - - :returns Repository object - """ - return make_repository_with_credentials(options) - - -def make_repository_with_credentials(options=None, credentials=None): - """Helper function to create Repository with credentials""" - # Product ID is a required field. - if not options or not options.get('product-id'): - raise CLIFactoryError('Please provide a valid Product ID.') - - args = { - 'checksum-type': None, - 'content-type': 'yum', - 'include-tags': None, - 'docker-upstream-name': None, - 'download-policy': None, - 'gpg-key': None, - 'gpg-key-id': None, - 'ignorable-content': None, - 'label': None, - 'mirroring-policy': None, - 'name': gen_string('alpha', 15), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'product': None, - 'product-id': None, - 'publish-via-http': 'true', - 'http-proxy': None, - 'http-proxy-id': None, - 'http-proxy-policy': None, - 'ansible-collection-requirements': None, - 'ansible-collection-requirements-file': None, - 'ansible-collection-auth-token': None, - 'ansible-collection-auth-url': None, - 'url': settings.repos.yum_1.url, - 'upstream-username': None, - 'upstream-password': None, - } - repo_cls = _entity_with_credentials(credentials, Repository) - return create_object(repo_cls, args, options) - - -@cacheable -def make_role(options=None): - """Creates a Role - - :param options: Check options using `hammer role create --help` on satellite. 
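The wait-and-requery workaround in ``make_product_wait`` above generalizes to a small pattern; a sketch with a stand-in error class, where ``create`` and ``fetch`` are any pair of create and lookup callables (illustrative names)::

    from time import sleep

    class CLIFactoryError(Exception):
        """Stand-in for the module's error class."""

    def create_or_requery(create, fetch, wait_for=5):
        """Try to create; on failure, wait and check whether it exists anyway."""
        try:
            return create()
        except CLIFactoryError as err:
            sleep(wait_for)
            entity = fetch()
            if not entity:
                raise err
            return entity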
- - :returns Role object - """ - # Assigning default values for attributes - args = {'name': gen_alphanumeric(6)} - - return create_object(Role, args, options) - - -@cacheable -def make_filter(options=None): - """Creates a Role Filter - - :param options: Check options using `hammer filter create --help` on satellite. - - :returns Role object - """ - args = { - 'location-ids': None, - 'locations': None, - 'organization-ids': None, - 'organizations': None, - 'override': None, - 'permission-ids': None, - 'permissions': None, - 'role': None, - 'role-id': None, - 'search': None, - } - - # Role and permissions are required fields. - if not options: - raise CLIFactoryError('Please provide required parameters') - - # Do we have at least one role field? - if not any(options.get(key) for key in ['role', 'role-id']): - raise CLIFactoryError('Please provide a valid role field.') - - # Do we have at least one permissions field? - if not any(options.get(key) for key in ['permissions', 'permission-ids']): - raise CLIFactoryError('Please provide a valid permissions field.') - - return create_object(Filter, args, options) - - -@cacheable -def make_scap_policy(options=None): - """Creates a Scap Policy - - :param options: Check options using `hammer policy create --help` on satellite. - - :returns Scappolicy object - """ - # Assigning default values for attributes - # SCAP ID and SCAP profile ID is a required field. - if ( - not options - and not options.get('scap-content-id') - and not options.get('scap-content-profile-id') - and not options.get('period') - and not options.get('deploy-by') - ): - raise CLIFactoryError( - 'Please provide a valid SCAP ID or SCAP Profile ID or Period or Deploy by option' - ) - args = { - 'description': None, - 'scap-content-id': None, - 'scap-content-profile-id': None, - 'deploy-by': None, - 'period': None, - 'weekday': None, - 'day-of-month': None, - 'cron-line': None, - 'hostgroup-ids': None, - 'hostgroups': None, - 'locations': None, - 'organizations': None, - 'tailoring-file': None, - 'tailoring-file-id': None, - 'tailoring-file-profile-id': None, - 'location-ids': None, - 'name': gen_alphanumeric().lower(), - 'organization-ids': None, - } - - return create_object(Scappolicy, args, options) - - -@cacheable -def make_subnet(options=None): - """Creates a Subnet - - :param options: Check options using `hammer subnet create --help` on satellite. - - :returns Subnet object - """ - args = { - 'boot-mode': None, - 'dhcp-id': None, - 'dns-id': None, - 'dns-primary': None, - 'dns-secondary': None, - 'domain-ids': None, - 'domains': None, - 'from': None, - 'gateway': None, - 'ipam': None, - 'location-ids': None, - 'locations': None, - 'mask': gen_netmask(), - 'name': gen_alphanumeric(8), - 'network': gen_ipaddr(ip3=True), - 'organization-ids': None, - 'organizations': None, - 'tftp-id': None, - 'to': None, - 'vlanid': None, - } - - return create_object(Subnet, args, options) - - -@cacheable -def make_sync_plan(options=None): - """Creates a Sync Plan - - :param options: Check options using `hammer sync-plan create --help` on satellite. - - :returns SyncPlan object - """ - # Organization ID is a required field. 
- if not options or not options.get('organization-id'): - raise CLIFactoryError('Please provide a valid ORG ID.') - - args = { - 'description': gen_string('alpha', 20), - 'enabled': 'true', - 'interval': random.choice(list(constants.SYNC_INTERVAL.values())), - 'name': gen_string('alpha', 20), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'sync-date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), - 'cron-expression': None, - } - if options.get('interval', args['interval']) == constants.SYNC_INTERVAL[ - 'custom' - ] and not options.get('cron-expression'): - args['cron-expression'] = gen_choice(valid_cron_expressions()) - return create_object(SyncPlan, args, options) - - -@cacheable -def make_host(options=None): - """Creates a Host - - :param options: Check options using `hammer host create --help` on satellite. - - :returns Host object - """ - args = { - 'architecture': None, - 'architecture-id': None, - 'ask-root-password': None, - 'autoheal': None, - 'build': None, - 'comment': None, - 'compute-attributes': None, - 'compute-profile': None, - 'compute-profile-id': None, - 'compute-resource': None, - 'compute-resource-id': None, - 'content-source-id': None, - 'content-view': None, - 'content-view-id': None, - 'domain': None, - 'domain-id': None, - 'enabled': None, - 'environment': None, - 'environment-id': None, - 'hostgroup': None, - 'hostgroup-id': None, - 'hostgroup-title': None, - 'hypervisor-guest-uuids': None, - 'image': None, - 'image-id': None, - 'interface': None, - 'ip': gen_ipaddr(), - 'kickstart-repository-id': None, - 'lifecycle-environment': None, - 'lifecycle-environment-id': None, - 'location': None, - 'location-id': None, - 'mac': gen_mac(multicast=False), - 'managed': None, - 'medium': None, - 'medium-id': None, - 'model': None, - 'model-id': None, - 'name': gen_string('alpha', 10), - 'operatingsystem': None, - 'operatingsystem-id': None, - 'openscap-proxy-id': None, - 'organization': None, - 'organization-id': None, - 'overwrite': None, - 'owner': None, - 'owner-id': None, - 'owner-type': None, - 'parameters': None, - 'partition-table': None, - 'partition-table-id': None, - 'progress-report-id': None, - 'provision-method': None, - 'puppet-ca-proxy': None, - 'puppet-ca-proxy-id': None, - 'puppet-class-ids': None, - 'puppet-classes': None, - 'puppet-proxy': None, - 'puppet-proxy-id': None, - 'pxe-loader': None, - 'realm': None, - 'realm-id': None, - 'root-password': gen_string('alpha', 8), - 'service-level': None, - 'subnet': None, - 'subnet-id': None, - 'volume': None, - } - - return create_object(Host, args, options) - - -@cacheable -def make_fake_host(options=None): - """Wrapper function for make_host to pass all required options for creation - of a fake host - """ - if options is None: - options = {} - - # Try to use default Satellite entities, otherwise create them if they were - # not passed or defined previously - if not options.get('organization') and not options.get('organization-id'): - try: - options['organization-id'] = Org.info({'name': constants.DEFAULT_ORG})['id'] - except CLIReturnCodeError: - options['organization-id'] = make_org()['id'] - if not options.get('location') and not options.get('location-id'): - try: - options['location-id'] = Location.info({'name': constants.DEFAULT_LOC})['id'] - except CLIReturnCodeError: - options['location-id'] = make_location()['id'] - if not options.get('domain') and not options.get('domain-id'): - options['domain-id'] = make_domain( - { - 'location-ids': 
options.get('location-id'), - 'locations': options.get('location'), - 'organization-ids': options.get('organization-id'), - 'organizations': options.get('organization'), - } - )['id'] - if not options.get('architecture') and not options.get('architecture-id'): - try: - options['architecture-id'] = Architecture.info( - {'name': constants.DEFAULT_ARCHITECTURE} - )['id'] - except CLIReturnCodeError: - options['architecture-id'] = make_architecture()['id'] - if not options.get('operatingsystem') and not options.get('operatingsystem-id'): - try: - options['operatingsystem-id'] = OperatingSys.list( - {'search': 'name="RedHat" AND (major="7" OR major="8")'} - )[0]['id'] - except IndexError: - options['operatingsystem-id'] = make_os( - { - 'architecture-ids': options.get('architecture-id'), - 'architectures': options.get('architecture'), - 'partition-table-ids': options.get('partition-table-id'), - 'partition-tables': options.get('partition-table'), - } - )['id'] - if not options.get('partition-table') and not options.get('partition-table-id'): - try: - options['partition-table-id'] = PartitionTable.list( - { - 'operatingsystem': options.get('operatingsystem'), - 'operatingsystem-id': options.get('operatingsystem-id'), - } - )[0]['id'] - except IndexError: - options['partition-table-id'] = make_partition_table( - { - 'location-ids': options.get('location-id'), - 'locations': options.get('location'), - 'operatingsystem-ids': options.get('operatingsystem-id'), - 'organization-ids': options.get('organization-id'), - 'organizations': options.get('organization'), - } - )['id'] - - # Finally, create a new medium (if none was passed) - if not options.get('medium') and not options.get('medium-id'): - options['medium-id'] = make_medium( - { - 'location-ids': options.get('location-id'), - 'locations': options.get('location'), - 'operatingsystems': options.get('operatingsystem'), - 'operatingsystem-ids': options.get('operatingsystem-id'), - 'organization-ids': options.get('organization-id'), - 'organizations': options.get('organization'), - } - )['id'] - - return make_host(options) - - -@cacheable -def make_host_collection(options=None): - """Creates a Host Collection - - :param options: Check options using `hammer host-collection create --help` on satellite. - - :returns HostCollection object - """ - # Assigning default values for attributes - args = { - 'description': None, - 'host-collection-ids': None, - 'hosts': None, - 'max-hosts': None, - 'name': gen_string('alpha', 15), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'unlimited-hosts': None, - } - - return create_object(HostCollection, args, options) - - -@cacheable -def make_job_invocation(options=None): - """Creates a Job Invocation - - :param options: Check options using `hammer job-invocation create --help` on satellite. 
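``make_fake_host`` above resolves each missing entity with the same look-up-or-create step; condensed for the organization case (assumes the module's ``Org``, ``constants``, ``make_org`` and ``CLIReturnCodeError`` imports, ``resolve_org_id`` is an illustrative name)::

    def resolve_org_id(options):
        """Mirror of the fallback chain: passed id -> stock default org -> new org."""
        if options.get('organization-id'):
            return options['organization-id']
        try:
            return Org.info({'name': constants.DEFAULT_ORG})['id']
        except CLIReturnCodeError:
            return make_org()['id']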
- - :returns JobInvocation object - """ - return make_job_invocation_with_credentials(options) - - -def make_job_invocation_with_credentials(options=None, credentials=None): - """Helper function to create Job Invocation with credentials""" - - args = { - 'async': None, - 'bookmark': None, - 'bookmark-id': None, - 'concurrency-level': None, - 'cron-line': None, - 'description-format': None, - 'dynamic': None, - 'effective-user': None, - 'end-time': None, - 'input-files': None, - 'inputs': None, - 'job-template': None, - 'job-template-id': None, - 'max-iteration': None, - 'search-query': None, - 'start-at': None, - 'start-before': None, - 'time-span': None, - } - - jinv_cls = _entity_with_credentials(credentials, JobInvocation) - return create_object(jinv_cls, args, options) - - -@cacheable -def make_job_template(options=None): - """Creates a Job Template - - :param options: Check options using `hammer job-template create --help` on satellite. - - :returns JobTemplate object - """ - args = { - 'audit-comment': None, - 'current-user': None, - 'description-format': None, - 'file': None, - 'job-category': 'Miscellaneous', - 'location-ids': None, - 'locations': None, - 'name': None, - 'organization-ids': None, - 'organizations': None, - 'overridable': None, - 'provider-type': 'SSH', - 'snippet': None, - 'value': None, - } - - return create_object(JobTemplate, args, options) - - -@cacheable -def make_user(options=None): - """Creates a User - - :param options: Check options using `hammer user create --help` on satellite. - - :returns User object - """ - login = gen_alphanumeric(6) - - # Assigning default values for attributes - args = { - 'admin': None, - 'auth-source-id': 1, - 'default-location-id': None, - 'default-organization-id': None, - 'description': None, - 'firstname': gen_alphanumeric(), - 'lastname': gen_alphanumeric(), - 'location-ids': None, - 'login': login, - 'mail': f'{login}@example.com', - 'organization-ids': None, - 'password': gen_alphanumeric(), - 'timezone': None, - } - logger.debug( - 'User "{}" password not provided {} was generated'.format(args['login'], args['password']) - ) - - return create_object(User, args, options) - - -@cacheable -def make_usergroup(options=None): - """Creates a User Group - - :param options: Check options using `hammer user-group create --help` on satellite. - - :returns UserGroup object - """ - # Assigning default values for attributes - args = { - 'admin': None, - 'name': gen_alphanumeric(), - 'role-ids': None, - 'roles': None, - 'user-group-ids': None, - 'user-groups': None, - 'user-ids': None, - 'users': None, - } - - return create_object(UserGroup, args, options) - - -@cacheable -def make_usergroup_external(options=None): - """Creates an External User Group - - :param options: Check options using `hammer user-group external create --help` on satellite. - - :returns UserGroupExternal object - """ - # UserGroup Name or ID is a required field. - if not options or not options.get('user-group') and not options.get('user-group-id'): - raise CLIFactoryError('Please provide a valid UserGroup.') - - # Assigning default values for attributes - args = { - 'auth-source-id': 1, - 'name': gen_alphanumeric(8), - 'user-group': None, - 'user-group-id': None, - } - - return create_object(UserGroupExternal, args, options) - - -@cacheable -def make_ldap_auth_source(options=None): - """Creates an LDAP Auth Source - - :param options: Check options using `hammer auth-source ldap create --help` on satellite. 
- - :returns LDAPAuthSource object - """ - # Assigning default values for attributes - args = { - 'account': None, - 'account-password': None, - 'attr-firstname': None, - 'attr-lastname': None, - 'attr-login': None, - 'attr-mail': None, - 'attr-photo': None, - 'base-dn': None, - 'groups-base': None, - 'host': None, - 'ldap-filter': None, - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric(), - 'onthefly-register': None, - 'organization-ids': None, - 'organizations': None, - 'port': None, - 'server-type': None, - 'tls': None, - 'usergroup-sync': None, - } - - return create_object(LDAPAuthSource, args, options) - - -@cacheable -def make_compute_resource(options=None): - """Creates a Compute Resource - - :param options: Check options using `hammer compute-resource create --help` on satellite. - - :returns ComputeResource object - """ - args = { - 'caching-enabled': None, - 'datacenter': None, - 'description': None, - 'display-type': None, - 'domain': None, - 'location': None, - 'location-id': None, - 'location-ids': None, - 'location-title': None, - 'location-titles': None, - 'locations': None, - 'name': gen_alphanumeric(8), - 'organization': None, - 'organization-id': None, - 'organization-ids': None, - 'organization-title': None, - 'organization-titles': None, - 'organizations': None, - 'ovirt-quota': None, - 'password': None, - 'project-domain-id': None, - 'project-domain-name': None, - 'provider': None, - 'public-key': None, - 'public-key-path': None, - 'region': None, - 'server': None, - 'set-console-password': None, - 'tenant': None, - 'url': None, - 'use-v4': None, - 'user': None, - 'uuid': None, - } - - if options is None: - options = {} - - if options.get('provider') is None: - options['provider'] = constants.FOREMAN_PROVIDERS['libvirt'] - if options.get('url') is None: - options['url'] = 'qemu+tcp://localhost:16509/system' - - return create_object(ComputeResource, args, options) - - -@cacheable -def make_org(options=None): - """Creates an Organization - - :param options: Check options using `hammer organization create --help` on satellite. - - :returns Organization object - """ - return make_org_with_credentials(options) - - -def make_org_with_credentials(options=None, credentials=None): - """Helper function to create organization with credentials""" - # Assigning default values for attributes - args = { - 'compute-resource-ids': None, - 'compute-resources': None, - 'provisioning-template-ids': None, - 'provisioning-templates': None, - 'description': None, - 'domain-ids': None, - 'environment-ids': None, - 'environments': None, - 'hostgroup-ids': None, - 'hostgroups': None, - 'label': None, - 'media-ids': None, - 'media': None, - 'name': gen_alphanumeric(6), - 'realm-ids': None, - 'realms': None, - 'smart-proxy-ids': None, - 'smart-proxies': None, - 'subnet-ids': None, - 'subnets': None, - 'user-ids': None, - 'users': None, - } - org_cls = _entity_with_credentials(credentials, Org) - return create_object(org_cls, args, options) - - -@cacheable -def make_realm(options=None): - """Creates a REALM - - :param options: Check options using `hammer realm create --help` on satellite. 
- - :returns Realm object - """ - # Assigning default values for attributes - args = { - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric(6), - 'organization-ids': None, - 'organizations': None, - 'realm-proxy-id': None, - 'realm-type': None, - } - - return create_object(Realm, args, options) - - -@cacheable -def make_report_template(options=None): - """Creates a Report Template - - :param options: Check options using `hammer report-template create --help` on satellite. - - :returns ReportTemplate object - """ - if options is not None and 'content' in options.keys(): - content = options.pop('content') - else: - content = gen_alphanumeric() - - args = { - 'audit-comment': None, - 'default': None, - 'file': content, - 'interactive': None, - 'location': None, - 'location-id': None, - 'location-ids': None, - 'location-title': None, - 'location-titles': None, - 'locations': None, - 'locked': None, - 'name': gen_alphanumeric(10), - 'organization': None, - 'organization-id': None, - 'organization-ids': None, - 'organization-title': None, - 'organization-titles': None, - 'organizations': None, - 'snippet': None, - } - return create_object(ReportTemplate, args, options) - - -@cacheable -def make_os(options=None): - """Creates an Operating System - - :param options: Check options using `hammer os create --help` on satellite. - - :returns OperatingSys object - """ - # Assigning default values for attributes - args = { - 'architecture-ids': None, - 'architectures': None, - 'provisioning-template-ids': None, - 'provisioning-templates': None, - 'description': None, - 'family': None, - 'major': random.randint(0, 10), - 'media': None, - 'medium-ids': None, - 'minor': random.randint(0, 10), - 'name': gen_alphanumeric(6), - 'partition-table-ids': None, - 'partition-tables': None, - 'password-hash': None, - 'release-name': None, - } - - return create_object(OperatingSys, args, options) - - -@cacheable -def make_scapcontent(options=None): - """Creates Scap Content - - :param options: Check options using `hammer scap-content create --help` on satellite. - - :returns ScapContent object - """ - # Assigning default values for attributes - args = { - 'scap-file': None, - 'original-filename': None, - 'location-ids': None, - 'locations': None, - 'title': gen_alphanumeric().lower(), - 'organization-ids': None, - 'organizations': None, - } - - return create_object(Scapcontent, args, options) - - -@cacheable -def make_domain(options=None): - """Creates a Domain - - :param options: Check options using `hammer domain create --help` on satellite. - - :returns Domain object - """ - # Assigning default values for attributes - args = { - 'description': None, - 'dns-id': None, - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric().lower(), - 'organization-ids': None, - 'organizations': None, - } - - return create_object(Domain, args, options) - - -@cacheable -def make_hostgroup(options=None): - """Creates a Hostgroup - - :param options: Check options using `hammer hostgroup create --help` on satellite. 
- - :returns Hostgroup object - """ - # Assigning default values for attributes - args = { - 'architecture': None, - 'architecture-id': None, - 'compute-profile': None, - 'compute-profile-id': None, - 'config-group-ids': None, - 'config-groups': None, - 'content-source-id': None, - 'content-source': None, - 'content-view': None, - 'content-view-id': None, - 'domain': None, - 'domain-id': None, - 'environment': None, - 'puppet-environment': None, - 'environment-id': None, - 'puppet-environment-id': None, - 'locations': None, - 'location-ids': None, - 'kickstart-repository-id': None, - 'lifecycle-environment': None, - 'lifecycle-environment-id': None, - 'lifecycle-environment-organization-id': None, - 'medium': None, - 'medium-id': None, - 'name': gen_alphanumeric(6), - 'operatingsystem': None, - 'operatingsystem-id': None, - 'organizations': None, - 'organization-titles': None, - 'organization-ids': None, - 'parent': None, - 'parent-id': None, - 'parent-title': None, - 'partition-table': None, - 'partition-table-id': None, - 'puppet-ca-proxy': None, - 'puppet-ca-proxy-id': None, - 'puppet-class-ids': None, - 'puppet-classes': None, - 'puppet-proxy': None, - 'puppet-proxy-id': None, - 'pxe-loader': None, - 'query-organization': None, - 'query-organization-id': None, - 'query-organization-label': None, - 'realm': None, - 'realm-id': None, - 'root-password': None, - 'subnet': None, - 'subnet-id': None, - } - - return create_object(HostGroup, args, options) - - -@cacheable -def make_medium(options=None): - """Creates a Medium - - :param options: Check options using `hammer medium create --help` on satellite. - - :returns Medium object - """ - # Assigning default values for attributes - args = { - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric(6), - 'operatingsystem-ids': None, - 'operatingsystems': None, - 'organization-ids': None, - 'organizations': None, - 'os-family': None, - 'path': 'http://{}'.format(gen_string('alpha', 6)), - } - - return create_object(Medium, args, options) - - -@cacheable -def make_environment(options=None): - """Creates a Puppet Environment - - :param options: Check options using `hammer environment create --help` on satellite. - - :returns Environment object - """ - # Assigning default values for attributes - args = { - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric(6), - 'organization-ids': None, - 'organizations': None, - } - - return create_object(Environment, args, options) - - -@cacheable -def make_lifecycle_environment(options=None): - """Creates a Lifecycle Environment - - :param options: Check options using `hammer lifecycle-environment create --help` on satellite. - - :returns LifecycleEnvironment object - """ - # Organization Name, Label or ID is a required field. 
- if ( - not options - or 'organization' not in options - and 'organization-label' not in options - and 'organization-id' not in options - ): - raise CLIFactoryError('Please provide a valid Organization.') - - if not options.get('prior'): - options['prior'] = 'Library' - - # Assigning default values for attributes - args = { - 'description': None, - 'label': None, - 'name': gen_alphanumeric(6), - 'organization': None, - 'organization-id': None, - 'organization-label': None, - 'prior': None, - 'registry-name-pattern': None, - 'registry-unauthenticated-pull': None, - } - - return create_object(LifecycleEnvironment, args, options) - - -@cacheable -def make_tailoringfile(options=None): - """Creates a tailoring File - - :param options: Check options using `hammer tailoring-file create --help` on satellite. - - :returns TailoringFile object - """ - # Assigning default values for attributes - args = { - 'scap-file': None, - 'original-filename': None, - 'location-ids': None, - 'locations': None, - 'name': gen_alphanumeric().lower(), - 'organization-ids': None, - 'organizations': None, - } - - return create_object(TailoringFiles, args, options) - - -@cacheable -def make_template(options=None): - """Creates a Template - - :param options: Check options using `hammer template create --help` on satellite. - - :returns Template object - """ - # Assigning default values for attribute - args = { - 'audit-comment': None, - 'file': f'/root/{gen_alphanumeric()}', - 'location-ids': None, - 'locked': None, - 'name': gen_alphanumeric(6), - 'operatingsystem-ids': None, - 'organization-ids': None, - 'type': random.choice(constants.TEMPLATE_TYPES), - } - - # Write content to file or random text - if options is not None and 'content' in options.keys(): - content = options.pop('content') - else: - content = gen_alphanumeric() - - # Special handling for template factory - (_, layout) = mkstemp(text=True) - chmod(layout, 0o700) - with open(layout, 'w') as ptable: - ptable.write(content) - # Upload file to server - ssh.get_client().put(layout, args['file']) - # End - Special handling for template factory - - return create_object(Template, args, options) - - -@cacheable -def make_template_input(options=None): - """ - Creates Template Input - - :param options: Check options using `hammer template-input create --help` on satellite. - - :returns TemplateInput object - """ - if not options or not options.get('input-type') or not options.get('template-id'): - raise CLIFactoryError('Please provide valid template-id and input-type') - - args = { - 'advanced': None, - 'description': None, - 'fact-name': None, - 'input-type': None, - 'location': None, - 'location-id': None, - 'location-title': None, - 'name': gen_alphanumeric(6), - 'options': None, - 'organization': None, - 'organization-id': None, - 'organization-title': None, - 'puppet-class-name': None, - 'puppet-parameter-name': None, - 'required': None, - 'resource-type': None, - 'template-id': None, - 'value-type': None, - 'variable-name': None, - } - return create_object(TemplateInput, args, options) - - -@cacheable -def make_virt_who_config(options=None): - """Creates a Virt Who Configuration - - :param options: Check options using `hammer virt-who-config create --help` on satellite. 
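Several factories above (``make_content_credential``, ``make_partition_table``, ``make_template``) share the same write-locally-then-upload step; distilled into one helper (the name is illustrative)::

    import os
    from tempfile import mkstemp

    from robottelo.utils import ssh

    def upload_text(content, remote_path):
        """Write content to a private local temp file, then push it to the server."""
        _, local_path = mkstemp(text=True)
        os.chmod(local_path, 0o700)
        with open(local_path, 'w') as handle:
            handle.write(content)
        ssh.get_client().put(local_path, remote_path)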
- - :returns VirtWhoConfig object - """ - args = { - 'blacklist': None, - 'debug': None, - 'filtering-mode': 'none', - 'hypervisor-id': 'hostname', - 'hypervisor-password': None, - 'hypervisor-server': None, - 'hypervisor-type': None, - 'hypervisor-username': None, - 'interval': '60', - 'name': gen_alphanumeric(6), - 'no-proxy': None, - 'organization': None, - 'organization-id': None, - 'organization-title': None, - 'proxy': None, - 'satellite-url': settings.server.hostname, - 'whitelist': None, - } - return create_object(VirtWhoConfig, args, options) - - -def activationkey_add_subscription_to_repo(options=None): - """Helper function that adds subscription to an activation key""" - if ( - not options - or not options.get('organization-id') - or not options.get('activationkey-id') - or not options.get('subscription') - ): - raise CLIFactoryError('Please provide valid organization, activation key and subscription.') - # List the subscriptions in given org - subscriptions = Subscription.list( - {'organization-id': options['organization-id']}, per_page=False - ) - # Add subscription to activation-key - if options['subscription'] not in (sub['name'] for sub in subscriptions): - raise CLIFactoryError( - 'Subscription {} not found in the given org'.format(options['subscription']) - ) - for subscription in subscriptions: - if subscription['name'] == options['subscription']: - if subscription['quantity'] != 'Unlimited' and int(subscription['quantity']) == 0: - raise CLIFactoryError('All the subscriptions are already consumed') - try: - ActivationKey.add_subscription( - { - 'id': options['activationkey-id'], - 'subscription-id': subscription['id'], - 'quantity': 1, - } - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to add subscription to activation key\n{err.msg}') - - -def setup_org_for_a_custom_repo(options=None): - """Sets up Org for the given custom repo by: - - 1. Checks if organization and lifecycle environment were given, otherwise - creates new ones. - 2. Creates a new product with the custom repo. Synchronizes the repo. - 3. Checks if content view was given, otherwise creates a new one and - - adds the RH repo - - publishes - - promotes to the lifecycle environment - 4. Checks if activation key was given, otherwise creates a new one and - associates it with the content view. - 5. 
Adds the custom repo subscription to the activation key - - :return: A dictionary with the entity ids of Activation key, Content view, - Lifecycle Environment, Organization, Product and Repository - - """ - if not options or not options.get('url'): - raise CLIFactoryError('Please provide valid custom repo URL.') - # Create new organization and lifecycle environment if needed - if options.get('organization-id') is None: - org_id = make_org()['id'] - else: - org_id = options['organization-id'] - if options.get('lifecycle-environment-id') is None: - env_id = make_lifecycle_environment({'organization-id': org_id})['id'] - else: - env_id = options['lifecycle-environment-id'] - # Create custom product and repository - custom_product = make_product({'organization-id': org_id}) - custom_repo = make_repository( - {'content-type': 'yum', 'product-id': custom_product['id'], 'url': options.get('url')} - ) - # Synchronize custom repository - try: - Repository.synchronize({'id': custom_repo['id']}) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') - # Create CV if needed and associate repo with it - if options.get('content-view-id') is None: - cv_id = make_content_view({'organization-id': org_id})['id'] - else: - cv_id = options['content-view-id'] - try: - ContentView.add_repository( - {'id': cv_id, 'organization-id': org_id, 'repository-id': custom_repo['id']} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') - # Publish a new version of CV - try: - ContentView.publish({'id': cv_id}) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') - # Get the version id - cvv = ContentView.info({'id': cv_id})['versions'][-1] - # Promote version to next env - try: - ContentView.version_promote( - {'id': cvv['id'], 'organization-id': org_id, 'to-lifecycle-environment-id': env_id} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to promote version to next environment\n{err.msg}') - # Create activation key if needed and associate content view with it - if options.get('activationkey-id') is None: - activationkey_id = make_activation_key( - { - 'content-view-id': cv_id, - 'lifecycle-environment-id': env_id, - 'organization-id': org_id, - } - )['id'] - else: - activationkey_id = options['activationkey-id'] - # Given activation key may have no (or different) CV associated. - # Associate activation key with CV just to be sure - try: - ActivationKey.update( - {'content-view-id': cv_id, 'id': activationkey_id, 'organization-id': org_id} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to associate activation-key with CV\n{err.msg}') - - # Add custom_product subscription to activation-key, if SCA mode is disabled - if ( - Org.info({'fields': 'Simple content access', 'id': org_id})['simple-content-access'] - != 'Enabled' - ): - activationkey_add_subscription_to_repo( - { - 'activationkey-id': activationkey_id, - 'organization-id': org_id, - 'subscription': custom_product['name'], - } - ) - return { - 'activationkey-id': activationkey_id, - 'content-view-id': cv_id, - 'lifecycle-environment-id': env_id, - 'organization-id': org_id, - 'product-id': custom_product['id'], - 'repository-id': custom_repo['id'], - } - - -def _setup_org_for_a_rh_repo(options=None): - """Sets up Org for the given Red Hat repository by: - - 1. 
Checks if organization and lifecycle environment were given, otherwise - creates new ones. - 2. Clones and uploads manifest. - 3. Enables RH repo and synchronizes it. - 4. Checks if content view was given, otherwise creates a new one and - - adds the RH repo - - publishes - - promotes to the lifecycle environment - 5. Checks if activation key was given, otherwise creates a new one and - associates it with the content view. - 6. Adds the RH repo subscription to the activation key - - Note that in most cases you should use ``setup_org_for_a_rh_repo`` instead - as it's more flexible. - - :return: A dictionary with the entity ids of Activation key, Content view, - Lifecycle Environment, Organization and Repository - - """ - if ( - not options - or not options.get('product') - or not options.get('repository-set') - or not options.get('repository') - ): - raise CLIFactoryError('Please provide valid product, repository-set and repo.') - # Create new organization and lifecycle environment if needed - if options.get('organization-id') is None: - org_id = make_org()['id'] - else: - org_id = options['organization-id'] - if options.get('lifecycle-environment-id') is None: - env_id = make_lifecycle_environment({'organization-id': org_id})['id'] - else: - env_id = options['lifecycle-environment-id'] - # Enable repo from Repository Set - try: - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': options['repository-set'], - 'organization-id': org_id, - 'product': options['product'], - 'releasever': options.get('releasever'), - } - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to enable repository set\n{err.msg}') - # Fetch repository info - try: - rhel_repo = Repository.info( - { - 'name': options['repository'], - 'organization-id': org_id, - 'product': options['product'], - } - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to fetch repository info\n{err.msg}') - # Synchronize the RH repository - try: - Repository.synchronize( - { - 'name': options['repository'], - 'organization-id': org_id, - 'product': options['product'], - } - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') - # Create CV if needed and associate repo with it - if options.get('content-view-id') is None: - cv_id = make_content_view({'organization-id': org_id})['id'] - else: - cv_id = options['content-view-id'] - try: - ContentView.add_repository( - {'id': cv_id, 'organization-id': org_id, 'repository-id': rhel_repo['id']} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') - # Publish a new version of CV - try: - ContentView.publish({'id': cv_id}) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') - # Get the version id - try: - cvv = ContentView.info({'id': cv_id})['versions'][-1] - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to fetch content view info\n{err.msg}') - # Promote version1 to next env - try: - ContentView.version_promote( - {'id': cvv['id'], 'organization-id': org_id, 'to-lifecycle-environment-id': env_id} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to promote version to next environment\n{err.msg}') - # Create activation key if needed and associate content view with it - if options.get('activationkey-id') is None: - activationkey_id = make_activation_key( - { - 'content-view-id': cv_id, - 'lifecycle-environment-id': 
env_id, - 'organization-id': org_id, - } - )['id'] - else: - activationkey_id = options['activationkey-id'] - # Given activation key may have no (or different) CV associated. - # Associate activation key with CV just to be sure - try: - ActivationKey.update( - {'id': activationkey_id, 'organization-id': org_id, 'content-view-id': cv_id} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to associate activation-key with CV\n{err.msg}') - - # Add default subscription to activation-key, if SCA mode is disabled - if ( - Org.info({'fields': 'Simple content access', 'id': org_id})['simple-content-access'] - != 'Enabled' - ): - if constants.DEFAULT_SUBSCRIPTION_NAME not in ActivationKey.subscriptions( - {'id': activationkey_id, 'organization-id': org_id} - ): - activationkey_add_subscription_to_repo( - { - 'organization-id': org_id, - 'activationkey-id': activationkey_id, - 'subscription': options.get( - 'subscription', constants.DEFAULT_SUBSCRIPTION_NAME - ), - } - ) - - return { - 'activationkey-id': activationkey_id, - 'content-view-id': cv_id, - 'lifecycle-environment-id': env_id, - 'organization-id': org_id, - 'repository-id': rhel_repo['id'], - } - - -def setup_org_for_a_rh_repo(options=None, force_manifest_upload=False, force_use_cdn=False): - """Wrapper above ``_setup_org_for_a_rh_repo`` to use custom downstream repo - instead of CDN's 'Satellite Capsule', 'Satellite Tools' and base OS repos if - ``settings.robottelo.cdn == 0`` and URL for custom repositories is set in properties. - - :param options: a dict with options to pass to function - ``_setup_org_for_a_rh_repo``. See its docstring for more details - :param force_use_cdn: bool flag whether to use CDN even if there's - downstream repo available and ``settings.robottelo.cdn == 0``. - :param force_manifest_upload: bool flag whether to upload a manifest to - organization even if downstream custom repo is used instead of CDN. - Useful when test relies on organization with manifest (e.g. uses some - other RH repo afterwards). Defaults to False. - - :return: a dict with entity ids (see ``_setup_org_for_a_rh_repo`` and - ``setup_org_for_a_custom_repo``). 
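The branch the wrapper takes below reduces to a single predicate; stated on its own (assumes the module's ``settings`` import, ``repo_url`` is illustrative)::

    def use_cdn(force_use_cdn, repo_url):
        """Fall back to the Red Hat CDN unless a usable downstream repo URL is configured."""
        return bool(force_use_cdn or settings.robottelo.cdn or not repo_url)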
- """ - custom_repo_url = None - if options.get('repository') == constants.REPOS['rhst6']['name']: - custom_repo_url = settings.repos.sattools_repo.rhel6 - elif options.get('repository') == constants.REPOS['rhst7']['name']: - custom_repo_url = settings.repos.sattools_repo.rhel7 - elif options.get('repository') == constants.REPOS['rhel6']['name']: - custom_repo_url = settings.repos.rhel6_os - elif options.get('repository') == constants.REPOS['rhel7']['name']: - custom_repo_url = settings.repos.rhel7_os - elif 'Satellite Capsule' in options.get('repository'): - custom_repo_url = settings.repos.capsule_repo - if force_use_cdn or settings.robottelo.cdn or not custom_repo_url: - return _setup_org_for_a_rh_repo(options) - else: - options['url'] = custom_repo_url - result = setup_org_for_a_custom_repo(options) - if force_manifest_upload: - with clone() as manifest: - ssh.get_client().put(manifest, manifest.filename) - try: - Subscription.upload( - {'file': manifest.filename, 'organization-id': result.get('organization-id')} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') - - # Add default subscription to activation-key, if SCA mode is disabled - if ( - Org.info({'fields': 'Simple content access', 'id': result['organization-id']})[ - 'simple-content-access' - ] - != 'Enabled' - ): - activationkey_add_subscription_to_repo( - { - 'activationkey-id': result['activationkey-id'], - 'organization-id': result['organization-id'], - 'subscription': constants.DEFAULT_SUBSCRIPTION_NAME, - } - ) - return result - - -def _get_capsule_vm_distro_repos(distro): - """Return the right RH repos info for the capsule setup""" - rh_repos = [] - if distro == 'rhel7': - # Red Hat Enterprise Linux 7 Server - rh_product_arch = constants.REPOS['rhel7']['arch'] - rh_product_releasever = constants.REPOS['rhel7']['releasever'] - rh_repos.append( - { - 'product': constants.PRDS['rhel'], - 'repository-set': constants.REPOSET['rhel7'], - 'repository': constants.REPOS['rhel7']['name'], - 'repository-id': constants.REPOS['rhel7']['id'], - 'releasever': rh_product_releasever, - 'arch': rh_product_arch, - 'cdn': True, - } - ) - # Red Hat Software Collections (for 7 Server) - rh_repos.append( - { - 'product': constants.PRDS['rhscl'], - 'repository-set': constants.REPOSET['rhscl7'], - 'repository': constants.REPOS['rhscl7']['name'], - 'repository-id': constants.REPOS['rhscl7']['id'], - 'releasever': rh_product_releasever, - 'arch': rh_product_arch, - 'cdn': True, - } - ) - # Red Hat Satellite Capsule 6.2 (for RHEL 7 Server) - rh_repos.append( - { - 'product': constants.PRDS['rhsc'], - 'repository-set': constants.REPOSET['rhsc7'], - 'repository': constants.REPOS['rhsc7']['name'], - 'repository-id': constants.REPOS['rhsc7']['id'], - 'url': settings.repos.capsule_repo, - 'cdn': settings.robottelo.cdn or not settings.repos.capsule_repo, - } - ) - else: - raise CLIFactoryError(f'distro "{distro}" not supported') - - return rh_product_arch, rh_product_releasever, rh_repos - - -def add_role_permissions(role_id, resource_permissions): - """Create role permissions found in resource permissions dict - - :param role_id: The role id - :param resource_permissions: a dict containing resources with permission - names and other Filter options - - Usage:: - - role = make_role({'organization-id': org['id']}) - resource_permissions = { - 'Katello::ActivationKey': { - 'permissions': [ - 'view_activation_keys', - 'create_activation_keys', - 'edit_activation_keys', - 'destroy_activation_keys' - ], - 
'search': "name ~ {}".format(ak_name_like) - }, - } - add_role_permissions(role['id'], resource_permissions) - """ - available_permissions = Filter.available_permissions() - # group the available permissions by resource type - available_rc_permissions = {} - for permission in available_permissions: - permission_resource = permission['resource'] - if permission_resource not in available_rc_permissions: - available_rc_permissions[permission_resource] = [] - available_rc_permissions[permission_resource].append(permission) - # create only the required role permissions per resource type - for resource_type, permission_data in resource_permissions.items(): - permission_names = permission_data.get('permissions') - if permission_names is None: - raise CLIFactoryError(f'Permissions not provided for resource: {resource_type}') - # ensure that the required resource type is available - if resource_type not in available_rc_permissions: - raise CLIFactoryError( - f'Resource "{resource_type}" not in the list of available resources' - ) - available_permission_names = [ - permission['name'] - for permission in available_rc_permissions[resource_type] - if permission['name'] in permission_names - ] - # ensure that all the required permissions are available - missing_permissions = set(permission_names).difference(set(available_permission_names)) - if missing_permissions: - raise CLIFactoryError( - 'Permissions "{}" are not available in Resource "{}"'.format( - list(missing_permissions), resource_type - ) - ) - # Create the current resource type role permissions - options = {'role-id': role_id} - options.update(permission_data) - make_filter(options=options) - - -def setup_cdn_and_custom_repositories(org_id, repos, download_policy='on_demand', synchronize=True): - """Setup cdn and custom repositories - - :param int org_id: The organization id - :param list repos: a list of dict repositories options - :param str download_policy: update the repositories with this download - policy - :param bool synchronize: Whether to synchronize the repositories. 
- :return: a dict containing the content view and repos info - """ - custom_product = None - repos_info = [] - for repo in repos: - custom_repo_url = repo.get('url') - cdn = repo.get('cdn', False) - if not cdn and not custom_repo_url: - raise CLIFactoryError('Custom repository with url not supplied') - if cdn: - RepositorySet.enable( - { - 'organization-id': org_id, - 'product': repo['product'], - 'name': repo['repository-set'], - 'basearch': repo.get('arch', constants.DEFAULT_ARCHITECTURE), - 'releasever': repo.get('releasever'), - } - ) - repo_info = Repository.info( - {'organization-id': org_id, 'name': repo['repository'], 'product': repo['product']} - ) - else: - if custom_product is None: - custom_product = make_product_wait({'organization-id': org_id}) - repo_info = make_repository( - { - 'product-id': custom_product['id'], - 'organization-id': org_id, - 'url': custom_repo_url, - } - ) - if download_policy: - # Set download policy - Repository.update({'download-policy': download_policy, 'id': repo_info['id']}) - repos_info.append(repo_info) - if synchronize: - # Synchronize the repositories - for repo_info in repos_info: - Repository.synchronize({'id': repo_info['id']}, timeout=4800000) - return custom_product, repos_info - - -def setup_cdn_and_custom_repos_content( - target_sat, - org_id, - lce_id=None, - repos=None, - upload_manifest=True, - download_policy='on_demand', - rh_subscriptions=None, - default_cv=False, -): - """Setup cdn and custom repositories, content view and activations key - - :param target_sat: sat object - :param int org_id: The organization id - :param int lce_id: the lifecycle environment id - :param list repos: a list of dict repositories options - :param bool default_cv: whether to use the Default Organization CV - :param bool upload_manifest: whether to upload the organization manifest - :param str download_policy: update the repositories with this download - policy - :param list rh_subscriptions: a list of RH subscription to attach to - activation key - :return: a dict containing the activation key, content view and repos info - """ - if lce_id is None and not default_cv: - raise TypeError('lce_id must be specified') - if repos is None: - repos = [] - if rh_subscriptions is None: - rh_subscriptions = [] - - if upload_manifest: - # Upload the organization manifest - try: - target_sat.upload_manifest(org_id, clone(), interface='CLI') - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') - - custom_product, repos_info = setup_cdn_and_custom_repositories( - org_id=org_id, repos=repos, download_policy=download_policy - ) - if default_cv: - activation_key = make_activation_key( - {'organization-id': org_id, 'lifecycle-environment': 'Library'} - ) - content_view = ContentView.info( - {'organization-id': org_id, 'name': 'Default Organization View'} - ) - else: - # Create a content view - content_view = make_content_view({'organization-id': org_id}) - # Add repositories to content view - for repo_info in repos_info: - ContentView.add_repository( - { - 'id': content_view['id'], - 'organization-id': org_id, - 'repository-id': repo_info['id'], - } - ) - # Publish the content view - ContentView.publish({'id': content_view['id']}) - # Get the latest content view version id - content_view_version = ContentView.info({'id': content_view['id']})['versions'][-1] - # Promote content view version to lifecycle environment - ContentView.version_promote( - { - 'id': content_view_version['id'], - 'organization-id': org_id, - 
'to-lifecycle-environment-id': lce_id, - } - ) - content_view = ContentView.info({'id': content_view['id']}) - activation_key = make_activation_key( - { - 'organization-id': org_id, - 'lifecycle-environment-id': lce_id, - 'content-view-id': content_view['id'], - } - ) - # Get organization subscriptions - subscriptions = Subscription.list({'organization-id': org_id}, per_page=False) - # Add subscriptions to activation-key - needed_subscription_names = list(rh_subscriptions) - if custom_product: - needed_subscription_names.append(custom_product['name']) - added_subscription_names = [] - for subscription in subscriptions: - if ( - subscription['name'] in needed_subscription_names - and subscription['name'] not in added_subscription_names - ): - ActivationKey.add_subscription( - {'id': activation_key['id'], 'subscription-id': subscription['id'], 'quantity': 1} - ) - added_subscription_names.append(subscription['name']) - if len(added_subscription_names) == len(needed_subscription_names): - break - missing_subscription_names = set(needed_subscription_names).difference( - set(added_subscription_names) - ) - if missing_subscription_names: - raise CLIFactoryError(f'Missing subscriptions: {missing_subscription_names}') - data = dict( - activation_key=activation_key, - content_view=content_view, - product=custom_product, - repos=repos_info, - ) - if lce_id: - lce = LifecycleEnvironment.info({'id': lce_id, 'organization-id': org_id}) - data['lce'] = lce - - return data - - -@cacheable -def make_http_proxy(options=None): - """Creates a HTTP Proxy - - :param options: Check options using `hammer http-proxy create --help` on satellite. - - :returns HttpProxy object - """ - # Assigning default values for attributes - args = { - 'location': None, - 'location-id': None, - 'location-title': None, - 'locations': None, - 'location-ids': None, - 'location-titles': None, - 'name': gen_string('alpha', 15), - 'organization': None, - 'organization-id': None, - 'organization-title': None, - 'organizations': None, - 'organization-ids': None, - 'organization-titles': None, - 'password': None, - 'url': '{}:{}'.format(gen_url(scheme='https'), gen_integer(min_value=10, max_value=9999)), - 'username': None, - } - - return create_object(HttpProxy, args, options) diff --git a/robottelo/cli/hammer.py b/robottelo/cli/hammer.py index f6ddb11b4f3..b84e6bf2158 100644 --- a/robottelo/cli/hammer.py +++ b/robottelo/cli/hammer.py @@ -26,23 +26,39 @@ def _normalize_obj(obj): """ if isinstance(obj, dict): return {_normalize(k): _normalize_obj(v) for k, v in obj.items()} - elif isinstance(obj, list): + if isinstance(obj, list): return [_normalize_obj(v) for v in obj] # doing this to conform to csv parser - elif isinstance(obj, int) and not isinstance(obj, bool): + if isinstance(obj, int) and not isinstance(obj, bool): return str(obj) return obj +def is_csv(output): + """Verifies if the output string is eligible for converting into CSV""" + sniffer = csv.Sniffer() + try: + sniffer.sniff(output) + return True + except csv.Error: + return False + + def parse_csv(output): """Parse CSV output from Hammer CLI and convert it to python dictionary.""" # ignore warning about puppet and ostree deprecation output.replace('Puppet and OSTree will no longer be supported in Katello 3.16\n', '') - reader = csv.reader(output.splitlines()) + is_rex = 'Job invocation' in output + is_pkg_list = 'Nvra' in output + # Validate if the output is eligible for CSV conversions else return as it is + if not is_csv(output) and not is_rex and not is_pkg_list: + 
return output + output = output.splitlines()[0:2] if is_rex else output.splitlines() + reader = csv.reader(output) # Generate the key names, spaces will be converted to dashes "-" keys = [_normalize(header) for header in next(reader)] # For each entry, create a dict mapping each key with each value - return [dict(zip(keys, values)) for values in reader if len(values) > 0] + return [dict(zip(keys, values, strict=True)) for values in reader if len(values) > 0] def parse_help(output): @@ -183,7 +199,7 @@ def parse_info(output): if line.startswith(' '): # sub-properties are indented # values are separated by ':' or '=>', but not by '::' which can be # entity name like 'test::params::keys' - if line.find(':') != -1 and not line.find('::') != -1: + if line.find(':') != -1 and line.find('::') == -1: key, value = line.lstrip().split(":", 1) elif line.find('=>') != -1 and len(line.lstrip().split(" =>", 1)) == 2: key, value = line.lstrip().split(" =>", 1) diff --git a/robottelo/cli/host.py b/robottelo/cli/host.py index 6ec096cfe10..05f042f9dde 100644 --- a/robottelo/cli/host.py +++ b/robottelo/cli/host.py @@ -215,9 +215,7 @@ def reboot(cls, options=None): cls.command_sub = 'reboot' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def reports(cls, options=None): @@ -268,9 +266,7 @@ def start(cls, options=None): cls.command_sub = 'start' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def status(cls, options=None): @@ -290,9 +286,7 @@ def status(cls, options=None): cls.command_sub = 'status' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def stop(cls, options=None): @@ -313,9 +307,7 @@ def stop(cls, options=None): cls.command_sub = 'stop' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def subscription_register(cls, options=None): diff --git a/robottelo/cli/lifecycleenvironment.py b/robottelo/cli/lifecycleenvironment.py index 72435669b62..c22d2268746 100644 --- a/robottelo/cli/lifecycleenvironment.py +++ b/robottelo/cli/lifecycleenvironment.py @@ -29,9 +29,7 @@ class LifecycleEnvironment(Base): @classmethod def list(cls, options=None, per_page=False): - result = super().list(options, per_page=per_page) - - return result + return super().list(options, per_page=per_page) @classmethod def paths(cls, options=None): diff --git a/robottelo/cli/operatingsys.py b/robottelo/cli/operatingsys.py index 1a1a63bdebd..8bf4411d56d 100644 --- a/robottelo/cli/operatingsys.py +++ b/robottelo/cli/operatingsys.py @@ -45,9 +45,7 @@ def add_architecture(cls, options=None): cls.command_sub = 'add-architecture' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def add_provisioning_template(cls, options=None): @@ -57,9 +55,7 @@ def add_provisioning_template(cls, options=None): cls.command_sub = 'add-provisioning-template' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def add_ptable(cls, options=None): @@ -69,9 +65,7 @@ def add_ptable(cls, options=None): cls.command_sub = 'add-ptable' - result = cls.execute(cls._construct_command(options)) - - return result + return 
cls.execute(cls._construct_command(options)) @classmethod def remove_architecture(cls, options=None): @@ -81,9 +75,7 @@ def remove_architecture(cls, options=None): cls.command_sub = 'remove-architecture' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def remove_provisioning_template(cls, options=None): @@ -93,9 +85,7 @@ def remove_provisioning_template(cls, options=None): cls.command_sub = 'remove-provisioning-template' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def remove_ptable(cls, options=None): @@ -105,6 +95,4 @@ def remove_ptable(cls, options=None): cls.command_sub = 'remove-ptable ' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) diff --git a/robottelo/cli/org.py b/robottelo/cli/org.py index 0ae1308013a..38103aff3a5 100644 --- a/robottelo/cli/org.py +++ b/robottelo/cli/org.py @@ -21,6 +21,7 @@ add-subnet Associate a resource add-user Associate a resource create Create an organization + configure-cdn Update the CDN configuration delete Delete an organization delete-parameter Delete parameter for an organization. info Show an organization @@ -166,3 +167,9 @@ def remove_user(cls, options=None): """Removes an user from an org""" cls.command_sub = 'remove-user' return cls.execute(cls._construct_command(options)) + + @classmethod + def configure_cdn(cls, options=None): + """Update the CDN configuration""" + cls.command_sub = 'configure-cdn' + return cls.execute(cls._construct_command(options)) diff --git a/robottelo/cli/product.py b/robottelo/cli/product.py index efbf7ed09e1..7a7f03eeff4 100644 --- a/robottelo/cli/product.py +++ b/robottelo/cli/product.py @@ -39,9 +39,7 @@ def remove_sync_plan(cls, options=None): cls.command_sub = 'remove-sync-plan' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def set_sync_plan(cls, options=None): @@ -51,9 +49,7 @@ def set_sync_plan(cls, options=None): cls.command_sub = 'set-sync-plan' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def synchronize(cls, options=None): @@ -69,6 +65,4 @@ def update_proxy(cls, options=None): cls.command_sub = 'update-proxy' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) diff --git a/robottelo/cli/report_template.py b/robottelo/cli/report_template.py index 43b92d0bb98..962c5929bc7 100644 --- a/robottelo/cli/report_template.py +++ b/robottelo/cli/report_template.py @@ -26,9 +26,8 @@ from robottelo import ssh from robottelo.cli.base import Base -from robottelo.cli.base import CLIError -from robottelo.constants import DataFile -from robottelo.constants import REPORT_TEMPLATE_FILE +from robottelo.constants import REPORT_TEMPLATE_FILE, DataFile +from robottelo.exceptions import CLIError class ReportTemplate(Base): diff --git a/robottelo/cli/repository.py b/robottelo/cli/repository.py index 85296f25e51..94fa8baa180 100644 --- a/robottelo/cli/repository.py +++ b/robottelo/cli/repository.py @@ -60,7 +60,7 @@ def synchronize(cls, options, return_raw_response=None, timeout=3600000): cls.command_sub = 'synchronize' return cls.execute( cls._construct_command(options), - output_format='csv', + 
output_format='base', ignore_stderr=True, return_raw_response=return_raw_response, timeout=timeout, diff --git a/robottelo/cli/sm_packages.py b/robottelo/cli/sm_packages.py index d56a1156ec9..d4674279172 100644 --- a/robottelo/cli/sm_packages.py +++ b/robottelo/cli/sm_packages.py @@ -30,6 +30,7 @@ class Packages(Base): def lock(cls, options=None): """Build satellite-maintain packages lock""" cls.command_sub = 'lock' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -37,6 +38,7 @@ def lock(cls, options=None): def unlock(cls, options=None): """Build satellite-maintain packages unlock""" cls.command_sub = 'unlock' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -44,6 +46,7 @@ def unlock(cls, options=None): def is_locked(cls, options=None): """Build satellite-maintain packages is-locked""" cls.command_sub = 'is-locked' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -51,6 +54,7 @@ def is_locked(cls, options=None): def status(cls, options=None): """Build satellite-maintain packages status""" cls.command_sub = 'status' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -74,5 +78,6 @@ def update(cls, packages='', options=None): def check_update(cls, options=None): """Build satellite-maintain packages check-update""" cls.command_sub = 'check-update' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) diff --git a/robottelo/cli/srpm.py b/robottelo/cli/srpm.py index b0e71d8969d..62578bbaa69 100644 --- a/robottelo/cli/srpm.py +++ b/robottelo/cli/srpm.py @@ -25,15 +25,11 @@ def info(cls, options=None): """Show a SRPM Info""" cls.command_sub = 'info' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def list(cls, options=None): """List SRPMs""" cls.command_sub = 'list' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') diff --git a/robottelo/cli/subscription.py b/robottelo/cli/subscription.py index 86b57d51d41..0e22eaeaccd 100644 --- a/robottelo/cli/subscription.py +++ b/robottelo/cli/subscription.py @@ -47,7 +47,7 @@ def refresh_manifest(cls, options=None, timeout=None): return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) @classmethod - def manifest_history(cls, options=None): + def manifest_history(cls, options=None, timeout=None): """Provided history for subscription manifest""" cls.command_sub = 'manifest-history' - return cls.execute(cls._construct_command(options)) + return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout) diff --git a/robottelo/cli/syncplan.py b/robottelo/cli/syncplan.py index 3f8b9b8b347..151a72acb84 100644 --- a/robottelo/cli/syncplan.py +++ b/robottelo/cli/syncplan.py @@ -17,9 +17,20 @@ update """ from robottelo.cli.base import Base +from robottelo.exceptions import CLIError class SyncPlan(Base): """Manipulates Katello engine's sync-plan command.""" command_base = 'sync-plan' + + @classmethod + def create(cls, options=None): + """Create a SyncPlan""" + cls.command_sub = 'create' + + if options.get('interval') == 'custom cron' and options.get('cron-expression') is None: + raise 
CLIError('Missing "cron-expression" option for "custom cron" interval.') + + return super().create(options) diff --git a/robottelo/cli/template.py b/robottelo/cli/template.py index 4dbe709c33b..3729fc4bf76 100644 --- a/robottelo/cli/template.py +++ b/robottelo/cli/template.py @@ -46,18 +46,14 @@ def add_operatingsystem(cls, options=None): """Adds operating system, requires "id" and "operatingsystem-id".""" cls.command_sub = 'add-operatingsystem' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def remove_operatingsystem(cls, options=None): """Remove operating system, requires "id" and "operatingsystem-id".""" cls.command_sub = 'remove-operatingsystem' - result = cls.execute(cls._construct_command(options), output_format='csv') - - return result + return cls.execute(cls._construct_command(options), output_format='csv') @classmethod def clone(cls, options=None): diff --git a/robottelo/cli/template_input.py b/robottelo/cli/template_input.py index 1393642cb35..cfe548384c3 100644 --- a/robottelo/cli/template_input.py +++ b/robottelo/cli/template_input.py @@ -15,8 +15,7 @@ info Show template input details list List template inputs """ -from robottelo.cli.base import Base -from robottelo.cli.base import CLIError +from robottelo.cli.base import Base, CLIError class TemplateInput(Base): diff --git a/robottelo/cli/template_sync.py b/robottelo/cli/template_sync.py index 6da3b683653..72ee70fa00f 100644 --- a/robottelo/cli/template_sync.py +++ b/robottelo/cli/template_sync.py @@ -50,15 +50,11 @@ def exports(cls, options=None): """Export Satellite Templates to Git/Local Directory.""" cls.command_base = 'export-templates' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) @classmethod def imports(cls, options=None): """Import Satellite Templates to Git/Local Directory.""" cls.command_base = 'import-templates' - result = cls.execute(cls._construct_command(options)) - - return result + return cls.execute(cls._construct_command(options)) diff --git a/robottelo/cli/webhook.py b/robottelo/cli/webhook.py index 70edda993e9..c7b93f76d99 100644 --- a/robottelo/cli/webhook.py +++ b/robottelo/cli/webhook.py @@ -13,10 +13,8 @@ Options: -h, --help Print help """ -from robottelo.cli.base import Base -from robottelo.cli.base import CLIError -from robottelo.constants import WEBHOOK_EVENTS -from robottelo.constants import WEBHOOK_METHODS +from robottelo.cli.base import Base, CLIError +from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS class Webhook(Base): @@ -43,5 +41,4 @@ def create(cls, options=None): 'See See "hammer webhook create --help" for the list of supported methods' ) - result = super().create(options) - return result + return super().create(options) diff --git a/robottelo/config/__init__.py b/robottelo/config/__init__.py index 197cc36f60f..cca2258c7e6 100644 --- a/robottelo/config/__init__.py +++ b/robottelo/config/__init__.py @@ -1,3 +1,4 @@ +import builtins import logging import os from pathlib import Path @@ -8,8 +9,7 @@ from nailgun.config import ServerConfig from robottelo.config.validators import VALIDATORS -from robottelo.logging import logger -from robottelo.logging import robottelo_root_dir +from robottelo.logging import logger, robottelo_root_dir if not os.getenv('ROBOTTELO_DIR'): # dynaconf robottelo file uses ROBOTELLO_DIR for screenshots @@ -21,36 +21,33 @@ def get_settings(): 
    :return: A validated Lazy settings object
    """
-    settings = LazySettings(
-        envvar_prefix="ROBOTTELO",
-        core_loaders=["YAML"],
-        settings_file="settings.yaml",
-        preload=["conf/*.yaml"],
-        includes=["settings.local.yaml", ".secrets.yaml", ".secrets_*.yaml"],
-        envless_mode=True,
-        lowercase_read=True,
-        load_dotenv=True,
-    )
-    settings.validators.register(**VALIDATORS)
-    try:
-        settings.validators.validate()
-    except ValidationError as err:
-        logger.warning(f'Dynaconf validation failed, continuing for the sake of unit tests\n{err}')
-
-    return settings
-
-
-settings = get_settings()
+    try:
+        if builtins.__sphinx_build__:
+            settings = None
+    except AttributeError:
+        settings = LazySettings(
+            envvar_prefix="ROBOTTELO",
+            core_loaders=["YAML"],
+            settings_file="settings.yaml",
+            preload=["conf/*.yaml"],
+            includes=["settings.local.yaml", ".secrets.yaml", ".secrets_*.yaml"],
+            envless_mode=True,
+            lowercase_read=True,
+            load_dotenv=True,
+        )
+        settings.validators.register(**VALIDATORS)
+        try:
+            settings.validators.validate()
+        except ValidationError as err:
+            logger.warning(
+                f'Dynaconf validation failed, continuing for the sake of unit tests\n{err}'
+            )
-if not os.getenv('BROKER_DIRECTORY'):
-    # set the BROKER_DIRECTORY envar so broker knows where to operate from
-    if _broker_dir := settings.robottelo.get('BROKER_DIRECTORY'):
-        logger.debug(f'Setting BROKER_DIRECTORY to {_broker_dir}')
-        os.environ['BROKER_DIRECTORY'] = _broker_dir
+    return settings
+settings = get_settings()
 robottelo_tmp_dir = Path(settings.robottelo.tmp_dir)
 robottelo_tmp_dir.mkdir(parents=True, exist_ok=True)
@@ -104,7 +101,7 @@ def user_nailgun_config(username=None, password=None):
     """
     creds = (username, password)
-    return ServerConfig(get_url(), creds, verify=False)
+    return ServerConfig(get_url(), creds, verify=settings.server.verify_ca)
 def setting_is_set(option):
@@ -143,12 +140,13 @@ def configure_nailgun():
       of this.
    * Set a default value for ``nailgun.entities.GPGKey.content``.
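A side note on the ``verify=settings.server.verify_ca`` change in this hunk: nailgun hands the value straight to requests, so it can be ``False``, ``True``, or a path to a CA bundle. A hedged sketch of building a ``ServerConfig`` that way (the hostname, credentials, and CA path below are hypothetical):

```python
# Sketch, not robottelo code: a nailgun ServerConfig whose TLS
# verification is driven by a setting-like value.
from nailgun.config import ServerConfig

def build_server_config(url, username, password, verify_ca=False):
    # verify_ca=False preserves the old insecure default; True or a CA
    # bundle path enables certificate verification (requests semantics).
    return ServerConfig(url, (username, password), verify=verify_ca)

cfg = build_server_config(
    'https://satellite.example.com',  # hypothetical host
    'admin',
    'changeme',
    verify_ca='/etc/pki/tls/certs/ca.crt',  # hypothetical CA bundle
)
```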
""" - from nailgun import entities - from nailgun import entity_mixins + from nailgun import entities, entity_mixins from nailgun.config import ServerConfig entity_mixins.CREATE_MISSING = True - entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(get_url(), get_credentials(), verify=False) + entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig( + get_url(), get_credentials(), verify=settings.server.verify_ca + ) gpgkey_init = entities.GPGKey.__init__ def patched_gpgkey_init(self, server_config=None, **kwargs): @@ -185,7 +183,7 @@ def configure_airgun(): 'webdriver': settings.ui.webdriver, 'webdriver_binary': settings.ui.webdriver_binary, }, - 'webkaifuku': {'config': settings.ui.webkaifuku} or {}, + 'webkaifuku': {'config': settings.ui.webkaifuku}, } ) diff --git a/robottelo/config/validators.py b/robottelo/config/validators.py index 7a8760608ad..013be110fe0 100644 --- a/robottelo/config/validators.py +++ b/robottelo/config/validators.py @@ -1,8 +1,6 @@ from dynaconf import Validator -from robottelo.constants import AZURERM_VALID_REGIONS -from robottelo.constants import VALID_GCE_ZONES - +from robottelo.constants import AZURERM_VALID_REGIONS, VALID_GCE_ZONES VALIDATORS = dict( supportability=[ @@ -25,12 +23,15 @@ ), Validator('server.admin_password', default='changeme'), Validator('server.admin_username', default='admin'), - Validator('server.deploy_workflow', must_exist=True), + Validator('server.deploy_workflows', must_exist=True, is_type_of=dict), + Validator('server.deploy_workflows.product', must_exist=True), + Validator('server.deploy_workflows.os', must_exist=True), Validator('server.deploy_arguments', must_exist=True, is_type_of=dict, default={}), Validator('server.scheme', default='https'), Validator('server.port', default=443), Validator('server.ssh_username', default='root'), Validator('server.ssh_password', default=None), + Validator('server.verify_ca', default=False), ], content_host=[ Validator('content_host.default_rhel_version', must_exist=True), @@ -66,10 +67,11 @@ Validator('bugzilla.api_key', must_exist=True), ], capsule=[ - Validator('capsule.instance_name', must_exist=True), Validator('capsule.version.release', must_exist=True), Validator('capsule.version.source', must_exist=True), - Validator('capsule.deploy_workflow', must_exist=True), + Validator('capsule.deploy_workflows', must_exist=True, is_type_of=dict), + Validator('capsule.deploy_workflows.product', must_exist=True), + Validator('capsule.deploy_workflows.os', must_exist=True), Validator('capsule.deploy_arguments', must_exist=True, is_type_of=dict, default={}), ], certs=[ @@ -223,7 +225,12 @@ Validator( 'oscap.content_path', must_exist=True, - ) + ), + Validator( + 'oscap.profile', + default='security7', + must_exist=True, + ), ], osp=[ Validator( diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index c2f305a7a15..6664aedc310 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -18,6 +18,7 @@ class Colored(Box): # This should be updated after each version branch SATELLITE_VERSION = "6.14" SATELLITE_OS_VERSION = "8" +SAT_NON_GA_VERSIONS = ['6.14', '6.15'] # Default system ports HTTPS_PORT = '443' @@ -58,7 +59,6 @@ class Colored(Box): 'ec2': 'EC2', 'vmware': 'VMware', 'openstack': 'RHEL OpenStack Platform', - 'rackspace': 'Rackspace', 'google': 'Google', 'azurerm': 'Azure Resource Manager', } @@ -116,6 +116,8 @@ class Colored(Box): GCE_MACHINE_TYPE_DEFAULT = 'f1-micro' GCE_NETWORK_DEFAULT = 'default' GCE_EXTERNAL_IP_DEFAULT = True 
+GCE_RHEL_CLOUD_PROJECTS = ['rhel-cloud', 'rhel-sap-cloud'] +GCE_TARGET_RHEL_IMAGE_NAME = 'rhel-7' # AzureRM specific constants AZURERM_VALID_REGIONS = [ @@ -199,6 +201,8 @@ class Colored(Box): INSTALL_MEDIUM_URL = "http://mirror.fakeos.org/%s/$major.$minor/os/$arch" +HTTPS_MEDIUM_URL = "https://partha.fedorapeople.org/test-repos/kickstart-zoo" + VALID_GPG_KEY_FILE = "valid_gpg_key.txt" ZOO_CUSTOM_GPG_KEY = "zoo_custom_gpgkey.txt" @@ -215,10 +219,12 @@ class Colored(Box): SYNC_INTERVAL = {'hour': "hourly", 'day': "daily", 'week': "weekly", 'custom': "custom cron"} REPO_TYPE = { + "deb": "deb", 'yum': "yum", 'ostree': "ostree", 'docker': "docker", - "ansible_collection": "ansible collection", + 'ansible_collection': "ansible_collection", + 'file': "file", } DOWNLOAD_POLICIES = { @@ -257,9 +263,6 @@ class Colored(Box): 'rhel-8-for-x86_64-appstream-rpms', ) -INSTALL_RHEL7_STEPS = 'yum -y install satellite' -INSTALL_RHEL8_STEPS = 'dnf -y module enable satellite:el8 && dnf -y install satellite' - # On importing manifests, Red Hat repositories are listed like this: # Product -> RepositorySet -> Repository # We need to first select the Product, then the reposet and then the repos @@ -287,9 +290,9 @@ class Colored(Box): 'rhsc7': 'Red Hat Satellite Capsule 6.11 (for RHEL 7 Server) (RPMs)', 'rhsc8': 'Red Hat Satellite Capsule 6.13 for RHEL 8 x86_64 (RPMs)', 'rhsc7_iso': 'Red Hat Satellite Capsule 6.4 (for RHEL 7 Server) (ISOs)', - 'rhsclient7': 'Red Hat Satellite Client 6 for RHEL 7 Server RPMs x86_64', - 'rhsclient8': 'Red Hat Satellite Client 6 for RHEL 8 x86_64 RPMs', - 'rhsclient9': 'Red Hat Satellite Client 6 for RHEL 9 x86_64 RPMs', + 'rhsclient7': 'Red Hat Satellite Client 6 (for RHEL 7 Server) (RPMs)', + 'rhsclient8': 'Red Hat Satellite Client 6 for RHEL 8 x86_64 (RPMs)', + 'rhsclient9': 'Red Hat Satellite Client 6 for RHEL 9 x86_64 (RPMs)', 'rhst7': 'Red Hat Satellite Tools 6.9 (for RHEL 7 Server) (RPMs)', 'rhst7_610': 'Red Hat Satellite Tools 6.10 (for RHEL 7 Server) (RPMs)', 'rhst6': 'Red Hat Satellite Tools 6.9 (for RHEL 6 Server) (RPMs)', @@ -315,7 +318,6 @@ class Colored(Box): 'rhel7_extra': 'Red Hat Enterprise Linux 7 Server - Extras (RPMs)', 'rhel7_optional': 'Red Hat Enterprise Linux 7 Server - Optional (RPMs)', 'rhel7_sup': 'Red Hat Enterprise Linux 7 Server - Supplementary (RPMs)', - 'rhst7_610': 'Red Hat Satellite Tools 6.10 (for RHEL 7 Server) (RPMs)', } SM_OVERALL_STATUS = { @@ -407,7 +409,7 @@ class Colored(Box): 'name': ('Red Hat Satellite Client 6 for RHEL 8 x86_64 RPMs'), 'version': '6', 'reposet': REPOSET['rhsclient8'], - 'product': PRDS['rhel'], + 'product': PRDS['rhel8'], 'distro': 'rhel8', 'key': PRODUCT_KEY_SAT_CLIENT, }, @@ -416,7 +418,7 @@ class Colored(Box): 'name': ('Red Hat Satellite Client 6 for RHEL 9 x86_64 RPMs'), 'version': '6', 'reposet': REPOSET['rhsclient9'], - 'product': PRDS['rhel'], + 'product': PRDS['rhel9'], 'distro': 'rhel9', 'key': PRODUCT_KEY_SAT_CLIENT, }, @@ -529,32 +531,32 @@ class Colored(Box): }, 'rhel8_bos': { 'id': 'rhel-8-for-x86_64-baseos-kickstart', - 'name': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS Kickstart 8.8', - 'version': '8.8', + 'name': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS Kickstart 8.9', + 'version': '8.9', 'reposet': REPOSET['kickstart']['rhel8'], 'product': PRDS['rhel8'], 'distro': 'rhel8', }, 'rhel8_aps': { 'id': 'rhel-8-for-x86_64-appstream-kickstart', - 'name': 'Red Hat Enterprise Linux 8 for x86_64 - AppStream Kickstart 8.8', - 'version': '8.8', + 'name': 'Red Hat Enterprise Linux 8 for x86_64 - AppStream 
Kickstart 8.9', + 'version': '8.9', 'reposet': REPOSET['kickstart']['rhel8_aps'], 'product': PRDS['rhel8'], 'distro': 'rhel8', }, 'rhel9_bos': { 'id': 'rhel-9-for-x86_64-baseos-kickstart', - 'name': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS Kickstart 9.2', - 'version': '9.2', + 'name': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS Kickstart 9.3', + 'version': '9.3', 'reposet': REPOSET['kickstart']['rhel9'], 'product': PRDS['rhel9'], 'distro': 'rhel9', }, 'rhel9_aps': { 'id': 'rhel-9-for-x86_64-appstream-kickstart', - 'name': 'Red Hat Enterprise Linux 9 for x86_64 - AppStream Kickstart 9.2', - 'version': '9.2', + 'name': 'Red Hat Enterprise Linux 9 for x86_64 - AppStream Kickstart 9.3', + 'version': '9.3', 'reposet': REPOSET['kickstart']['rhel9_aps'], 'product': PRDS['rhel9'], 'distro': 'rhel9', @@ -750,6 +752,9 @@ class Colored(Box): REAL_RHEL7_0_1_PACKAGE_FILENAME = 'python-pulp-common-2.21.0.2-1.el7sat.noarch.rpm' REAL_RHEL7_0_2_PACKAGE_NAME = 'python2-psutil' # for RHBA-2021:1314 REAL_RHEL7_0_2_PACKAGE_FILENAME = 'python2-psutil-5.7.2-2.el7sat.x86_64.rpm' +REAL_RHEL8_1_PACKAGE_NAME = 'puppet-agent' # for RHSA-2022:4867 +REAL_RHEL8_1_PACKAGE_FILENAME = 'puppet-agent-6.19.1-1.el8sat.x86_64' +REAL_RHEL8_2_PACKAGE_FILENAME = 'puppet-agent-6.26.0-1.el8sat.x86_64' FAKE_0_CUSTOM_PACKAGE_GROUP_NAME = 'birds' FAKE_3_YUM_OUTDATED_PACKAGES = [ 'acme-package-1.0.1-1.noarch', @@ -804,6 +809,7 @@ class Colored(Box): FAKE_2_ERRATA_ID = 'RHSA-2012:0055' # for FAKE_1_CUSTOM_PACKAGE REAL_RHEL7_0_ERRATA_ID = 'RHBA-2020:3615' # for REAL_RHEL7_0_0_PACKAGE REAL_RHEL7_1_ERRATA_ID = 'RHBA-2017:0395' # tcsh bug fix update +REAL_RHEL8_1_ERRATA_ID = 'RHSA-2022:4867' # for REAL_RHEL8_1_PACKAGE FAKE_1_YUM_REPOS_COUNT = 32 FAKE_3_YUM_REPOS_COUNT = 78 FAKE_9_YUM_SECURITY_ERRATUM = [ @@ -857,6 +863,7 @@ class Colored(Box): PULP_EXPORT_DIR = '/var/lib/pulp/exports/' PULP_IMPORT_DIR = '/var/lib/pulp/imports/' +EXPORT_LIBRARY_NAME = 'Export-Library' PUPPET_COMMON_INSTALLER_OPTS = { 'foreman-proxy-puppetca': 'true', @@ -872,6 +879,10 @@ class Colored(Box): 'enable-foreman-cli-puppet', ] PUPPET_CAPSULE_INSTALLER = ['enable-puppet'] +CAPSULE_REGISTRATION_OPTS = { + 'foreman-proxy-registration': 'true', + 'foreman-proxy-templates': 'true', +} KICKSTART_CONTENT = [ 'treeinfo', @@ -1535,8 +1546,6 @@ class Colored(Box): 'mail': 'mail', } -OSCAP_TARGET_CORES = 4 -OSCAP_TARGET_MEMORY = '16GiB' OSCAP_PERIOD = {'weekly': 'Weekly', 'monthly': 'Monthly', 'custom': 'Custom'} OSCAP_TAILORING_FILE = 'ssg-rhel7-ds-tailoring.xml' @@ -1641,105 +1650,63 @@ class Colored(Box): 'Viewer', ] -BOOKMARK_ENTITIES = [ - {'name': 'ActivationKey', 'controller': 'katello_activation_keys'}, - {'name': 'Dashboard', 'controller': 'dashboard', 'skip_for_ui': True}, - {'name': 'Audit', 'controller': 'audits', 'skip_for_ui': True}, - {'name': 'Report', 'controller': 'config_reports', 'skip_for_ui': True}, - {'name': 'Task', 'controller': 'foreman_tasks_tasks', 'skip_for_ui': True}, - # TODO Load manifest for the test_positive_end_to_end from the ui/test_bookmarks.py - # {'name': 'Subscriptions', 'controller': 'subscriptions', 'skip_for_ui': True}, - {'name': 'Product', 'controller': 'katello_products'}, - {'name': 'Repository', 'controller': 'katello_repositories', 'skip_for_ui': True}, - {'name': 'ContentCredential', 'controller': 'katello_content_credentials'}, - {'name': 'SyncPlan', 'controller': 'katello_sync_plans'}, - {'name': 'ContentView', 'controller': 'katello_content_views'}, - {'name': 'Errata', 'controller': 'katello_errata', 
'skip_for_ui': True}, - {'name': 'Package', 'controller': 'katello_erratum_packages', 'skip_for_ui': True}, - {'name': 'ContainerImageTag', 'controller': 'katello_docker_tags', 'skip_for_ui': True}, - {'name': 'Host', 'controller': 'hosts', 'setup': entities.Host}, - {'name': 'ContentHost', 'controller': 'hosts', 'skip_for_ui': True}, - {'name': 'HostCollection', 'controller': 'katello_host_collections'}, - {'name': 'Architecture', 'controller': 'architectures'}, +BOOKMARK_ENTITIES_SELECTION = [ { - 'name': 'HardwareModel', - 'controller': 'models', - 'setup': entities.Model, - 'skip_for_ui': True, + 'name': 'ActivationKey', + 'controller': 'katello_activation_keys', + 'session_name': 'activationkey', + 'old_ui': True, }, + {'name': 'Errata', 'controller': 'katello_errata', 'session_name': 'errata', 'old_ui': True}, + {'name': 'Host', 'controller': 'hosts', 'setup': entities.Host, 'session_name': 'host_new'}, { - 'name': 'InstallationMedia', - 'controller': 'media', - 'setup': entities.Media, - 'skip_for_ui': True, + 'name': 'UserGroup', + 'controller': 'usergroups', + 'setup': entities.UserGroup, + 'session_name': 'usergroup', }, - {'name': 'OperatingSystem', 'controller': 'operatingsystems'}, { 'name': 'PartitionTable', 'controller': 'ptables', 'setup': entities.PartitionTable, - 'skip_for_ui': False, - }, - {'name': 'ProvisioningTemplate', 'controller': 'provisioning_templates'}, - { - 'name': 'HostGroup', - 'controller': 'hostgroups', - 'setup': entities.HostGroup, - 'skip_for_ui': True, + 'session_name': 'partitiontable', }, { - 'name': 'DiscoveryRule', - 'controller': 'discovery_rules', - 'skip_for_ui': True, - 'setup': entities.DiscoveryRule, + 'name': 'Product', + 'controller': 'katello_products', + 'session_name': 'product', + 'old_ui': True, }, { - 'name': 'GlobalParameter', - 'controller': 'common_parameters', - 'setup': entities.CommonParameter, - 'skip_for_ui': True, + 'name': 'ProvisioningTemplate', + 'controller': 'provisioning_templates', + 'session_name': 'provisioningtemplate', }, - {'name': 'Role', 'controller': 'ansible_roles', 'setup': entities.Role}, - {'name': 'Variables', 'controller': 'ansible_variables', 'skip_for_ui': True}, - {'name': 'SmartProxy', 'controller': 'smart_proxies', 'skip_for_ui': True}, - { - 'name': 'ComputeResource', - 'controller': 'compute_resources', - 'setup': entities.LibvirtComputeResource, - }, - {'name': 'ComputeProfile', 'controller': 'compute_profiles', 'setup': entities.ComputeProfile}, - {'name': 'Subnet', 'controller': 'subnets', 'setup': entities.Subnet}, - {'name': 'Domain', 'controller': 'domains', 'setup': entities.Domain}, - {'name': 'Realm', 'controller': 'realms', 'setup': entities.Realm, 'skip_for_ui': True}, - {'name': 'Location', 'controller': 'locations'}, - {'name': 'Organization', 'controller': 'organizations'}, - {'name': 'User', 'controller': 'users'}, - {'name': 'UserGroup', 'controller': 'usergroups', 'setup': entities.UserGroup}, - {'name': 'Role', 'controller': 'roles'}, - {'name': 'Settings', 'controller': 'settings', 'skip_for_ui': True}, + {'name': 'Repository', 'controller': 'katello_repositories', 'session_name': 'repository'}, ] STRING_TYPES = ['alpha', 'numeric', 'alphanumeric', 'latin1', 'utf8', 'cjk', 'html'] VMWARE_CONSTANTS = { - 'cluster': 'Satellite-Engineering', 'folder': 'vm', - 'guest_os': 'Red Hat Enterprise Linux 7 (64-bit)', 'scsicontroller': 'LSI Logic Parallel', 'virtualhw_version': 'Default', 'pool': 'Resources', 'network_interface_name': 'VMXNET 3', - 'datastore': 'Local-Ironforge', - 
'network_interfaces': 'qe_%s', } + HAMMER_CONFIG = "~/.hammer/cli.modules.d/foreman.yml" HAMMER_SESSIONS = "~/.hammer/sessions" +INSTALLER_CONFIG_FILE = '/etc/foreman-installer/scenarios.d/satellite.yaml' SATELLITE_ANSWER_FILE = "/etc/foreman-installer/scenarios.d/satellite-answers.yaml" CAPSULE_ANSWER_FILE = "/etc/foreman-installer/scenarios.d/capsule-answers.yaml" MAINTAIN_HAMMER_YML = "/etc/foreman-maintain/foreman-maintain-hammer.yml" SATELLITE_MAINTAIN_YML = "/etc/foreman-maintain/foreman_maintain.yml" +FOREMAN_SETTINGS_YML = '/etc/foreman/settings.yaml' + +FOREMAN_NIGHTLY_URL = 'https://yum.theforeman.org/releases/nightly/el8/x86_64/' FOREMAN_TEMPLATE_IMPORT_URL = 'https://github.com/SatelliteQE/foreman_templates.git' FOREMAN_TEMPLATE_IMPORT_API_URL = 'http://api.github.com/repos/SatelliteQE/foreman_templates' @@ -1876,9 +1843,122 @@ class Colored(Box): "user", ] -FAM_MODULE_PATH = ( - '/usr/share/ansible/collections/ansible_collections/redhat/satellite/plugins/modules' -) +FAM_TEST_PLAYBOOKS = [ + "activation_keys_role", + "activation_key", + "architecture", + "auth_source_ldap", + "auth_sources_ldap_role", + "bookmark", + "compute_attribute", + "compute_profile_ovirt", + "compute_profiles_role", + "compute_profile", + "compute_resources_role", + "compute_resource", + "config_group", + "content_credentials_role", + "content_credential", + "content_export_info", + "content_export_library", + "content_export_repository", + "content_export_version", + "content_rhel_role", + "content_upload_ostree", + "content_upload", + "content_view_filter_info", + "content_view_filter_rule_info", + "content_view_filter_rule", + "content_view_filter", + "content_view_info", + "content_view_publish_role", + "content_views_role", + "content_view_version_cleanup_role", + "content_view_version_info", + "content_view_version", + "content_view", + "convert2rhel", + "discovery_rule", + "domain_info", + "domains_role", + "domain", + "external_usergroup", + "filters", + "global_parameter", + "hardware_model", + "host_collection", + "host_errata_info", + "hostgroup_info", + "hostgroups_role", + "hostgroup", + "host_info", + "host_interface_attributes", + "host_power", + "host", + "http_proxy", + "image", + "installation_medium", + "inventory_plugin_ansible", + "inventory_plugin", + "job_invocation", + "job_template", + "katello_hostgroup", + "katello_smart_proxy", + "lifecycle_environments_role", + "lifecycle_environment", + "locations_role", + "location", + "luna_hostgroup", + "manifest_role", + "module_defaults", + "operatingsystems_role", + "operatingsystem", + "organization_info", + "organizations_role", + "organization", + "os_default_template", + "partition_table", + "product", + "provisioning_templates_role", + "provisioning_template", + "puppetclasses_import", + "puppet_environment", + "realm", + "redhat_manifest", + "repositories_role", + "repository_info", + "repository_ostree", + "repository_set_info", + "repository_set", + "repository_sync", + "repository", + "resource_info", + "role", + "scap_content", + "scap_tailoring_file", + "setting_info", + "settings_role", + "setting", + "smart_class_parameter_override_value", + "smart_class_parameter", + "smart_proxy", + "status_info", + "subnet_info", + "subnets_role", + "subnet", + "subscription_info", + "subscription_manifest", + "sync_plans_role", + "sync_plan", + "templates_import", + "usergroup", + "user", + "wait_for_task", +] + +FAM_ROOT_DIR = '/usr/share/ansible/collections/ansible_collections/redhat/satellite' + +FAM_MODULE_PATH = 
f'{FAM_ROOT_DIR}/plugins/modules' RH_SAT_ROLES = [ 'activation_keys', @@ -1962,6 +2042,13 @@ class Colored(Box): "PATCH", ] +OPENSSH_RECOMMENDATION = 'Decreased security: OpenSSH config permissions' +DNF_RECOMMENDATION = ( + 'The dnf installs lower versions of packages when the "best" ' + 'option is not present in the /etc/dnf/dnf.conf' +) + +EXPIRED_MANIFEST = 'expired-manifest.zip' # Data File Paths class DataFile(Box): @@ -1981,3 +2068,5 @@ class DataFile(Box): SNIPPET_DATA_FILE = DATA_DIR.joinpath(SNIPPET_DATA_FILE) PARTITION_SCRIPT_DATA_FILE = DATA_DIR.joinpath(PARTITION_SCRIPT_DATA_FILE) OS_TEMPLATE_DATA_FILE = DATA_DIR.joinpath(OS_TEMPLATE_DATA_FILE) + FAKE_3_YUM_REPO_RPMS_ANT = DATA_DIR.joinpath(FAKE_3_YUM_REPO_RPMS[0]) + EXPIRED_MANIFEST_FILE = DATA_DIR.joinpath(EXPIRED_MANIFEST) diff --git a/robottelo/constants/repos.py b/robottelo/constants/repos.py index bf54b5518bc..9d968a3830b 100644 --- a/robottelo/constants/repos.py +++ b/robottelo/constants/repos.py @@ -2,7 +2,7 @@ PULP_FIXTURE_ROOT = 'https://fixtures.pulpproject.org/' PULP_SUBPATHS_COMBINED = { - 'yum': ['rpm-zchunk/', 'rpm-modular/'], + 'yum': ['rpm-zstd-metadata/', 'rpm-modular/'], 'file': ['file-large/', 'file-many/'], } CUSTOM_3RD_PARTY_REPO = 'http://repo.calcforge.org/fedora/21/x86_64/' @@ -12,7 +12,7 @@ CUSTOM_RPM_SHA = 'https://fixtures.pulpproject.org/rpm-with-sha/' CUSTOM_RPM_SHA_512 = 'https://fixtures.pulpproject.org/rpm-with-sha-512/' FAKE_5_YUM_REPO = 'https://rplevka.fedorapeople.org/fakerepo01/' -FAKE_YUM_DRPM_REPO = 'https://fixtures.pulpproject.org/drpm-signed/' +FAKE_YUM_MISSING_REPO = 'https://fixtures.pulpproject.org/missing-repo/' FAKE_YUM_SRPM_REPO = 'https://fixtures.pulpproject.org/srpm-signed/' FAKE_YUM_SRPM_DUPLICATE_REPO = 'https://fixtures.pulpproject.org/srpm-duplicate/' FAKE_YUM_MD5_REPO = 'https://fixtures.pulpproject.org/rpm-with-md5/' @@ -25,6 +25,6 @@ FAKE_0_YUM_REPO_STRING_BASED_VERSIONS = ( 'https://fixtures.pulpproject.org/rpm-string-version-updateinfo/' ) - +FAKE_ZST_REPO = 'https://fixtures.pulpproject.org/rpm-zstd-metadata' ANSIBLE_GALAXY = 'https://galaxy.ansible.com/' ANSIBLE_HUB = 'https://cloud.redhat.com/api/automation-hub/' diff --git a/robottelo/content_info.py b/robottelo/content_info.py index df9a3806f93..fd404f5a7d9 100644 --- a/robottelo/content_info.py +++ b/robottelo/content_info.py @@ -5,7 +5,7 @@ import requests from robottelo import ssh -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError def get_repo_files(repo_path, extension='rpm', hostname=None): @@ -46,7 +46,7 @@ def get_repo_files_urls_by_url(url, extension='rpm'): if result.status_code != 200: raise requests.HTTPError(f'{url} is not accessible') - links = re.findall(r'(?<=href=").*?(?=">)', result.text) + links = re.findall(r'(?<=href=")(?!\.\.).*?(?=">)', result.text) if 'Packages/' not in links: files = sorted(line for line in links if extension in line) return [f'{url}{file}' for file in files] diff --git a/robottelo/exceptions.py b/robottelo/exceptions.py index d1f9886dce0..a6100564873 100644 --- a/robottelo/exceptions.py +++ b/robottelo/exceptions.py @@ -65,3 +65,62 @@ class ProxyError(Exception): class DownloadFileError(Exception): """Indicates an error when failure in downloading file from server.""" + + +class CLIFactoryError(Exception): + """Indicates an error occurred while creating an entity using hammer""" + + +class CLIError(Exception): + """Indicates that a CLI command could not be run.""" + + +class CapsuleHostError(Exception): + """Indicates 
an error in capsule configuration, etc."""
+
+    pass
+
+
+class CLIBaseError(Exception):
+    """Indicates that a CLI command has finished with return code different
+    from zero.
+
+    :param status: CLI command return code
+    :param stderr: contents of the ``stderr``
+    :param msg: explanation of the error
+
+    """
+
+    def __init__(self, status, stderr, msg):
+        self.status = status
+        self.stderr = stderr
+        self.msg = msg
+        super().__init__(msg)
+        self.message = msg
+
+    def __str__(self):
+        """Include class name, status, stderr and msg to string repr so
+        assertRaisesRegexp can be used to assert error present on any
+        attribute
+        """
+        return repr(self)
+
+    def __repr__(self):
+        """Include class name, status, stderr and msg to improve logging"""
+        return '{}(status={!r}, stderr={!r}, msg={!r})'.format(
+            type(self).__name__, self.status, self.stderr, self.msg
+        )
+
+
+class CLIReturnCodeError(CLIBaseError):
+    """Error to be raised when an error occurs due to some validation error
+    when executing hammer cli.
+    See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
+    """
+
+
+class CLIDataBaseError(CLIBaseError):
+    """Error to be raised when an error occurs due to some missing parameter
+    which causes a database error on hammer
+    See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
+    """
diff --git a/robottelo/host_helpers/__init__.py b/robottelo/host_helpers/__init__.py
index 9757660606b..fa564dda27f 100644
--- a/robottelo/host_helpers/__init__.py
+++ b/robottelo/host_helpers/__init__.py
@@ -1,12 +1,16 @@
-from robottelo.host_helpers.capsule_mixins import CapsuleInfo
-from robottelo.host_helpers.capsule_mixins import EnablePluginsCapsule
-from robottelo.host_helpers.contenthost_mixins import HostInfo
-from robottelo.host_helpers.contenthost_mixins import SystemFacts
-from robottelo.host_helpers.contenthost_mixins import VersionedContent
-from robottelo.host_helpers.satellite_mixins import ContentInfo
-from robottelo.host_helpers.satellite_mixins import EnablePluginsSatellite
-from robottelo.host_helpers.satellite_mixins import Factories
-from robottelo.host_helpers.satellite_mixins import SystemInfo
+from robottelo.host_helpers.capsule_mixins import CapsuleInfo, EnablePluginsCapsule
+from robottelo.host_helpers.contenthost_mixins import (
+    HostInfo,
+    SystemFacts,
+    VersionedContent,
+)
+from robottelo.host_helpers.satellite_mixins import (
+    ContentInfo,
+    EnablePluginsSatellite,
+    Factories,
+    ProvisioningSetup,
+    SystemInfo,
+)
 class ContentHostMixins(HostInfo, SystemFacts, VersionedContent):
@@ -17,5 +21,7 @@ class CapsuleMixins(CapsuleInfo, EnablePluginsCapsule):
     pass
-class SatelliteMixins(ContentInfo, Factories, SystemInfo, EnablePluginsSatellite):
+class SatelliteMixins(
+    ContentInfo, Factories, SystemInfo, EnablePluginsSatellite, ProvisioningSetup
+):
     pass
diff --git a/robottelo/host_helpers/api_factory.py b/robottelo/host_helpers/api_factory.py
index d74290934b5..1316bca9078 100644
--- a/robottelo/host_helpers/api_factory.py
+++ b/robottelo/host_helpers/api_factory.py
@@ -3,20 +3,23 @@
 example: my_satellite.api_factory.api_method()
 """
 from contextlib import contextmanager
+from datetime import datetime
+import time
-from fauxfactory import gen_ipaddr
-from fauxfactory import gen_mac
-from fauxfactory import gen_string
+from fauxfactory import gen_ipaddr, gen_mac, gen_string
 from nailgun import entity_mixins
+from nailgun.client import request
 from nailgun.entity_mixins import call_entity_method_with_timeout
 from requests import HTTPError
 from
robottelo.config import settings -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_PTABLE -from robottelo.constants import DEFAULT_PXE_TEMPLATE -from robottelo.constants import DEFAULT_TEMPLATE -from robottelo.constants import REPO_TYPE +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + DEFAULT_PTABLE, + DEFAULT_PXE_TEMPLATE, + DEFAULT_TEMPLATE, + REPO_TYPE, +) from robottelo.exceptions import ImproperlyConfigured from robottelo.host_helpers.repository_mixins import initiate_repo_helpers @@ -48,6 +51,7 @@ def make_http_proxy(self, org, http_proxy_type): password=settings.http_proxy.password, organization=[org.id], ).create() + return None def cv_publish_promote(self, name=None, env_name=None, repo_id=None, org_id=None): """Create, publish and promote CV to selected environment""" @@ -141,7 +145,7 @@ def enable_sync_redhat_repo(self, rh_repo, org_id, timeout=1500): """Enable the RedHat repo, sync it and returns repo_id""" # Enable RH repo and fetch repository_id repo_id = self.enable_rhrepo_and_fetchid( - basearch=rh_repo['basearch'], + basearch=rh_repo.get('basearch', rh_repo.get('arch', DEFAULT_ARCHITECTURE)), org_id=org_id, product=rh_repo['product'], repo=rh_repo['name'], @@ -466,14 +470,14 @@ def create_role_permissions( if entity_permission.name != name: raise self._satellite.api.APIResponseError( 'the returned permission is different from the' - ' requested one "{} != {}"'.format(entity_permission.name, name) + f' requested one "{entity_permission.name} != {name}"' ) permissions_entities.append(entity_permission) else: if not permissions_name: raise ValueError( - 'resource type "{}" empty. You must select at' - ' least one permission'.format(resource_type) + f'resource type "{resource_type}" empty. 
You must select at'
+                        ' least one permission'
                     )
                 resource_type_permissions_entities = self._satellite.api.Permission().search(
@@ -573,8 +577,8 @@ def satellite_setting(self, key_val: str):
             setting = self._satellite.api.Setting().search(
                 query={'search': f'name={name.strip()}'}
             )[0]
-        except IndexError:
-            raise KeyError(f'The setting {name} in not available in satellite.')
+        except IndexError as err:
+            raise KeyError(f'The setting {name} is not available in satellite.') from err
         old_value = setting.value
         setting.value = value.strip()
         setting.update({'value'})
@@ -604,10 +608,9 @@ def update_provisioning_template(self, name=None, old=None, new=None):
             self.temp.template = self.temp.template.replace(old, new, 1)
             update = self.temp.update(['template'])
             return new in update.template
-        elif new in self.temp.template:
+        if new in self.temp.template:
             return True
-        else:
-            raise ValueError(f'{old} does not exists in template {name}')
+        raise ValueError(f'{old} does not exist in template {name}')
@@ -630,3 +633,139 @@ def template_update(self, temp):
         if not template.locked:
             template.locked = True
             template.update(['locked'])
+
+    def attach_custom_product_subscription(self, prod_name=None, host_name=None):
+        """Attach custom product subscription to client host
+        :param str prod_name: custom product name
+        :param str host_name: client host name
+        """
+        host = self._satellite.api.Host().search(query={'search': f'{host_name}'})[0]
+        product_subscription = self._satellite.api.Subscription().search(
+            query={'search': f'name={prod_name}'}
+        )[0]
+        self._satellite.api.HostSubscription(host=host.id).add_subscriptions(
+            data={'subscriptions': [{'id': product_subscription.id, 'quantity': 1}]}
+        )
+
+    def wait_for_errata_applicability_task(
+        self,
+        host_id,
+        from_when,
+        search_rate=1,
+        max_tries=10,
+        poll_rate=None,
+        poll_timeout=15,
+    ):
+        """Search the 'generate applicability' task for the given host and make sure it finishes
+
+        :param int host_id: Content host ID of the host where we are regenerating applicability.
+        :param int from_when: Epoch Time (seconds in UTC) to limit number of returned tasks to investigate.
+        :param int search_rate: Delay between searches.
+        :param int max_tries: How many times search should be executed.
+        :param int poll_rate: Delay between the end of one task check-up and
+            the start of the next check-up. Parameter for
+            ``nailgun.entities.ForemanTask.poll()`` method.
+        :param int poll_timeout: Maximum number of seconds to wait until timing out.
+            Parameter for ``nailgun.entities.ForemanTask.poll()`` method.
+        :return: Relevant errata applicability task.
+        :raises: ``AssertionError``. If no tasks were found for given host until timeout.
+        """
+        assert isinstance(host_id, int), 'Param host_id has to be int'
+        assert isinstance(from_when, int), 'Param from_when has to be int'
+        now = int(time.time())
+        assert from_when <= now, 'Param from_when has to be epoch time in the past'
+        for _ in range(max_tries):
+            now = int(time.time())
+            # Format epoch time for search, one second prior margin of safety
+            timestamp = datetime.fromtimestamp(from_when - 1).strftime('%m-%d-%Y %H:%M:%S')
+            # Long format to match search: ex.
+
+    def wait_for_errata_applicability_task(
+        self,
+        host_id,
+        from_when,
+        search_rate=1,
+        max_tries=10,
+        poll_rate=None,
+        poll_timeout=15,
+    ):
+        """Search the generate applicability task for given host and make sure it finishes
+
+        :param int host_id: Content host ID of the host where we are regenerating applicability.
+        :param int from_when: Epoch Time (seconds in UTC) to limit number of returned tasks to investigate.
+        :param int search_rate: Delay between searches.
+        :param int max_tries: How many times search should be executed.
+        :param int poll_rate: Delay between the end of one task check-up and
+            the start of the next check-up. Parameter for
+            ``nailgun.entities.ForemanTask.poll()`` method.
+        :param int poll_timeout: Maximum number of seconds to wait until timing out.
+            Parameter for ``nailgun.entities.ForemanTask.poll()`` method.
+        :return: Relevant errata applicability task.
+        :raises: ``AssertionError``. If no tasks were found for the given host before timeout.
+        """
+        assert isinstance(host_id, int), 'Param host_id must be an int'
+        assert isinstance(from_when, int), 'Param from_when must be an int'
+        now = int(time.time())
+        assert from_when <= now, 'Param from_when must be epoch time in the past'
+        for _ in range(max_tries):
+            now = int(time.time())
+            # Format epoch time for search, one second prior margin of safety
+            timestamp = datetime.fromtimestamp(from_when - 1).strftime('%m-%d-%Y %H:%M:%S')
+            # Long format to match search: ex. 'January 03, 2024 at 03:08:08 PM'
+            long_format = datetime.strptime(timestamp, '%m-%d-%Y %H:%M:%S').strftime(
+                '%B %d, %Y at %I:%M:%S %p'
+            )
+            search_query = (
+                '( label = Actions::Katello::Applicability::Hosts::BulkGenerate OR'
+                ' label = Actions::Katello::Host::UploadPackageProfile ) AND'
+                f' started_at >= "{long_format}" '
+            )
+            tasks = self._satellite.api.ForemanTask().search(query={'search': search_query})
+            tasks_finished = 0
+            for task in tasks:
+                if (
+                    task.label == 'Actions::Katello::Applicability::Hosts::BulkGenerate'
+                    and 'host_ids' in task.input
+                    and host_id in task.input['host_ids']
+                ) or (
+                    task.label == 'Actions::Katello::Host::UploadPackageProfile'
+                    and 'host' in task.input
+                    and host_id == task.input['host']['id']
+                ):
+                    task.poll(poll_rate=poll_rate, timeout=poll_timeout)
+                    tasks_finished += 1
+            if tasks_finished > 0:
+                break
+            time.sleep(search_rate)
+        else:
+            raise AssertionError(
+                f'No task was found using query " {search_query} " for host id: {host_id}'
+            )
+
+    def wait_for_syncplan_tasks(self, repo_backend_id=None, timeout=10, repo_name=None):
+        """Search the pulp tasks and identify repositories sync tasks with
+        specified name or backend_identifier
+
+        :param repo_backend_id: The Backend ID for the repository to identify the
+            repo in Pulp environment
+        :param timeout: Value to decide how long to check for the Sync task
+        :param repo_name: If repo_backend_id cannot be passed, pass the repo_name
+        """
+        if repo_name:
+            repo_backend_id = (
+                self._satellite.api.Repository()
+                .search(query={'search': f'name="{repo_name}"', 'per_page': '1000'})[0]
+                .backend_identifier
+            )
+        # Fetch the Pulp password
+        pulp_pass = self._satellite.execute(
+            'grep "^default_password" /etc/pulp/server.conf | awk \'{print $2}\''
+        ).stdout.splitlines()[0]
+        # Set the Timeout value
+        timeup = time.time() + int(timeout) * 60
+        # Search Filter to filter out the task based on backend-id and sync action
+        filtered_req = {
+            'criteria': {
+                'filters': {
+                    'tags': {'$in': [f"pulp:repository:{repo_backend_id}"]},
+                    'task_type': {'$in': ["pulp.server.managers.repo.sync.sync"]},
+                }
+            }
+        }
+        while True:
+            if time.time() > timeup:
+                raise self._satellite.api.APIResponseError(
+                    f'Pulp task with repo_id {repo_backend_id} not found'
+                )
+            # Send request to pulp API to get the task info
+            req = request(
+                'POST',
+                f'{self._satellite.url}/pulp/api/v2/tasks/search/',
+                verify=False,
+                auth=('admin', f'{pulp_pass}'),
+                headers={'content-type': 'application/json'},
+                data=filtered_req,
+            )
+            # Check Status code of response
+            if req.status_code != 200:
+                raise self._satellite.api.APIResponseError(
+                    f'Pulp task with repo_id {repo_backend_id} not found'
+                )
+            # Check content of response
+            # It is '[]' string for empty content when backend_identifier is wrong
+            if len(req.content) > 2:
+                if req.json()[0].get('state') in ['finished']:
+                    return True
+                if req.json()[0].get('error'):
+                    raise AssertionError(
+                        f"Pulp task with repo_id {repo_backend_id} error or not found: "
+                        f"'{req.json().get('error')}'"
+                    )
+            time.sleep(2)
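A sketch of how a test might drive the new applicability helper above; the `sat` and `client` fixtures are assumptions for illustration, not part of this diff:

```python
import time

# Assumed: `sat` is a Satellite host object and `client` a registered ContentHost.
start = int(time.time())
client.run('subscription-manager repos')  # triggers a package-profile upload task
sat.api_factory.wait_for_errata_applicability_task(
    host_id=client.nailgun_host.id, from_when=start
)
```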
diff --git a/robottelo/host_helpers/capsule_mixins.py b/robottelo/host_helpers/capsule_mixins.py
index f9c5d9652bc..9ae7edfb761 100644
--- a/robottelo/host_helpers/capsule_mixins.py
+++ b/robottelo/host_helpers/capsule_mixins.py
@@ -1,8 +1,9 @@
+from datetime import datetime, timedelta
 import time
-from datetime import datetime

-from robottelo.constants import PUPPET_CAPSULE_INSTALLER
-from robottelo.constants import PUPPET_COMMON_INSTALLER_OPTS
+from dateutil.parser import parse
+
+from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS
 from robottelo.logging import logger
 from robottelo.utils.installer import InstallerCommand

@@ -56,30 +57,90 @@ def wait_for_tasks(
                 for task in tasks:
                     task.poll(poll_rate=poll_rate, timeout=poll_timeout, must_succeed=must_succeed)
                 break
-            else:
-                time.sleep(search_rate)
+            time.sleep(search_rate)
         else:
             raise AssertionError(f"No task was found using query '{search_query}'")
         return tasks

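The rewritten `wait_for_sync` below is typically driven like this in a capsule content test; a minimal sketch, assuming `capsule` is a Capsule host object exposing this mixin:

```python
from datetime import datetime

# Record a UTC reference point before triggering the sync.
start_time = datetime.utcnow()
# ...publish or promote content that schedules a capsule sync here...
finished = capsule.wait_for_sync(start_time=start_time, timeout=600)
# `finished` lists the initially in-progress sync tasks that were polled to completion.
```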
-    def wait_for_sync(self, timeout=600, start_time=datetime.utcnow()):
-        """Wait for capsule sync to finish and assert the sync task succeeded"""
-        # Assert that a task to sync lifecycle environment to the capsule
-        # is started (or finished already)
+    def wait_for_sync(self, start_time=None, timeout=600):
+        """Wait for capsule sync to finish and assert success.
+        Assert that a task to sync lifecycle environment to the
+        capsule is started (or finished already), and succeeded.
+        :raises: ``AssertionError``: If capsule sync verification fails on any of these conditions:
+
+        - Found some active sync task(s) for capsule, or it just finished (recent sync time).
+        - Any active sync task(s) polled, succeeded, and the capsule last_sync_time is updated.
+        - last_sync_time after final task is on or newer than start_time.
+        - The total sync time duration (seconds) is within timeout and not negative.
+
+        :param start_time: (datetime): UTC time to compare against capsule's last_sync_time.
+            Default: None (current UTC).
+        :param timeout: (int) maximum seconds for active task(s) and queries to finish.
+
+        :return:
+            list of polled finished tasks that were in-progress from `active_sync_tasks`.
+        """
+        # Fetch initial capsule sync status
         logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
-        sync_status = self.nailgun_capsule.content_get_sync()
-        logger.info(f"Active tasks {sync_status['active_sync_tasks']}")
+        sync_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        # Current UTC time for start_time, if not provided
+        if start_time is None:
+            start_time = datetime.utcnow().replace(microsecond=0)
+        # 1s margin of safety for rounding
+        start_time = (
+            (start_time - timedelta(seconds=1))
+            .replace(microsecond=0)
+            .strftime('%Y-%m-%d %H:%M:%S UTC')
+        )
+        # Assert presence of recent sync activity:
+        # one or more ongoing sync tasks for the capsule,
+        # or the capsule's last_sync_time is on or after start_time
+        assert len(sync_status['active_sync_tasks']) or (
+            parse(sync_status['last_sync_time']) >= parse(start_time)
+        ), (
+            f"No active or recent sync found for capsule {self.hostname}."
+            f" `active_sync_tasks` was empty: {sync_status['active_sync_tasks']},"
+            f" and the `last_sync_time`: {sync_status['last_sync_time']},"
+            f" was prior to the `start_time`: {start_time}."
+        )
+        sync_tasks = []
+        # Poll any active sync task from the initial status and verify it succeeds.
+        logger.info(f"Active tasks: {sync_status['active_sync_tasks']}")
+        for task in sync_status['active_sync_tasks']:
+            sync_tasks.append(self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout))
+            logger.info(f"Active sync task :id {task['id']} succeeded.")
+
+        # Fetch updated capsule status (expect no ongoing sync)
+        logger.info(f"Querying updated sync status from capsule {self.hostname}.")
+        updated_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+
+        # Total time taken is not negative (sync prior to start_time),
+        # and did not exceed timeout.
         assert (
-            len(sync_status['active_sync_tasks'])
-            or datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC')
-            > start_time
+            timedelta(seconds=0)
+            <= parse(updated_status['last_sync_time']) - parse(start_time)
+            <= timedelta(seconds=timeout)
+        ), (
+            f"No recent sync task(s) were found for capsule: {self.hostname}, or task(s) timed out."
+            f" `last_sync_time`: ({updated_status['last_sync_time']}) was prior to `start_time`: ({start_time})"
+            f" or exceeded timeout ({timeout}s)."
         )
+        # No failed or active tasks remaining
+        assert len(updated_status['last_failed_sync_tasks']) == 0
+        assert len(updated_status['active_sync_tasks']) == 0

-        # Wait till capsule sync finishes and assert the sync task succeeded
-        for task in sync_status['active_sync_tasks']:
-            self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout)
-        sync_status = self.nailgun_capsule.content_get_sync()
-        assert len(sync_status['last_failed_sync_tasks']) == 0
+        # Last sync task end time is the same as capsule's last sync time.
+        if len(sync_status['active_sync_tasks']):
+            final_task_id = sync_status['active_sync_tasks'][-1]['id']
+            final_task_end_time = self.satellite.api.ForemanTask(id=final_task_id).read().ended_at
+
+            assert parse(final_task_end_time) == parse(updated_status['last_sync_time']), (
+                f"Final Task :id: {final_task_id},"
+                f" `end_time` does not match capsule `last_sync_time`. Capsule: {self.hostname}"
+            )
+
+        # return any polled sync tasks that were initially in-progress
+        return sync_tasks

     def get_published_repo_url(self, org, prod, repo, lce=None, cv=None):
         """Forms url of a repo or CV published on a Satellite or Capsule.
@@ -93,5 +154,4 @@ def get_published_repo_url(self, org, prod, repo, lce=None, cv=None):
         """
         if lce and cv:
             return f'{self.url}/pulp/content/{org}/{lce}/{cv}/custom/{prod}/{repo}/'
-        else:
-            return f'{self.url}/pulp/content/{org}/Library/custom/{prod}/{repo}/'
+        return f'{self.url}/pulp/content/{org}/Library/custom/{prod}/{repo}/'
diff --git a/robottelo/host_helpers/cli_factory.py b/robottelo/host_helpers/cli_factory.py
index 4a8e947d759..7d3f9f1afcb 100644
--- a/robottelo/host_helpers/cli_factory.py
+++ b/robottelo/host_helpers/cli_factory.py
@@ -4,38 +4,35 @@
 example: my_satellite.cli_factory.make_org()
 """
 import datetime
+from functools import lru_cache, partial
 import inspect
 import os
+from os import chmod
 import pprint
 import random
-from functools import lru_cache
-from functools import partial
-from os import chmod
 from tempfile import mkstemp
 from time import sleep

 from box import Box
-from fauxfactory import gen_alpha
-from fauxfactory import gen_alphanumeric
-from fauxfactory import gen_choice
-from fauxfactory import gen_integer
-from fauxfactory import gen_ipaddr
-from fauxfactory import gen_mac
-from fauxfactory import gen_netmask
-from fauxfactory import gen_url
+from fauxfactory import (
+    gen_alpha,
+    gen_alphanumeric,
+    gen_choice,
+    gen_integer,
+    gen_ipaddr,
+    gen_mac,
+    gen_netmask,
+    gen_url,
+)

 from robottelo import constants
-from robottelo.cli.base import CLIReturnCodeError
 from robottelo.cli.proxy import CapsuleTunnelError
 from robottelo.config import settings
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
 from robottelo.host_helpers.repository_mixins import initiate_repo_helpers
 from robottelo.utils.manifest import clone


-class CLIFactoryError(Exception):
-    """Indicates an error occurred while creating an entity using hammer"""
-
-
 def create_object(cli_object, options, values=None, credentials=None):
     """Creates <entity>
with dictionary of arguments. @@ -62,9 +59,9 @@ def create_object(cli_object, options, values=None, credentials=None): 'Failed to create {} with data:\n{}\n{}'.format( cli_object.__name__, pprint.pformat(options, indent=2), err.msg ) - ) + ) from err # Sometimes we get a list with a dictionary and not a dictionary. - if type(result) is list and len(result) > 0: + if isinstance(result, list) and len(result) > 0: result = result[0] return Box(result) @@ -128,6 +125,7 @@ def create_object(cli_object, options, values=None, credentials=None): '_entity_cls': 'Repository', 'name': gen_alpha, 'url': settings.repos.yum_1.url, + 'content-type': 'yum', }, 'role': {'name': gen_alphanumeric}, 'filter': {}, @@ -142,7 +140,9 @@ def create_object(cli_object, options, values=None, credentials=None): 'sync_plan': { 'description': gen_alpha, 'enabled': 'true', - 'interval': lambda: random.choice(list(constants.SYNC_INTERVAL.values())), + 'interval': lambda: random.choice( + [i for i in constants.SYNC_INTERVAL.values() if i != 'custom cron'] + ), 'name': gen_alpha, 'sync-date': lambda: datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), }, @@ -155,7 +155,10 @@ def create_object(cli_object, options, values=None, credentials=None): 'host_collection': { 'name': gen_alpha, }, - 'job_invocation': {}, + 'job_invocation': {'_redirect': 'job_invocation_with_credentials'}, + 'job_invocation_with_credentials': { + '_entity_cls': 'JobInvocation', + }, 'job_template': { 'job-category': 'Miscellaneous', 'provider-type': 'SSH', @@ -195,6 +198,7 @@ def create_object(cli_object, options, values=None, credentials=None): 'name': gen_alphanumeric, }, 'os': { + '_entity_cls': 'OperatingSys', 'major': partial(random.randint, 0, 10), 'minor': partial(random.randint, 0, 10), 'name': gen_alphanumeric, @@ -274,15 +278,13 @@ def __getattr__(self, name): # evaluate functions that provide default values fields = self._evaluate_functions(fields) return partial(create_object, entity_cls, fields) - else: - raise AttributeError(f'unknown factory method name: {name}') + raise AttributeError(f'unknown factory method name: {name}') def _evaluate_function(self, function): """Some functions may require an instance reference""" if 'self' in inspect.signature(function).parameters: return function(self) - else: - return function() + return function() def _evaluate_functions(self, iterable): """Run functions that are used to populate data in lists/dicts""" @@ -294,6 +296,7 @@ def _evaluate_functions(self, iterable): for key, item in iterable.items() if not key.startswith('_') } + return None @lru_cache def _find_entity_class(self, entity_name): @@ -301,6 +304,7 @@ def _find_entity_class(self, entity_name): for name, class_obj in self._satellite.cli.__dict__.items(): if entity_name == name.lower(): return class_obj + return None def make_content_credential(self, options=None): """Creates a content credential. 
@@ -395,8 +399,8 @@ def make_product_wait(self, options=None, wait_for=5): product = self._satellite.cli.Product.info( {'name': options.get('name'), 'organization-id': options.get('organization-id')} ) - except CLIReturnCodeError: - raise err + except CLIReturnCodeError as nested_err: + raise nested_err from err if not product: raise err return product @@ -504,7 +508,7 @@ def make_proxy(self, options=None): args['url'] = url return create_object(self._satellite.cli.Proxy, args, options) except CapsuleTunnelError as err: - raise CLIFactoryError(f'Failed to create ssh tunnel: {err}') + raise CLIFactoryError(f'Failed to create ssh tunnel: {err}') from None args['url'] = options['url'] return create_object(self._satellite.cli.Proxy, args, options) @@ -528,7 +532,7 @@ def make_template(self, options=None): } # Write content to file or random text - if options is not None and 'content' in options.keys(): + if options is not None and 'content' in options: content = options.pop('content') else: content = gen_alphanumeric() @@ -570,7 +574,7 @@ def activationkey_add_subscription_to_repo(self, options=None): except CLIReturnCodeError as err: raise CLIFactoryError( f'Failed to add subscription to activation key\n{err.msg}' - ) + ) from err def setup_org_for_a_custom_repo(self, options=None): """Sets up Org for the given custom repo by: @@ -585,6 +589,7 @@ def setup_org_for_a_custom_repo(self, options=None): 4. Checks if activation key was given, otherwise creates a new one and associates it with the content view. 5. Adds the custom repo subscription to the activation key + 6. Override custom product to true ( turned off by default in 6.14 ) :return: A dictionary with the entity ids of Activation key, Content view, Lifecycle Environment, Organization, Product and Repository @@ -608,7 +613,7 @@ def setup_org_for_a_custom_repo(self, options=None): try: self._satellite.cli.Repository.synchronize({'id': custom_repo['id']}) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') + raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') from err # Create CV if needed and associate repo with it if options.get('content-view-id') is None: cv_id = self.make_content_view({'organization-id': org_id})['id'] @@ -619,21 +624,32 @@ def setup_org_for_a_custom_repo(self, options=None): {'id': cv_id, 'organization-id': org_id, 'repository-id': custom_repo['id']} ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') + raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') from err # Publish a new version of CV try: self._satellite.cli.ContentView.publish({'id': cv_id}) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') + raise CLIFactoryError( + f'Failed to publish new version of content view\n{err.msg}' + ) from err # Get the version id - cvv = self._satellite.cli.ContentView.info({'id': cv_id})['versions'][-1] + cv_info = self._satellite.cli.ContentView.info({'id': cv_id}) + lce_promoted = cv_info['lifecycle-environments'] + cvv = cv_info['versions'][-1] # Promote version to next env try: - self._satellite.cli.ContentView.version_promote( - {'id': cvv['id'], 'organization-id': org_id, 'to-lifecycle-environment-id': env_id} - ) + if env_id not in [int(lce['id']) for lce in lce_promoted]: + self._satellite.cli.ContentView.version_promote( + { + 'id': cvv['id'], + 'organization-id': org_id, + 
'to-lifecycle-environment-id': env_id, + } + ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to promote version to next environment\n{err.msg}') + raise CLIFactoryError( + f'Failed to promote version to next environment\n{err.msg}' + ) from err # Create activation key if needed and associate content view with it if options.get('activationkey-id') is None: activationkey_id = self.make_activation_key( @@ -652,7 +668,9 @@ def setup_org_for_a_custom_repo(self, options=None): {'content-view-id': cv_id, 'id': activationkey_id, 'organization-id': org_id} ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to associate activation-key with CV\n{err.msg}') + raise CLIFactoryError( + f'Failed to associate activation-key with CV\n{err.msg}' + ) from err # Add custom_product subscription to activation-key, if SCA mode is disabled if self._satellite.is_sca_mode_enabled(org_id) is False: @@ -663,6 +681,11 @@ def setup_org_for_a_custom_repo(self, options=None): 'subscription': custom_product['name'], } ) + # Override custom product to true ( turned off by default in 6.14 ) + custom_repo = self._satellite.cli.Repository.info({'id': custom_repo['id']}) + self._satellite.cli.ActivationKey.content_override( + {'id': activationkey_id, 'content-label': custom_repo['content-label'], 'value': 'true'} + ) return { 'activationkey-id': activationkey_id, 'content-view-id': cv_id, @@ -677,7 +700,7 @@ def _setup_org_for_a_rh_repo(self, options=None): 1. Checks if organization and lifecycle environment were given, otherwise creates new ones. - 2. Clones and uploads manifest. + 2. If manifest does not exist, clone and upload it. 3. Enables RH repo and synchronizes it. 4. Checks if content view was given, otherwise creates a new one and - adds the RH repo @@ -703,15 +726,16 @@ def _setup_org_for_a_rh_repo(self, options=None): env_id = self.make_lifecycle_environment({'organization-id': org_id})['id'] else: env_id = options['lifecycle-environment-id'] - # Clone manifest and upload it - with clone() as manifest: - self._satellite.put(manifest.path, manifest.name) - try: - self._satellite.cli.Subscription.upload( - {'file': manifest.name, 'organization-id': org_id} - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') + # If manifest does not exist, clone and upload it + if len(self._satellite.cli.Subscription.exists({'organization-id': org_id})) == 0: + with clone() as manifest: + self._satellite.put(manifest.path, manifest.name) + try: + self._satellite.cli.Subscription.upload( + {'file': manifest.name, 'organization-id': org_id} + ) + except CLIReturnCodeError as err: + raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') from err # Enable repo from Repository Set try: self._satellite.cli.RepositorySet.enable( @@ -724,7 +748,7 @@ def _setup_org_for_a_rh_repo(self, options=None): } ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to enable repository set\n{err.msg}') + raise CLIFactoryError(f'Failed to enable repository set\n{err.msg}') from err # Fetch repository info try: rhel_repo = self._satellite.cli.Repository.info( @@ -735,7 +759,7 @@ def _setup_org_for_a_rh_repo(self, options=None): } ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to fetch repository info\n{err.msg}') + raise CLIFactoryError(f'Failed to fetch repository info\n{err.msg}') from err # Synchronize the RH repository try: self._satellite.cli.Repository.synchronize( @@ -746,7 +770,7 @@ def 
_setup_org_for_a_rh_repo(self, options=None): } ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') + raise CLIFactoryError(f'Failed to synchronize repository\n{err.msg}') from err # Create CV if needed and associate repo with it if options.get('content-view-id') is None: cv_id = self.make_content_view({'organization-id': org_id})['id'] @@ -757,24 +781,28 @@ def _setup_org_for_a_rh_repo(self, options=None): {'id': cv_id, 'organization-id': org_id, 'repository-id': rhel_repo['id']} ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') + raise CLIFactoryError(f'Failed to add repository to content view\n{err.msg}') from err # Publish a new version of CV try: self._satellite.cli.ContentView.publish({'id': cv_id}) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') + raise CLIFactoryError( + f'Failed to publish new version of content view\n{err.msg}' + ) from err # Get the version id try: cvv = self._satellite.cli.ContentView.info({'id': cv_id})['versions'][-1] except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to fetch content view info\n{err.msg}') + raise CLIFactoryError(f'Failed to fetch content view info\n{err.msg}') from err # Promote version1 to next env try: self._satellite.cli.ContentView.version_promote( {'id': cvv['id'], 'organization-id': org_id, 'to-lifecycle-environment-id': env_id} ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to promote version to next environment\n{err.msg}') + raise CLIFactoryError( + f'Failed to promote version to next environment\n{err.msg}' + ) from err # Create activation key if needed and associate content view with it if options.get('activationkey-id') is None: activationkey_id = self.make_activation_key( @@ -793,7 +821,9 @@ def _setup_org_for_a_rh_repo(self, options=None): {'id': activationkey_id, 'organization-id': org_id, 'content-view-id': cv_id} ) except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to associate activation-key with CV\n{err.msg}') + raise CLIFactoryError( + f'Failed to associate activation-key with CV\n{err.msg}' + ) from err # Add default subscription to activation-key, if SCA mode is disabled if self._satellite.is_sca_mode_enabled(org_id) is False: @@ -806,6 +836,11 @@ def _setup_org_for_a_rh_repo(self, options=None): ), } ) + # Override RHST product to true ( turned off by default in 6.14 ) + rhel_repo = self._satellite.cli.Repository.info({'id': rhel_repo['id']}) + self._satellite.cli.ActivationKey.content_override( + {'id': activationkey_id, 'content-label': rhel_repo['content-label'], 'value': 'true'} + ) return { 'activationkey-id': activationkey_id, 'content-view-id': cv_id, @@ -845,32 +880,31 @@ def setup_org_for_a_rh_repo( custom_repo_url = settings.repos.capsule_repo if force_use_cdn or settings.robottelo.cdn or not custom_repo_url: return self._setup_org_for_a_rh_repo(options) - else: - options['url'] = custom_repo_url - result = self.setup_org_for_a_custom_repo(options) - if force_manifest_upload: - with clone() as manifest: - self._satellite.put(manifest.path, manifest.name) - try: - self._satellite.cli.Subscription.upload( - { - 'file': manifest.name, - 'organization-id': result.get('organization-id'), - } - ) - except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') + options['url'] = custom_repo_url + result = 
self.setup_org_for_a_custom_repo(options) + if force_manifest_upload: + with clone() as manifest: + self._satellite.put(manifest.path, manifest.name) + try: + self._satellite.cli.Subscription.upload( + { + 'file': manifest.name, + 'organization-id': result.get('organization-id'), + } + ) + except CLIReturnCodeError as err: + raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') from err - # Add default subscription to activation-key, if SCA mode is disabled - if self._satellite.is_sca_mode_enabled(result['organization-id']) is False: - self.activationkey_add_subscription_to_repo( - { - 'activationkey-id': result['activationkey-id'], - 'organization-id': result['organization-id'], - 'subscription': constants.DEFAULT_SUBSCRIPTION_NAME, - } - ) - return result + # Add default subscription to activation-key, if SCA mode is disabled + if self._satellite.is_sca_mode_enabled(result['organization-id']) is False: + self.activationkey_add_subscription_to_repo( + { + 'activationkey-id': result['activationkey-id'], + 'organization-id': result['organization-id'], + 'subscription': constants.DEFAULT_SUBSCRIPTION_NAME, + } + ) + return result @staticmethod def _get_capsule_vm_distro_repos(distro): @@ -1069,7 +1103,7 @@ def setup_cdn_and_custom_repos_content( try: self._satellite.upload_manifest(org_id, interface='CLI') except CLIReturnCodeError as err: - raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') + raise CLIFactoryError(f'Failed to upload manifest\n{err.msg}') from err custom_product, repos_info = self.setup_cdn_and_custom_repositories( org_id=org_id, repos=repos, download_policy=download_policy diff --git a/robottelo/host_helpers/contenthost_mixins.py b/robottelo/host_helpers/contenthost_mixins.py index 63570b13d92..feac3cc89ae 100644 --- a/robottelo/host_helpers/contenthost_mixins.py +++ b/robottelo/host_helpers/contenthost_mixins.py @@ -1,14 +1,12 @@ """A collection of mixins for robottelo.hosts classes""" -import json from functools import cached_property +import json from tempfile import NamedTemporaryFile from robottelo import constants -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings +from robottelo.config import robottelo_tmp_dir, settings from robottelo.logging import logger -from robottelo.utils.ohsnap import dogfood_repofile_url -from robottelo.utils.ohsnap import dogfood_repository +from robottelo.utils.ohsnap import dogfood_repofile_url, dogfood_repository class VersionedContent: @@ -20,10 +18,21 @@ def _v_major(self): @cached_property def REPOSET(self): - return { - 'rhel': constants.REPOSET[f'rhel{self._v_major}'], - 'rhst': constants.REPOSET[f'rhst{self._v_major}'], - } + try: + if self._v_major > 7: + sys_reposets = { + 'rhel_bos': constants.REPOSET[f'rhel{self._v_major}_bos'], + 'rhel_aps': constants.REPOSET[f'rhel{self._v_major}_aps'], + } + else: + sys_reposets = { + 'rhel': constants.REPOSET[f'rhel{self._v_major}'], + 'rhscl': constants.REPOSET[f'rhscl{self._v_major}'], + } + reposets = {'rhst': constants.REPOSET[f'rhst{self._v_major}']} + except KeyError as err: + raise ValueError(f'Unsupported system version: {self._v_major}') from err + return sys_reposets | reposets @cached_property def REPOS(self): @@ -54,7 +63,7 @@ def OSCAP(self): 'cbrhel': constants.OSCAP_PROFILE[f'cbrhel{self._v_major}'], } - def _dogfood_helper(self, product, release, snap, repo=None): + def _dogfood_helper(self, product, release, repo=None): """Function to return repository related attributes based on the input and the host object """ @@ 
-67,50 +76,30 @@ def _dogfood_helper(self, product, release, snap, repo=None): product = self.__class__.__name__.lower() repo = repo or product # if repo is not specified, set it to the same as the product is release = self.satellite.version if not release else str(release) + # issue warning if requesting repofile of different version than the product is settings_release = settings.server.version.release.split('.') if len(settings_release) == 2: settings_release.append('0') - settings_release = '.'.join(settings_release[:3]) # keep only major.minor.patch + settings_release = '.'.join(settings_release) if product != 'client' and release != settings_release: logger.warning( 'Satellite release in settings differs from the one passed to the function ' 'or the version of the Satellite object. ' f'settings: {settings_release}, parameter: {release}' ) - snap = str(snap or settings.server.version.get("snap")) - return product, release, snap, v_major, repo + return product, release, v_major, repo def download_repofile(self, product=None, release=None, snap=''): """Downloads the tools/client, capsule, or satellite repos on the machine""" - product, release, snap, v_major, _ = self._dogfood_helper(product, release, snap) + product, release, v_major, _ = self._dogfood_helper(product, release) url = dogfood_repofile_url(settings.ohsnap, product, release, v_major, snap) self.execute(f'curl -o /etc/yum.repos.d/dogfood.repo -L {url}') def dogfood_repository(self, repo=None, product=None, release=None, snap=''): """Returns a repository definition based on the arguments provided""" - product, release, snap, v_major, repo = self._dogfood_helper(product, release, snap, repo) + product, release, v_major, repo = self._dogfood_helper(product, release, repo) return dogfood_repository(settings.ohsnap, repo, product, release, v_major, snap, self.arch) - def enable_tools_repo(self, organization_id): - return self.satellite.api_factory.enable_rhrepo_and_fetchid( - basearch=constants.DEFAULT_ARCHITECTURE, - org_id=organization_id, - product=constants.PRDS['rhel'], - repo=self.REPOS['rhst']['name'], - reposet=self.REPOSET['rhst'], - releasever=None, - ) - - def enable_rhel_repo(self, organization_id): - return self.satellite.api_factory.enable_rhrepo_and_fetchid( - basearch=constants.DEFAULT_ARCHITECTURE, - org_id=organization_id, - product=constants.PRDS['rhel'], - repo=self.REPOS['rhel']['name'], - reposet=self.REPOSET['rhel'], - releasever=None, - ) - def create_custom_html_repo(self, rpm_url, repo_name=None, update=False, remove_rpm=None): """Creates a custom yum repository, that will be published on https @@ -147,6 +136,12 @@ def applicable_errata_count(self): """return the applicable errata count for a host""" return self.nailgun_host.read().content_facet_attributes['errata_counts']['total'] + @property + def applicable_package_count(self): + """return the applicable package count for a host""" + self.run('subscription-manager repos') + return self.nailgun_host.read().content_facet_attributes['applicable_package_count'] + class SystemFacts: """Helpers mixin that enables getting/setting subscription-manager facts on a host""" diff --git a/robottelo/host_helpers/repository_mixins.py b/robottelo/host_helpers/repository_mixins.py index 9d032c4727d..09a5fb1529d 100644 --- a/robottelo/host_helpers/repository_mixins.py +++ b/robottelo/host_helpers/repository_mixins.py @@ -7,12 +7,14 @@ from robottelo import constants from robottelo.config import settings -from robottelo.exceptions import DistroNotSupportedError -from 
robottelo.exceptions import OnlyOneOSRepositoryAllowed -from robottelo.exceptions import ReposContentSetupWasNotPerformed -from robottelo.exceptions import RepositoryAlreadyCreated -from robottelo.exceptions import RepositoryAlreadyDefinedError -from robottelo.exceptions import RepositoryDataNotFound +from robottelo.exceptions import ( + DistroNotSupportedError, + OnlyOneOSRepositoryAllowed, + ReposContentSetupWasNotPerformed, + RepositoryAlreadyCreated, + RepositoryAlreadyDefinedError, + RepositoryDataNotFound, +) def initiate_repo_helpers(satellite): @@ -121,6 +123,59 @@ class YumRepository(BaseRepository): _type = constants.REPO_TYPE['yum'] +class FileRepository(BaseRepository): + """Custom File repository""" + + _type = constants.REPO_TYPE['file'] + + +class DebianRepository(BaseRepository): + """Custom Debian repository.""" + + _type = constants.REPO_TYPE["deb"] + + def __init__( + self, url=None, distro=None, content_type=None, deb_errata_url=None, deb_releases=None + ): + super().__init__(url=url, distro=distro, content_type=content_type) + self._deb_errata_url = deb_errata_url + self._deb_releases = deb_releases + + @property + def deb_errata_url(self): + return self._deb_errata_url + + @property + def deb_releases(self): + return self._deb_releases + + def create( + self, + organization_id, + product_id, + download_policy=None, + synchronize=True, + ): + """Create the repository for the supplied product id""" + create_options = { + 'product-id': product_id, + 'content-type': self.content_type, + 'url': self.url, + 'deb-releases': self._deb_releases, + } + + if self._deb_errata_url is not None: + create_options['deb-errata-url'] = self._deb_errata_url + + repo_info = self.satellite.cli_factory.make_repository(create_options) + self._repo_info = repo_info + + if synchronize: + self.synchronize() + + return repo_info + + class DockerRepository(BaseRepository): """Custom Docker repository""" @@ -149,6 +204,34 @@ def create(self, organization_id, product_id, download_policy=None, synchronize= return repo_info +class AnsibleRepository(BaseRepository): + """Custom Ansible Collection repository""" + + _type = constants.REPO_TYPE['ansible_collection'] + + def __init__(self, url=None, distro=None, requirements=None): + self._requirements = requirements + super().__init__(url=url, distro=distro) + + @property + def requirements(self): + return self._requirements + + def create(self, organization_id, product_id, download_policy=None, synchronize=True): + repo_info = self.satellite.cli_factory.make_repository( + { + 'product-id': product_id, + 'content-type': self.content_type, + 'url': self.url, + 'ansible-collection-requirements': f'{{collections: {self.requirements}}}', + } + ) + self._repo_info = repo_info + if synchronize: + self.synchronize() + return repo_info + + class OSTreeRepository(BaseRepository): """Custom OSTree repository""" @@ -305,8 +388,7 @@ def __repr__(self): f'' ) - else: - return f'' + return f'' def create( self, @@ -590,7 +672,9 @@ def setup_content_view(self, org_id, lce_id=None): content_view = self.satellite.cli.ContentView.info({'id': content_view['id']}) return content_view, lce - def setup_activation_key(self, org_id, content_view_id, lce_id, subscription_names=None): + def setup_activation_key( + self, org_id, content_view_id, lce_id, subscription_names=None, override=None + ): """Create activation and associate content-view, lifecycle environment and subscriptions""" if subscription_names is None: @@ -602,6 +686,19 @@ def setup_activation_key(self, org_id, 
content_view_id, lce_id, subscription_nam 'content-view-id': content_view_id, } ) + if override is not None: + for repo in self.satellite.cli.ActivationKey.product_content( + {'id': activation_key['id'], 'content-access-mode-all': 1} + ): + self.satellite.cli.ActivationKey.content_override( + { + 'id': activation_key['id'], + 'content-label': repo['label'], + 'value': int(override), + } + ) + if self.satellite.is_sca_mode_enabled(org_id): + return activation_key # Add subscriptions to activation-key # Get organization subscriptions subscriptions = self.satellite.cli.Subscription.list( @@ -646,17 +743,18 @@ def setup_content( upload_manifest=False, download_policy='on_demand', rh_subscriptions=None, + override=None, ): """ Setup content view and activation key of all the repositories. :param org_id: The organization id - :param lce_id: The lifecycle environment id + :param lce_id: The lifecycle environment id :param upload_manifest: Whether to upload the manifest (The manifest is uploaded only if needed) :param download_policy: The repositories download policy - :param rh_subscriptions: The RH subscriptions to be added to activation - key + :param rh_subscriptions: The RH subscriptions to be added to activation key + :param override: Content override (True = enable, False = disable, None = no action) """ if self._repos_info: raise RepositoryAlreadyCreated('Repositories already created can not setup content') @@ -675,9 +773,18 @@ def setup_content( subscription_names = list(rh_subscriptions) if custom_product_name: subscription_names.append(custom_product_name) - activation_key = self.setup_activation_key( - org_id, content_view['id'], lce_id, subscription_names=subscription_names - ) + if not self.satellite.is_sca_mode_enabled(org_id): + activation_key = self.setup_activation_key( + org_id, + content_view['id'], + lce_id, + subscription_names=subscription_names, + override=override, + ) + else: + activation_key = self.setup_activation_key( + org_id, content_view['id'], lce_id, override=override + ) setup_content_data = dict( activation_key=activation_key, content_view=content_view, @@ -720,13 +827,12 @@ def setup_virtual_machine( patch_os_release_distro = self.os_repo.distro rh_repo_ids = [] if enable_rh_repos: - rh_repo_ids = [getattr(repo, 'rh_repository_id') for repo in self.rh_repos] + rh_repo_ids = [repo.rh_repository_id for repo in self.rh_repos] repo_labels = [] if enable_custom_repos: repo_labels = [ repo['label'] for repo in self.custom_repos_info if repo['content-type'] == 'yum' ] - vm.contenthost_setup( self.satellite, self.organization['label'], diff --git a/robottelo/host_helpers/satellite_mixins.py b/robottelo/host_helpers/satellite_mixins.py index 82f4a4275c8..3c26862c3bf 100644 --- a/robottelo/host_helpers/satellite_mixins.py +++ b/robottelo/host_helpers/satellite_mixins.py @@ -1,4 +1,5 @@ import contextlib +from functools import lru_cache import io import os import random @@ -6,15 +7,18 @@ import requests -from robottelo.cli.base import CLIReturnCodeError from robottelo.cli.proxy import CapsuleTunnelError from robottelo.config import settings -from robottelo.constants import PULP_EXPORT_DIR -from robottelo.constants import PULP_IMPORT_DIR -from robottelo.constants import PUPPET_COMMON_INSTALLER_OPTS -from robottelo.constants import PUPPET_SATELLITE_INSTALLER +from robottelo.constants import ( + PULP_EXPORT_DIR, + PULP_IMPORT_DIR, + PUPPET_COMMON_INSTALLER_OPTS, + PUPPET_SATELLITE_INSTALLER, +) +from robottelo.exceptions import CLIReturnCodeError from 
robottelo.host_helpers.api_factory import APIFactory from robottelo.host_helpers.cli_factory import CLIFactory +from robottelo.host_helpers.ui_factory import UIFactory from robottelo.logging import logger from robottelo.utils.installer import InstallerCommand from robottelo.utils.manifest import clone @@ -141,13 +145,15 @@ def upload_manifest(self, org_id, manifest=None, interface='API', timeout=None): :returns: the manifest upload result """ - if manifest is None: + if not isinstance(manifest, bytes | io.BytesIO) and ( + not hasattr(manifest, 'content') or manifest.content is None + ): manifest = clone() if timeout is None: # Set the timeout to 1500 seconds to align with the API timeout. timeout = 1500000 if interface == 'CLI': - if isinstance(manifest.content, (bytes, io.BytesIO)): + if hasattr(manifest, 'path'): self.put(f'{manifest.path}', f'{manifest.name}') result = self.cli.Subscription.upload( {'file': manifest.name, 'organization-id': org_id}, timeout=timeout @@ -158,7 +164,7 @@ def upload_manifest(self, org_id, manifest=None, interface='API', timeout=None): {'file': manifest.filename, 'organization-id': org_id}, timeout=timeout ) else: - if not isinstance(manifest, (bytes, io.BytesIO)): + if not isinstance(manifest, bytes | io.BytesIO): manifest = manifest.content result = self.api.Subscription().upload( data={'organization_id': org_id}, files={'content': manifest} @@ -183,11 +189,10 @@ def publish_content_view(self, org, repo_list): :returns: A dictionary containing the details of the published content view. """ - repo = repo_list if type(repo_list) is list else [repo_list] + repo = repo_list if isinstance(repo_list, list) else [repo_list] content_view = self.api.ContentView(organization=org, repository=repo).create() content_view.publish() - content_view = content_view.read() - return content_view + return content_view.read() def move_pulp_archive(self, org, export_message): """ @@ -195,6 +200,7 @@ def move_pulp_archive(self, org, export_message): sets ownership, returns import path """ self.execute( + f'rm -rf {PULP_IMPORT_DIR}/{org.name} &&' f'mv {PULP_EXPORT_DIR}/{org.name} {PULP_IMPORT_DIR} && ' f'chown -R pulp:pulp {PULP_IMPORT_DIR}' ) @@ -202,11 +208,7 @@ def move_pulp_archive(self, org, export_message): # removes everything before export path, # replaces EXPORT_PATH by IMPORT_PATH, # removes metadata filename - import_path = os.path.dirname( - re.sub(rf'.*{PULP_EXPORT_DIR}', PULP_IMPORT_DIR, export_message) - ) - - return import_path + return os.path.dirname(re.sub(rf'.*{PULP_EXPORT_DIR}', PULP_IMPORT_DIR, export_message)) class SystemInfo: @@ -229,9 +231,9 @@ def available_capsule_port(self): :rtype: int """ port_pool_range = settings.fake_capsules.port_range - if type(port_pool_range) is str: + if isinstance(port_pool_range, str): port_pool_range = tuple(port_pool_range.split('-')) - if type(port_pool_range) is tuple and len(port_pool_range) == 2: + if isinstance(port_pool_range, tuple) and len(port_pool_range) == 2: port_pool = range(int(port_pool_range[0]), int(port_pool_range[1])) else: raise TypeError( @@ -257,14 +259,14 @@ def available_capsule_port(self): except ValueError: raise CapsuleTunnelError( f'Failed parsing the port numbers from stdout: {ss_cmd.stdout.splitlines()[:-1]}' - ) + ) from None try: # take the list of available ports and return randomly selected one return random.choice([port for port in port_pool if port not in used_ports]) except IndexError: raise CapsuleTunnelError( 'Failed to create ssh tunnel: No more ports available for mapping' - ) + ) 
from None

     @contextlib.contextmanager
     def default_url_on_new_port(self, oldport, newport):
@@ -302,7 +304,7 @@ def validate_pulp_filepath(
         self,
         org,
         dir_path,
-        file_names=['*.json', '*.tar.gz'],
+        file_names=('*.json', '*.tar.gz'),
     ):
         """Checks the existence of certain files in a pulp dir"""
         extension_query = ' -o '.join([f'-name "{file}"' for file in file_names])
@@ -310,6 +312,46 @@ def validate_pulp_filepath(
         return result.stdout


+class ProvisioningSetup:
+    """Provisioning tests setup helper methods"""
+
+    def configure_libvirt_cr(self, server_fqdn=settings.libvirt.libvirt_hostname):
+        """Configures Libvirt ComputeResource to communicate with Satellite
+
+        :param server_fqdn: Libvirt server FQDN
+        :return: None
+        """
+        # Generate SSH key-pair for foreman user and copy public key to libvirt server
+        self.execute('sudo -u foreman ssh-keygen -q -t rsa -f ~foreman/.ssh/id_rsa -N "" <<< y')
+        self.execute(f'ssh-keyscan -t ecdsa {server_fqdn} >> ~foreman/.ssh/known_hosts')
+        self.execute(
+            f'sshpass -p {settings.server.ssh_password} ssh-copy-id -o StrictHostKeyChecking=no '
+            f'-i ~foreman/.ssh/id_rsa root@{server_fqdn}'
+        )
+        # Install libvirt-client, and verify foreman user is able to communicate with Libvirt server
+        self.register_to_cdn()
+        self.execute('dnf -y --disableplugin=foreman-protector install libvirt-client')
+        assert (
+            self.execute(
+                f'su foreman -s /bin/bash -c "virsh -c qemu+ssh://root@{server_fqdn}/system list"'
+            ).status
+            == 0
+        )
+
+    def provisioning_cleanup(self, hostname, interface='API'):
+        if interface == 'CLI':
+            if self.cli.Host.exists(search=('name', hostname)):
+                self.cli.Host.delete({'name': hostname})
+            assert not self.cli.Host.exists(search=('name', hostname))
+        else:
+            host = self.api.Host().search(query={'search': f'name="{hostname}"'})
+            if host:
+                host[0].delete()
+            assert not self.api.Host().search(query={'search': f'name={hostname}'})
+        # Workaround BZ: 2207698
+        assert self.cli.Service.restart().status == 0
+
+
 class Factories:
     """Mixin that provides attributes for each factory type"""
@@ -324,3 +366,7 @@ def api_factory(self):
         if not getattr(self, '_api_factory', None):
             self._api_factory = APIFactory(self)
         return self._api_factory
+
+    @lru_cache
+    def ui_factory(self, session):
+        return UIFactory(self, session=session)
diff --git a/robottelo/host_helpers/ui_factory.py b/robottelo/host_helpers/ui_factory.py
new file mode 100644
index 00000000000..df156ad6d6f
--- /dev/null
+++ b/robottelo/host_helpers/ui_factory.py
@@ -0,0 +1,60 @@
+"""
+It is not meant to be used directly, but as part of a robottelo.hosts.Satellite instance.
+Pass the existing session object to the ui_factory method as a parameter.
+example: my_satellite.ui_factory(session).ui_method()
+"""
+from fauxfactory import gen_string
+
+from robottelo.constants import DEFAULT_CV, ENVIRONMENT
+
+
+class UIFactory:
+    """This class is part of a mixin and not to be used directly.
See robottelo.hosts.Satellite""" + + def __init__(self, satellite, session=None): + self._satellite = satellite + self._session = session + + def create_fake_host( + self, + host, + interface_id=None, + global_parameters=None, + host_parameters=None, + extra_values=None, + new_host_details=False, + ): + if interface_id is None: + interface_id = gen_string('alpha') + if extra_values is None: + extra_values = {} + os_name = f'{host.operatingsystem.name} {host.operatingsystem.major}' + name = host.name if host.name is not None else gen_string('alpha').lower() + values = { + 'host.name': name, + 'host.organization': host.organization.name, + 'host.location': host.location.name, + 'host.lce': ENVIRONMENT, + 'host.content_view': DEFAULT_CV, + 'operating_system.architecture': host.architecture.name, + 'operating_system.operating_system': os_name, + 'operating_system.media_type': 'All Media', + 'operating_system.media': host.medium.name, + 'operating_system.ptable': host.ptable.name, + 'operating_system.root_password': host.root_pass, + 'interfaces.interface.interface_type': 'Interface', + 'interfaces.interface.device_identifier': interface_id, + 'interfaces.interface.mac': host.mac, + 'interfaces.interface.domain': host.domain.name, + 'interfaces.interface.primary': True, + 'interfaces.interface.interface_additional_data.virtual_nic': False, + 'parameters.global_params': global_parameters, + 'parameters.host_params': host_parameters, + 'additional_information.comment': 'Host with fake data', + } + values.update(extra_values) + if new_host_details: + self._session.host_new.create(values) + else: + self._session.host.create(values) + return f'{name}.{host.domain.name}' diff --git a/robottelo/hosts.py b/robottelo/hosts.py index e261b24ad89..b51eecc15b4 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -1,68 +1,63 @@ +from configparser import ConfigParser +import contextlib +from contextlib import contextmanager +from datetime import datetime +from functools import cached_property, lru_cache import importlib import io import json +from pathlib import Path, PurePath import random import re -import time -from configparser import ConfigParser -from contextlib import contextmanager -from functools import cached_property -from functools import lru_cache -from pathlib import Path -from pathlib import PurePath from tempfile import NamedTemporaryFile -from urllib.parse import urljoin -from urllib.parse import urlparse -from urllib.parse import urlunsplit +import time +from urllib.parse import urljoin, urlparse, urlunsplit -import requests -import yaml from box import Box from broker import Broker from broker.hosts import Host from dynaconf.vendor.box.exceptions import BoxKeyError -from fauxfactory import gen_alpha -from fauxfactory import gen_string +from fauxfactory import gen_alpha, gen_string from manifester import Manifester from nailgun import entities from packaging.version import Version +import requests from ssh2.exceptions import AuthenticationError -from wait_for import TimedOutError -from wait_for import wait_for +from wait_for import TimedOutError, wait_for from wrapanapi.entities.vm import VmState +import yaml from robottelo import constants from robottelo.cli.base import Base -from robottelo.cli.factory import CLIFactoryError -from robottelo.config import configure_airgun -from robottelo.config import configure_nailgun -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings -from robottelo.constants import CUSTOM_PUPPET_MODULE_REPOS -from 
robottelo.constants import CUSTOM_PUPPET_MODULE_REPOS_PATH -from robottelo.constants import CUSTOM_PUPPET_MODULE_REPOS_VERSION -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import HAMMER_CONFIG -from robottelo.constants import KEY_CLOAK_CLI -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.constants import RHSSO_NEW_GROUP -from robottelo.constants import RHSSO_NEW_USER -from robottelo.constants import RHSSO_RESET_PASSWORD -from robottelo.constants import RHSSO_USER_UPDATE -from robottelo.constants import SATELLITE_VERSION -from robottelo.exceptions import DownloadFileError -from robottelo.exceptions import HostPingFailed -from robottelo.host_helpers import CapsuleMixins -from robottelo.host_helpers import ContentHostMixins -from robottelo.host_helpers import SatelliteMixins +from robottelo.config import ( + configure_airgun, + configure_nailgun, + robottelo_tmp_dir, + settings, +) +from robottelo.constants import ( + CUSTOM_PUPPET_MODULE_REPOS, + CUSTOM_PUPPET_MODULE_REPOS_PATH, + CUSTOM_PUPPET_MODULE_REPOS_VERSION, + DEFAULT_ARCHITECTURE, + HAMMER_CONFIG, + KEY_CLOAK_CLI, + PRDS, + REPOS, + REPOSET, + RHSSO_NEW_GROUP, + RHSSO_NEW_USER, + RHSSO_RESET_PASSWORD, + RHSSO_USER_UPDATE, + SATELLITE_VERSION, +) +from robottelo.exceptions import CLIFactoryError, DownloadFileError, HostPingFailed +from robottelo.host_helpers import CapsuleMixins, ContentHostMixins, SatelliteMixins from robottelo.logging import logger from robottelo.utils import validate_ssh_pub_key from robottelo.utils.datafactory import valid_emails_list from robottelo.utils.installer import InstallerCommand - POWER_OPERATIONS = { VmState.RUNNING: 'running', VmState.STOPPED: 'stopped', @@ -71,6 +66,18 @@ } +@lru_cache +def lru_sat_ready_rhel(rhel_ver): + rhel_version = rhel_ver or settings.server.version.rhel_version + deploy_args = { + 'deploy_rhel_version': rhel_version, + 'deploy_flavor': settings.flavors.default, + 'promtail_config_template_file': 'config_sat.j2', + 'workflow': settings.server.deploy_workflows.os, + } + return Broker(**deploy_args, host_class=Satellite).checkout() + + def get_sat_version(): """Try to read sat_version from envvar SATELLITE_VERSION if not available fallback to ssh connection to get it.""" @@ -162,6 +169,10 @@ class IPAHostError(Exception): pass +class ProxyHostError(Exception): + pass + + class ContentHost(Host, ContentHostMixins): run = Host.execute default_timeout = settings.server.ssh_client.command_timeout @@ -185,24 +196,59 @@ def __init__(self, hostname, auth=None, **kwargs): self.blank = kwargs.get('blank', False) super().__init__(hostname=hostname, **kwargs) + @classmethod + def get_hosts_from_inventory(cls, filter): + """Get an instance of a host from inventory using a filter""" + inv_hosts = Broker(host_class=cls).from_inventory(filter) + logger.debug('Found %s instances from inventory by filter: %s', len(inv_hosts), filter) + return inv_hosts + + @classmethod + def get_host_by_hostname(cls, hostname): + """Get an instance of a host from inventory by hostname""" + logger.info('Getting %s instance from inventory by hostname: %s', cls.__name__, hostname) + inv_hosts = cls.get_hosts_from_inventory(filter=f'@inv.hostname == "{hostname}"') + if not inv_hosts: + raise ContentHostError(f'No {cls.__name__} found in inventory by hostname {hostname}') + if len(inv_hosts) > 1: + raise ContentHostError( + f'Multiple {cls.__name__} found in inventory by hostname {hostname}' + ) + 
return inv_hosts[0]
+
     @property
     def satellite(self):
         if not self._satellite:
             self._satellite = Satellite()
         return self._satellite

+    @property
+    def _sat_host_record(self):
+        """Provide access to this host's Host record if it exists."""
+        hosts = self.satellite.api.Host().search(query={'search': self.hostname})
+        if not hosts:
+            logger.debug('No host record found for %s on Satellite', self.hostname)
+            return None
+        return hosts[0]
+
+    def _delete_host_record(self):
+        """Delete the Host record of this host from Satellite."""
+        if h_record := self._sat_host_record:
+            logger.debug('Deleting host record for %s from Satellite', self.hostname)
+            h_record.delete()
+
     @property
     def nailgun_host(self):
         """If this host is subscribed, provide access to its nailgun object"""
         if self.identity.get('registered_to') == self.satellite.hostname:
             try:
-                host_list = self.satellite.api.Host().search(query={'search': self.hostname})[0]
+                host = self._sat_host_record
             except Exception as err:
                 logger.error(f'Failed to get nailgun host for {self.hostname}: {err}')
-                host_list = None
-            return host_list
-        else:
-            logger.warning(f'Host {self.hostname} not registered to {self.satellite.hostname}')
+                host = None
+            return host
+        logger.warning(f'Host {self.hostname} not registered to {self.satellite.hostname}')
+        return None

     @property
     def subscribed(self):
@@ -235,50 +281,123 @@ def arch(self):

     @cached_property
     def _redhat_release(self):
-        """Process redhat-release file for distro and version information"""
+        """Process redhat-release file for distro and version information
+        This is a fallback for when /etc/os-release is not available
+        """
         result = self.execute('cat /etc/redhat-release')
         if result.status != 0:
             raise ContentHostError(f'Not able to cat /etc/redhat-release "{result.stderr}"')
-        match = re.match(r'(?P<distro>.+) release (?P<major>\d+)(.(?P<minor>\d+))?', result.stdout)
+        match = re.match(r'(?P<NAME>.+) release (?P<major>\d+)(.(?P<minor>\d+))?', result.stdout)
         if match is None:
             raise ContentHostError(f'Not able to parse release string "{result.stdout}"')
-        return match.groupdict()
+        r_release = match.groupdict()
+
+        # /etc/os-release compatibility layer
+        r_release['VERSION_ID'] = r_release['major']
+        # not every release has a minor version
+        r_release['VERSION_ID'] += f'.{r_release["minor"]}' if r_release['minor'] else ''
+
+        distro_map = {
+            'Fedora': {'NAME': 'Fedora Linux', 'ID': 'fedora'},
+            'CentOS': {'ID': 'centos'},
+            'Red Hat Enterprise Linux': {'ID': 'rhel'},
+        }
+        # Use the version map to set the NAME and ID fields
+        for distro, properties in distro_map.items():
+            if distro in r_release['NAME']:
+                r_release.update(properties)
+                break
+        return r_release

     @cached_property
+    def _os_release(self):
+        """Process os-release file for distro and version information"""
+        facts = {}
+        regex = r'^(["\'])(.*)(\1)$'
+        result = self.execute('cat /etc/os-release')
+        if result.status != 0:
+            logger.info(
+                f'Not able to cat /etc/os-release "{result.stderr}", '
+                'falling back to /etc/redhat-release'
+            )
+            return self._redhat_release
+        for ln in [line for line in result.stdout.splitlines() if line.strip()]:
+            line = ln.strip()
+            if line.startswith('#'):
+                continue
+            key, value = line.split('=')
+            if key and value:
+                facts[key] = re.sub(regex, r'\2', value).replace('\\', '')
+        return facts
+
+    @property
     def os_distro(self):
         """Get host's distro information"""
-        groups = self._redhat_release
-        return groups['distro']
+        return self._os_release['NAME']

-    @cached_property
+    @property
     def os_version(self):
         """Get host's OS version information

        :returns: A
``packaging.version.Version`` instance """ - groups = self._redhat_release - minor_version = '' if groups['minor'] is None else f'.{groups["minor"]}' - version_string = f'{groups["major"]}{minor_version}' - return Version(version=version_string) + return Version(self._os_release['VERSION_ID']) + + @property + def os_id(self): + """Get host's OS ID information""" + return self._os_release['ID'] + + @cached_property + def is_el(self): + """Boolean representation of whether this host is an EL host""" + return self.execute('stat /etc/redhat-release').status == 0 + + @property + def is_rhel(self): + """Boolean representation of whether this host is a RHEL host""" + return self.os_id == 'rhel' + + @property + def is_centos(self): + """Boolean representation of whether this host is a CentOS host""" + return self.os_id == 'centos' + + def list_cached_properties(self): + """Return a list of cached property names of this class""" + import inspect + + return [ + name + for name, value in inspect.getmembers(self.__class__) + if isinstance(value, cached_property) + ] + + def get_cached_properties(self): + """Return a dictionary of cached properties for this class""" + return {name: getattr(self, name) for name in self.list_cached_properties()} + + def clean_cached_properties(self): + """Delete all cached properties for this class""" + for name in self.list_cached_properties(): + with contextlib.suppress(KeyError): # ignore if property is not cached + del self.__dict__[name] def setup(self): + logger.debug('START: setting up host %s', self) if not self.blank: self.remove_katello_ca() + logger.debug('END: setting up host %s', self) + def teardown(self): + logger.debug('START: tearing down host %s', self) if not self.blank and not getattr(self, '_skip_context_checkin', False): - if self.nailgun_host: - self.nailgun_host.delete() self.unregister() - # Strip most unnecessary attributes from our instance for checkin - keep_keys = set(self.to_dict()) | { - 'release', - '_prov_inst', - '_cont_inst', - '_skip_context_checkin', - } - self.__dict__ = {k: v for k, v in self.__dict__.items() if k in keep_keys} - self.__class__ = Host + if type(self) is not Satellite: # do not delete Satellite's host record + self._delete_host_record() + + logger.debug('END: tearing down host %s', self) def power_control(self, state=VmState.RUNNING, ensure=True): """Lookup the host workflow for power on and execute @@ -292,14 +411,16 @@ def power_control(self, state=VmState.RUNNING, ensure=True): BrokerError: various error types to do with broker execution ContentHostError: if the workflow status isn't successful and broker didn't raise """ + if getattr(self, '_cont_inst', None): + raise NotImplementedError('Power control not supported for container instances') try: vm_operation = POWER_OPERATIONS.get(state) workflow_name = settings.broker.host_workflows.power_control - except (AttributeError, KeyError): + except (AttributeError, KeyError) as err: raise NotImplementedError( 'No workflow in broker.host_workflows for power control, ' 'or VM operation not supported' - ) + ) from err assert ( # TODO read the kwarg name from settings too? 
Broker() @@ -318,8 +439,23 @@ def power_control(self, state=VmState.RUNNING, ensure=True): self.connect, fail_condition=lambda res: res is not None, handle_exception=True ) # really broad diaper here, but connection exceptions could be a ton of types - except TimedOutError: - raise ContentHostError('Unable to connect to host that should be running') + except TimedOutError as toe: + raise ContentHostError('Unable to connect to host that should be running') from toe + + def wait_for_connection(self, timeout=180): + try: + wait_for( + self.connect, + fail_condition=lambda res: res is not None, + handle_exception=True, + raise_original=True, + timeout=timeout, + delay=1, + ) + except (ConnectionRefusedError, ConnectionAbortedError, TimedOutError) as err: + raise ContentHostError( + f'Unable to establish SSH connection to host {self} after {timeout} seconds' + ) from err def download_file(self, file_url, local_path=None, file_name=None): """Downloads file from given fileurl to directory specified by local_path by given filename @@ -381,6 +517,7 @@ def enable_repo(self, repo, force=False): downstream_repo = settings.repos.capsule_repo if force or settings.robottelo.cdn or not downstream_repo: return self.execute(f'subscription-manager repos --enable {repo}') + return None def subscription_manager_list_repos(self): return self.execute('subscription-manager repos --list') @@ -391,10 +528,12 @@ def subscription_manager_status(self): def subscription_manager_list(self): return self.execute('subscription-manager list') - def subscription_manager_get_pool(self, sub_list=[]): + def subscription_manager_get_pool(self, sub_list=None): """ Return pool ids for the corresponding subscriptions in the list """ + if sub_list is None: + sub_list = [] pool_ids = [] for sub in sub_list: result = self.execute( @@ -406,10 +545,12 @@ def subscription_manager_get_pool(self, sub_list=[]): pool_ids.append(result) return pool_ids - def subscription_manager_attach_pool(self, pool_list=[]): + def subscription_manager_attach_pool(self, pool_list=None): """ Attach pool ids to the host and return the result """ + if pool_list is None: + pool_list = [] result = [] for pool in pool_list: result.append(self.execute(f'subscription-manager attach --pool={pool}')) @@ -482,8 +623,8 @@ def install_katello_agent(self): # We're in a traditional VM, so goferd should be running after katello-agent install try: wait_for(lambda: self.execute('service goferd status').status == 0) - except TimedOutError: - raise ContentHostError('katello-agent is not running') + except TimedOutError as err: - raise ContentHostError('katello-agent is not running') from err def install_katello_host_tools(self): """Installs Katello host tools on the broker virtual machine @@ -529,6 +670,8 @@ def remove_katello_ca(self): :return: None. :raises robottelo.hosts.ContentHostError: If katello-ca wasn't removed. """ + # unregister host from CDN to avoid subscription leakage + self.execute('subscription-manager unregister') # Not checking the status here, as rpm can be not even installed # and deleting may fail self.execute('yum erase -y $(rpm -qa |grep katello-ca-consumer)') @@ -587,10 +730,10 @@ def register( """Registers content host to the Satellite or Capsule server using a global registration template. - :param target: Satellite or Capusle object to register to, required. - :param org: Organization to register content host for, required. - :param loc: Location to register content host for, required. + :param org: Organization to register content host to.
Previously required; pass None to omit. + :param loc: Location to register content host for. Previously required; pass None to omit. :param activation_keys: Activation key name to register content host with, required. + :param target: Satellite or Capsule object to register to, required. :param setup_insights: Install and register Insights client, requires OS repo. :param setup_remote_execution: Copy remote execution SSH key. :param setup_remote_execution_pull: Deploy pull provider client on host @@ -609,18 +752,32 @@ """ options = { 'activation-keys': activation_keys, - 'organization-id': org.id, - 'location-id': loc.id, 'insecure': str(insecure).lower(), 'update-packages': str(update_packages).lower(), } + if org is not None: + if isinstance(org, entities.Organization): + options['organization-id'] = org.id + elif isinstance(org, dict): + options['organization-id'] = org['id'] + else: + raise ValueError('org must be a dict or an Organization object') + + if loc is not None: + if isinstance(loc, entities.Location): + options['location-id'] = loc.id + elif isinstance(loc, dict): + options['location-id'] = loc['id'] + else: + raise ValueError('loc must be a dict or a Location object') + if target.__class__.__name__ == 'Capsule': options['smart-proxy'] = target.hostname elif target is not None and target.__class__.__name__ not in ['Capsule', 'Satellite']: raise ValueError('Global registration method can be used with Satellite/Capsule only') if lifecycle_environment is not None: - options['lifecycle_environment_id'] = lifecycle_environment.id + options['lifecycle-environment-id'] = lifecycle_environment.id if operating_system is not None: options['operatingsystem-id'] = operating_system.id if hostgroup is not None: @@ -740,7 +897,7 @@ def put(self, local_path, remote_path=None): If local_path is a manifest object, write its contents to a temporary file then continue with the upload. """ - if 'utils.Manifest' in str(local_path): + if 'utils.manifest' in str(local_path): with NamedTemporaryFile(dir=robottelo_tmp_dir) as content_file: content_file.write(local_path.content.read()) content_file.flush() @@ -840,7 +997,7 @@ def update_known_hosts(self, ssh_key_name, host, user=None): f'Failed to put hostname in ssh known_hosts files:\n{result.stderr}' ) - def configure_puppet(self, proxy_hostname=None): + def configure_puppet(self, proxy_hostname=None, run_puppet_agent=True): """Configures puppet on the virtual machine/Host. :param proxy_hostname: external capsule hostname :return: None. @@ -878,14 +1035,16 @@ # sat6 under the capsule --> certifcates or on capsule via cli "puppetserver # ca list", so that we sign it. self.execute('/opt/puppetlabs/bin/puppet agent -t') - proxy_host = Host(proxy_hostname) + proxy_host = Host(hostname=proxy_hostname) proxy_host.execute(f'puppetserver ca sign --certname {cert_name}') - # This particular puppet run would create the host entity under - # 'All Hosts' and let's redirect stderr to /dev/null as errors at - # this stage can be ignored. - result = self.execute('/opt/puppetlabs/bin/puppet agent -t 2> /dev/null') - if result.status: - raise ContentHostError('Failed to configure puppet on the content host') + + if run_puppet_agent: + # This particular puppet run would create the host entity under + # 'All Hosts' and let's redirect stderr to /dev/null as errors at + # this stage can be ignored.
+ result = self.execute('/opt/puppetlabs/bin/puppet agent -t 2> /dev/null') + if result.status: + raise ContentHostError('Failed to configure puppet on the content host') def execute_foreman_scap_client(self, policy_id=None): """Executes foreman_scap_client on the vm to create security audit report. @@ -911,6 +1070,7 @@ def execute_foreman_scap_client(self, policy_id=None): f'stdout: {result.stdout} host_data_stdout: {data.stdout}, ' f'and host_data_stderr: {data.stderr}' ) + return result.stdout def configure_rex(self, satellite, org, subnet_id=None, by_ip=True, register=True): """Setup a VM host for remote execution. @@ -990,6 +1150,8 @@ def configure_rhai_client( # Register client if self.execute('insights-client --register').status != 0: raise ContentHostError('Unable to register client to Insights through Satellite') + if self.execute('insights-client --test-connection').status != 0: + raise ContentHostError('Test connection failed via insights.') def unregister_insights(self): """Unregister insights client. @@ -1108,18 +1270,19 @@ def virt_who_hypervisor_config( :param bool upload_manifest: whether to upload the organization manifest :param list extra_repos: (Optional) repositories dict options to setup additionally. """ - from robottelo.cli.org import Org - from robottelo.cli import factory as cli_factory - from robottelo.cli.lifecycleenvironment import LifecycleEnvironment - from robottelo.cli.subscription import Subscription - from robottelo.cli.virt_who_config import VirtWhoConfig - org = cli_factory.make_org() if org_id is None else Org.info({'id': org_id}) + org = ( + satellite.cli_factory.make_org() + if org_id is None + else satellite.cli.Org.info({'id': org_id}) + ) if lce_id is None: - lce = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + lce = satellite.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) else: - lce = LifecycleEnvironment.info({'id': lce_id, 'organization-id': org['id']}) + lce = satellite.cli.LifecycleEnvironment.info( + {'id': lce_id, 'organization-id': org['id']} + ) extra_repos = extra_repos or [] repos = [ # Red Hat Satellite Tools @@ -1133,9 +1296,9 @@ def virt_who_hypervisor_config( } ] repos.extend(extra_repos) - content_setup_data = cli_factory.setup_cdn_and_custom_repos_content( - org['id'], - lce['id'], + content_setup_data = satellite.cli_factory.setup_cdn_and_custom_repos_content( + org['id'], + lce['id'], repos, upload_manifest=upload_manifest, rh_subscriptions=[constants.DEFAULT_SUBSCRIPTION_NAME], ) @@ -1178,7 +1341,7 @@ def virt_who_hypervisor_config( # create the virt-who directory on satellite satellite = Satellite() satellite.execute(f'mkdir -p {virt_who_deploy_directory}') - VirtWhoConfig.fetch({'id': config_id, 'output': virt_who_deploy_file}) + satellite.cli.VirtWhoConfig.fetch({'id': config_id, 'output': virt_who_deploy_file}) # remote_copy from satellite to self satellite.session.remote_copy(virt_who_deploy_file, self) @@ -1244,7 +1407,9 @@ def virt_who_hypervisor_config( virt_who_hypervisor_host = org_hosts[0] subscription_id = None if hypervisor_hostname and subscription_name: - subscriptions = Subscription.list({'organization-id': org_id}, per_page=False) + subscriptions = satellite.cli.Subscription.list( + {'organization-id': org_id}, per_page=False + ) for subscription in subscriptions: if subscription['name'] == subscription_name: subscription_id = subscription['id'] @@ -1306,8 +1471,10 @@ def install_tracer(self): raise ContentHostError('There was an error installing
katello-host-tools-tracer') self.execute('katello-tracer-upload') - def register_to_cdn(self, pool_ids=[settings.subscription.rhn_poolid]): + def register_to_cdn(self, pool_ids=None): """Subscribe satellite to CDN""" + if pool_ids is None: + pool_ids = [settings.subscription.rhn_poolid] self.remove_katello_ca() cmd_result = self.register_contenthost( org=None, @@ -1346,6 +1513,8 @@ def update_host_location(self, location): class Capsule(ContentHost, CapsuleMixins): rex_key_path = '~foreman-proxy/.ssh/id_rsa_foreman_proxy.pub' + product_rpm_name = 'satellite-capsule' + upstream_rpm_name = 'foreman-proxy' @property def nailgun_capsule(self): @@ -1364,12 +1533,9 @@ def satellite(self): answers = Box(yaml.load(data, yaml.FullLoader)) sat_hostname = urlparse(answers.foreman_proxy.foreman_base_url).netloc # get the Satellite hostname from the answer file - hosts = Broker(host_class=Satellite).from_inventory( - filter=f'@inv.hostname == "{sat_hostname}"' - ) - if hosts: - self._satellite = hosts[0] - else: + try: + self._satellite = Satellite.get_host_by_hostname(sat_hostname) + except ContentHostError: logger.debug( f'No Satellite host found in inventory for {self.hostname}. ' 'Satellite object with the same hostname will be created anyway.' @@ -1392,15 +1558,25 @@ def is_upstream(self): :return: True if no downstream satellite RPMS are installed :rtype: bool """ - return self.execute('rpm -q satellite-capsule &>/dev/null').status != 0 + return self.execute(f'rpm -q {self.product_rpm_name}').status != 0 + + @cached_property + def is_stream(self): + """Check if the Capsule is a stream release or not + + :return: True if the Capsule is a stream release + :rtype: bool + """ + if self.is_upstream: + return False + return ( + 'stream' in self.execute(f'rpm -q --qf "%{{RELEASE}}" {self.product_rpm_name}').stdout + ) @cached_property def version(self): - if not self.is_upstream: - version = self.execute('rpm -q satellite-capsule').stdout - return 'stream' if 'stream' in version else version.split('-')[2] - else: - return 'upstream' + rpm_name = self.upstream_rpm_name if self.is_upstream else self.product_rpm_name + return self.execute(f'rpm -q --qf "%{{VERSION}}" {rpm_name}').stdout @cached_property def url(self): @@ -1429,6 +1605,7 @@ def check_services(self): for line in result.stdout.splitlines(): if error_msg in line: return line.replace(error_msg, '').strip() + return None def install(self, installer_obj=None, cmd_args=None, cmd_kwargs=None): """General purpose installer""" @@ -1442,7 +1619,22 @@ def get_features(self): """Get capsule features""" return requests.get(f'https://{self.hostname}:9090/features', verify=False).text - def capsule_setup(self, sat_host=None, **installer_kwargs): + def enable_capsule_downstream_repos(self): + """Enable CDN repos and capsule downstream repos on Capsule Host""" + # CDN Repos + self.register_to_cdn() + for repo in getattr(constants, f"OHSNAP_RHEL{self.os_version.major}_REPOS"): + result = self.enable_repo(repo, force=True) + if result.status: + raise CapsuleHostError(f'Repo enable at capsule host failed\n{result.stdout}') + # Downstream Capsule specific Repos + self.download_repofile( + product='capsule', + release=settings.capsule.version.release, + snap=settings.capsule.version.snap, + ) + + def capsule_setup(self, sat_host=None, capsule_cert_opts=None, **installer_kwargs): """Prepare the host and run the capsule installer""" self._satellite = sat_host or Satellite() @@ -1471,7 +1663,9 @@ def capsule_setup(self, sat_host=None, **installer_kwargs): raise 
CapsuleHostError(f'The satellite-capsule package was not found\n{result.stdout}') # Generate certificate, copy it to Capsule, run installer, check it succeeds - certs_tar, _, installer = self.satellite.capsule_certs_generate(self, **installer_kwargs) + if not capsule_cert_opts: + capsule_cert_opts = {} + certs_tar, _, installer = self.satellite.capsule_certs_generate(self, **capsule_cert_opts) self.satellite.session.remote_copy(certs_tar, self) installer.update(**installer_kwargs) result = self.install(installer) @@ -1490,14 +1684,22 @@ f'A core service is not running at capsule host\n{result.stdout}' ) + def update_download_policy(self, policy): + """Updates capsule's download policy to desired value""" + proxy = self.nailgun_smart_proxy.read() + proxy.download_policy = policy + proxy.update(['download_policy']) + def set_rex_script_mode_provider(self, mode='ssh'): """Set provider for remote execution script mode. One of: ssh(default), pull-mqtt, ssh-async""" - installer_opts = { - 'foreman-proxy-templates': 'true', - 'foreman-proxy-registration': 'true', - 'foreman-proxy-plugin-remote-execution-script-mode': mode, - } + + installer_opts = {'foreman-proxy-plugin-remote-execution-script-mode': mode} + + if self.__class__.__name__ == 'Capsule': + installer_opts['foreman-proxy-templates'] = 'true' + installer_opts['foreman-proxy-registration'] = 'true' + enable_mqtt_command = InstallerCommand( installer_opts=installer_opts, ) @@ -1508,6 +1710,22 @@ if result.status != 0: raise SatelliteHostError(f'Failed to enable pull provider: {result.stdout}') + def run_installer_arg(self, *args, timeout='20m'): + """Run an installer argument on capsule""" + installer_args = list(args) + installer_command = InstallerCommand( + installer_args=installer_args, + ) + result = self.execute( + installer_command.get_command(), + timeout=timeout, + ) + if result.status != 0: + raise SatelliteHostError( + f'Failed to execute with arguments: {installer_args}, ' + f'stderr: {result.stderr}' + ) + def set_mqtt_resend_interval(self, value): """Set the time interval in seconds at which the notification should be re-sent to the mqtt host until the job is picked up or cancelled""" @@ -1547,10 +1765,14 @@ def cli(self): except AttributeError: # not everything has an mro method, we don't care about them pass + self._cli._configured = True return self._cli class Satellite(Capsule, SatelliteMixins): + product_rpm_name = 'satellite' + upstream_rpm_name = 'foreman' + def __init__(self, hostname=None, **kwargs): hostname = hostname or settings.server.hostname # instance attr set by broker.Host self.omitting_credentials = False @@ -1559,6 +1781,19 @@ def __init__(self, hostname=None, **kwargs): # create dummy classes for later population self._api = type('api', (), {'_configured': False}) self._cli = type('cli', (), {'_configured': False}) + self.record_property = None + + def _swap_nailgun(self, new_version): + """Install a different version of nailgun from GitHub and invalidate the module cache.""" + import sys + + from pip._internal import main as pip_main + + pip_main(['uninstall', '-y', 'nailgun']) + pip_main(['install', f'https://github.com/SatelliteQE/nailgun/archive/{new_version}.zip']) + self._api = type('api', (), {'_configured': False}) + to_clear = [k for k in sys.modules if 'nailgun' in k] + for k in to_clear: + sys.modules.pop(k) @property def api(self): @@ -1567,7 +1802,7 @@
self._api = type('api', (), {'_configured': False}) if self._api._configured: return self._api - + from nailgun import entities as _entities # use a private import from nailgun.config import ServerConfig from nailgun.entity_mixins import Entity @@ -1584,10 +1819,10 @@ class DecClass(cls): self.nailgun_cfg = ServerConfig( auth=(settings.server.admin_username, settings.server.admin_password), url=f'{self.url}', - verify=False, + verify=settings.server.verify_ca, ) # add each nailgun entity to self.api, injecting our server config - for name, obj in entities.__dict__.items(): + for name, obj in _entities.__dict__.items(): try: if Entity in obj.mro(): # create a copy of the class and inject our server config into the __init__ @@ -1596,6 +1831,7 @@ class DecClass(cls): except AttributeError: # not everything has an mro method, we don't care about them pass + self._api._configured = True return self._api @property @@ -1625,6 +1861,7 @@ def cli(self): except AttributeError: # not everything has an mro method, we don't care about them pass + self._cli._configured = True return self._cli @contextmanager @@ -1633,6 +1870,7 @@ def omit_credentials(self): yield self.omitting_credentials = False + @contextmanager def ui_session(self, testname=None, user=None, password=None, url=None, login=True): """Initialize an airgun Session object and store it as self.ui_session""" @@ -1644,15 +1882,27 @@ def get_caller(): for frame in inspect.stack(): if frame.function.startswith('test_'): return frame.function + return None - return Session( - session_name=testname or get_caller(), - user=user or settings.server.admin_username, - password=password or settings.server.admin_password, - url=url, - hostname=self.hostname, - login=login, - ) + try: + ui_session = Session( + session_name=testname or get_caller(), + user=user or settings.server.admin_username, + password=password or settings.server.admin_password, + url=url, + hostname=self.hostname, + login=login, + ) + yield ui_session + except Exception: + raise + finally: + if self.record_property is not None and settings.ui.record_video: + video_url = settings.ui.grid_url.replace( + ':4444', f'/videos/{ui_session.ui_session_id}/video.mp4' + ) + self.record_property('video_url', video_url) + self.record_property('session_id', ui_session.ui_session_id) @property def satellite(self): @@ -1661,28 +1911,24 @@ def satellite(self): return self return self._satellite - @cached_property - def is_upstream(self): - """Figure out which product distribution is installed on the server. 
- - :return: True if no downstream satellite RPMS are installed - :rtype: bool - """ - return self.execute('rpm -q satellite &>/dev/null').status != 0 - - @cached_property - def version(self): - if not self.is_upstream: - version = self.execute('rpm -q satellite').stdout - return 'stream' if 'stream' in version else version.split('-')[1] - else: - return 'upstream' - def is_remote_db(self): return ( self.execute(f'grep "db_manage: false" {constants.SATELLITE_ANSWER_FILE}').status == 0 ) + def setup_firewall(self): + # Set up the firewall on Satellite + assert ( + self.execute( + command='firewall-cmd --add-port="53/udp" --add-port="53/tcp" --add-port="67/udp" ' + '--add-port="69/udp" --add-port="80/tcp" --add-port="443/tcp" ' + '--add-port="5647/tcp" --add-port="8000/tcp" --add-port="9090/tcp" ' + '--add-port="8140/tcp"' + ).status + == 0 + ) + assert self.execute(command='firewall-cmd --runtime-to-permanent').status == 0 + def capsule_certs_generate(self, capsule, cert_path=None, **extra_kwargs): """Generate capsule certs, returning the cert path, installer command stdout and args""" cert_file_path = cert_path or f'/root/{capsule.hostname}-certs.tar' @@ -1698,6 +1944,11 @@ def capsule_certs_generate(self, capsule, cert_path=None, **extra_kwargs): install_cmd = InstallerCommand.from_cmd_str(cmd_str=result.stdout) return cert_file_path, result, install_cmd + def load_remote_yaml_file(self, file_path): + """Load a remote yaml file and return a Box object""" + data = self.session.sftp_read(file_path, return_data=True) + return Box(yaml.load(data, yaml.FullLoader)) + def __enter__(self): """Satellite objects can be used as a context manager to temporarily force everything to use the Satellite object's hostname. @@ -1773,7 +2024,7 @@ def delete_puppet_class(self, puppetclass_name): for hostgroup in puppet_class.read().hostgroup: hostgroup.delete_puppetclass(data={'puppetclass_id': puppet_class.id}) # Search and remove puppet class from affected hosts - for host in self.api.Host().search(query={'search': f'class={puppet_class.name}'}): + for host in self.api.Host(puppetclass=f'{puppet_class.name}').search(): host.delete_puppetclass(data={'puppetclass_id': puppet_class.id}) # Remove puppet class entity puppet_class.delete() @@ -1835,7 +2086,7 @@ def register_host_custom_repo(self, module_org, rhel_contenthost, repo_urls): """Register content host to Satellite and sync repos :param module_org: Org where contenthost will be registered. - :param rhel_contenthost: contenthost to be register with Satellite. + :param rhel_contenthost: contenthost to be registered with Satellite. :param repo_urls: List of URLs to be synced and made available to contenthost via subscription-manager. :return: None @@ -1895,6 +2146,9 @@ def register_host_custom_repo(self, module_org, rhel_contenthost, repo_urls): # refresh repository metadata on the host rhel_contenthost.execute('subscription-manager repos --list') + # Override all repos to enabled + rhel_contenthost.execute(r'subscription-manager repos --enable \*') + def enroll_ad_and_configure_external_auth(self, ad_data): """Enroll Satellite Server to an AD Server.
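The context-manager behavior documented in __enter__ above is easiest to see in a short usage sketch. This is illustrative only; the hostname and the Org.list call are placeholders, not taken from this diff:

    # hypothetical usage, assuming a reachable Satellite at this hostname
    sat = Satellite(hostname='sat1.example.com')
    with sat:
        # inside the block, helpers that read the global server hostname
        # are temporarily pointed at this Satellite object instead
        sat.cli.Org.list()
    # the previous hostname is restored when the block exits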
@@ -2007,6 +2261,37 @@ def enroll_ad_and_configure_external_auth(self, ad_data): self.execute('systemctl daemon-reload && systemctl restart httpd.service').status == 0 ) + def generate_inventory_report(self, org): + """Function to perform inventory upload.""" + generate_report_task = 'ForemanInventoryUpload::Async::UploadReportJob' + timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') + self.api.Organization(id=org.id).rh_cloud_generate_report() + wait_for( + lambda: self.api.ForemanTask() + .search(query={'search': f'{generate_report_task} and started_at >= "{timestamp}"'})[0] + .result + == 'success', + timeout=400, + delay=15, + silent_failure=True, + handle_exception=True, + ) + + def sync_inventory_status(self, org): + """Perform inventory sync""" + inventory_sync = self.api.Organization(id=org.id).rh_cloud_inventory_sync() + wait_for( + lambda: self.api.ForemanTask() + .search(query={'search': f'id = {inventory_sync["task"]["id"]}'})[0] + .result + == 'success', + timeout=400, + delay=15, + silent_failure=True, + handle_exception=True, + ) + return inventory_sync + class SSOHost(Host): """Class for RHSSO functions and setup""" @@ -2054,9 +2339,16 @@ def get_rhsso_groups_details(self, group_name): return query_group[0] def upload_rhsso_entity(self, json_content, entity_name): - """Helper method upload the entity json request as file on RHSSO Server""" + """Helper method to upload the RHSSO entity file on RHSSO Server. + Overwrites already existing file with the same name. + """ with open(entity_name, "w") as file: json.dump(json_content, file) + # Before uploading a file, remove the file of the same name. In sftp_write, + # if uploading a file of length n when there was already uploaded a file with + # the same name of length m, for n<m the tail of the old file would remain. if length > max_len: raise ValueError(f'length is too large, max: {max_len}') - names = { + return { 'alphanumeric': DOMAIN % gen_string('alphanumeric', length), 'alpha': DOMAIN % gen_string('alpha', length), 'numeric': DOMAIN % gen_string('numeric', length), 'latin1': DOMAIN % gen_string('latin1', length), 'utf8': DOMAIN % gen_utf8(length), } - return names @filtered_datapoint @@ -248,8 +241,8 @@ def invalid_values_list(interface=None): raise InvalidArgumentError('Valid interface values are api, cli, ui only') if interface == 'ui': return ['', ' '] + invalid_names_list() - else: # interface = api or cli or None - return ['', ' ', '\t'] + invalid_names_list() + # else: interface = api or cli or None + return ['', ' ', '\t'] + invalid_names_list() @filtered_datapoint @@ -279,7 +272,7 @@ def valid_data_list(interface=None): @filtered_datapoint def valid_docker_repository_names(): """Generates a list of valid names for Docker repository.""" - names = [ + return [ gen_string('alphanumeric', random.randint(1, 255)), gen_string('alpha', random.randint(1, 255)), gen_string('cjk', random.randint(1, 85)), @@ -288,7 +281,6 @@ def valid_docker_repository_names(): gen_string('utf8', random.randint(1, 85)), gen_string('html', random.randint(1, 85)), ] - return names @filtered_datapoint @@ -517,8 +509,7 @@ def valid_http_credentials(url_encoded=False): } for cred in credentials ] - else: - return credentials + return credentials def invalid_http_credentials(url_encoded=False): @@ -540,8 +531,7 @@ def invalid_http_credentials(url_encoded=False): } for cred in credentials ] - else: - return credentials + return credentials @filtered_datapoint diff --git a/robottelo/utils/decorators/__init__.py b/robottelo/utils/decorators/__init__.py index 1d79cba7a26..371afd50f16 100644 ---
a/robottelo/utils/decorators/__init__.py +++ b/robottelo/utils/decorators/__init__.py @@ -1,7 +1,6 @@ """Implements various decorators""" from functools import wraps - OBJECT_CACHE = {} diff --git a/robottelo/utils/decorators/func_locker.py b/robottelo/utils/decorators/func_locker.py index 723c4be1ff1..08f4073c614 100644 --- a/robottelo/utils/decorators/func_locker.py +++ b/robottelo/utils/decorators/func_locker.py @@ -39,11 +39,11 @@ def test_that_conflict_with_test_to_lock(self) with locking_function(self.test_to_lock): # do some operations that conflict with test_to_lock """ +from contextlib import contextmanager import functools import inspect import os import tempfile -from contextlib import contextmanager from pytest_services.locks import file_lock @@ -112,10 +112,7 @@ def _get_scope_path(scope, scope_kwargs=None, scope_context=None, create=True): scope_path_list = [_get_temp_lock_function_dir(create=create)] if scope: - if callable(scope): - scope_dir_name = scope(**scope_kwargs) - else: - scope_dir_name = scope + scope_dir_name = scope(**scope_kwargs) if callable(scope) else scope if scope_dir_name: scope_path_list.append(scope_dir_name) if scope_context: @@ -168,8 +165,8 @@ def _check_deadlock(lock_file_path, process_id): """ if os.path.exists(lock_file_path): try: - lock_file_handler = open(lock_file_path) - lock_file_content = lock_file_handler.read() + with open(lock_file_path) as lock_file_handler: + lock_file_content = lock_file_handler.read() except OSError as exp: # do nothing, but anyway log the exception logger.exception(exp) @@ -227,8 +224,8 @@ def lock_function( def main_wrapper(func): - setattr(func, '__class_name__', class_name) - setattr(func, '__function_locked__', True) + func.__class_name__ = class_name + func.__function_locked__ = True @functools.wraps(func) def function_wrapper(*args, **kwargs): @@ -265,8 +262,7 @@ def wait_function(func): if function: return main_wrapper(function) - else: - return wait_function + return wait_function @contextmanager diff --git a/robottelo/utils/decorators/func_shared/shared.py b/robottelo/utils/decorators/func_shared/shared.py index 568f6adf212..73961df50b5 100644 --- a/robottelo/utils/decorators/func_shared/shared.py +++ b/robottelo/utils/decorators/func_shared/shared.py @@ -87,24 +87,21 @@ def shared_class_method(cls, org=None, repo=None): import datetime import functools import hashlib +from importlib import import_module import inspect import os import sys import traceback import uuid -from importlib import import_module from nailgun.entities import Entity -from robottelo.config import setting_is_set -from robottelo.config import settings +from robottelo.config import setting_is_set, settings from robottelo.logging import logger -from robottelo.utils.decorators.func_shared import file_storage -from robottelo.utils.decorators.func_shared import redis_storage +from robottelo.utils.decorators.func_shared import file_storage, redis_storage from robottelo.utils.decorators.func_shared.file_storage import FileStorageHandler from robottelo.utils.decorators.func_shared.redis_storage import RedisStorageHandler - _storage_handlers = {'file': FileStorageHandler, 'redis': RedisStorageHandler} DEFAULT_STORAGE_HANDLER = 'file' @@ -395,8 +392,7 @@ def __call__(self): # if was not able to restore the original exception raise this one raise SharedFunctionException( - 'Error generated by process: {} Exception: {}' - ' error: {}'.format(pid, error_class_name, error) + f'Error generated by process: {pid} Exception: {error_class_name} error: 
{error}' ) if not call_function and self._inject: @@ -569,5 +565,4 @@ def wait_function(func): if function_: return main_wrapper(function_) - else: - return wait_function + return wait_function diff --git a/robottelo/utils/io/__init__.py b/robottelo/utils/io/__init__.py index 2a00c546720..ac23b5e83b5 100644 --- a/robottelo/utils/io/__init__.py +++ b/robottelo/utils/io/__init__.py @@ -1,8 +1,8 @@ # Helper methods for tests requiring I/0 import hashlib import json -import tarfile from pathlib import Path +import tarfile def get_local_file_data(path): @@ -82,6 +82,7 @@ def get_remote_report_checksum(satellite, org_id): continue checksum, _ = result.stdout.split(maxsplit=1) return checksum + return None def get_report_data(report_path): diff --git a/robottelo/utils/issue_handlers/__init__.py b/robottelo/utils/issue_handlers/__init__.py index d48c1948f24..d59c97aec63 100644 --- a/robottelo/utils/issue_handlers/__init__.py +++ b/robottelo/utils/issue_handlers/__init__.py @@ -1,9 +1,8 @@ # Methods related to issue handlers in general from robottelo.utils.issue_handlers import bugzilla - handler_methods = {'BZ': bugzilla.is_open_bz} -SUPPORTED_HANDLERS = tuple(f"{handler}:" for handler in handler_methods.keys()) +SUPPORTED_HANDLERS = tuple(f"{handler}:" for handler in handler_methods) def add_workaround(data, matches, usage, validation=(lambda *a, **k: True), **kwargs): @@ -18,10 +17,11 @@ def should_deselect(issue, data=None): """Check if test should be deselected based on marked issue.""" # Handlers can be extended to support different issue trackers. handlers = {'BZ': bugzilla.should_deselect_bz} - supported_handlers = tuple(f"{handler}:" for handler in handlers.keys()) + supported_handlers = tuple(f"{handler}:" for handler in handlers) if str(issue).startswith(supported_handlers): handler_code = str(issue).partition(":")[0] return handlers[handler_code.strip()](issue.strip(), data) + return None def is_open(issue, data=None): diff --git a/robottelo/utils/issue_handlers/bugzilla.py b/robottelo/utils/issue_handlers/bugzilla.py index 6fca11eb894..20836a3660d 100644 --- a/robottelo/utils/issue_handlers/bugzilla.py +++ b/robottelo/utils/issue_handlers/bugzilla.py @@ -1,21 +1,16 @@ -import re from collections import defaultdict +import re +from packaging.version import Version import pytest import requests -from packaging.version import Version -from tenacity import retry -from tenacity import stop_after_attempt -from tenacity import wait_fixed +from tenacity import retry, stop_after_attempt, wait_fixed from robottelo.config import settings -from robottelo.constants import CLOSED_STATUSES -from robottelo.constants import OPEN_STATUSES -from robottelo.constants import WONTFIX_RESOLUTIONS +from robottelo.constants import CLOSED_STATUSES, OPEN_STATUSES, WONTFIX_RESOLUTIONS from robottelo.hosts import get_sat_version from robottelo.logging import logger - # match any version as in `sat-6.2.x` or `sat-6.2.0` or `6.2.9` # The .version group being a `d.d` string that can be casted to Version() VERSION_RE = re.compile(r'(?:sat-)*?(?P\d\.\d)\.\w*') diff --git a/robottelo/utils/manifest.py b/robottelo/utils/manifest.py index 58539b6cb36..52c220f3c59 100644 --- a/robottelo/utils/manifest.py +++ b/robottelo/utils/manifest.py @@ -4,11 +4,10 @@ import uuid import zipfile -import requests from cryptography.hazmat.backends import default_backend as crypto_default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import serialization as crypto_serialization +from 
cryptography.hazmat.primitives import hashes, serialization as crypto_serialization from cryptography.hazmat.primitives.asymmetric import padding +import requests from robottelo.config import settings diff --git a/robottelo/utils/ohsnap.py b/robottelo/utils/ohsnap.py index cc6d551c70c..96241a759b6 100644 --- a/robottelo/utils/ohsnap.py +++ b/robottelo/utils/ohsnap.py @@ -1,12 +1,11 @@ """Utility module to communicate with Ohsnap API""" -import requests from box import Box from packaging.version import Version +import requests from wait_for import wait_for from robottelo import constants -from robottelo.exceptions import InvalidArgumentError -from robottelo.exceptions import RepositoryDataNotFound +from robottelo.exceptions import InvalidArgumentError, RepositoryDataNotFound from robottelo.logging import logger @@ -62,7 +61,6 @@ def ohsnap_repo_url(ohsnap, request_type, product, release, os_release, snap='') else: logger.warning(f'Ohsnap returned no releases for the given stream: {release}') - release = '.'.join(release.split('.')[:3]) # keep only major.minor.patch logger.debug(f'Release string after processing: {release}') return ( f'{ohsnap.host}/api/releases/' @@ -92,7 +90,9 @@ def dogfood_repository( try: repository = next(r for r in res.json() if r['label'] == repo) except StopIteration: - raise RepositoryDataNotFound(f'Repository "{repo}" is not provided by the given product') + raise RepositoryDataNotFound( + f'Repository "{repo}" is not provided by the given product' + ) from None repository['baseurl'] = repository['baseurl'].replace('$basearch', arch) # If repo check is enabled, check that the repository actually exists on the remote server dogfood_req = requests.get(repository['baseurl']) @@ -103,3 +103,26 @@ def dogfood_repository( f'{product=}, {release=}, {os_release=}, {snap=}' ) return Box(**repository) + + +def ohsnap_snap_rpms(ohsnap, sat_version, snap_version, os_major, is_all=True): + sat_xy = '.'.join(sat_version.split('.')[:2]) + url = f'{ohsnap.host}/api/releases/{sat_version}/snaps/{snap_version}/rpms' + if is_all: + url += '?all=true' + res, _ = wait_for( + lambda: requests.get(url, hooks={'response': ohsnap_response_hook}), + handle_exception=True, + raise_original=True, + timeout=ohsnap.request_retry.timeout, + delay=ohsnap.request_retry.delay, + ) + rpms = [] + rpm_repos = [f'satellite {sat_xy}', f'maintenance {sat_xy}'] + if res.status_code == 200: + for repo_data in res.json(): + if repo_data['rhel'] == os_major and any( + repo in repo_data['repository'].lower() for repo in rpm_repos + ): + rpms += repo_data['rpms'] + return rpms diff --git a/robottelo/utils/report_portal/portal.py b/robottelo/utils/report_portal/portal.py index bf377d52f20..c7e0df5d54e 100644 --- a/robottelo/utils/report_portal/portal.py +++ b/robottelo/utils/report_portal/portal.py @@ -1,7 +1,5 @@ import requests -from tenacity import retry -from tenacity import stop_after_attempt -from tenacity import wait_fixed +from tenacity import retry, stop_after_attempt, wait_fixed from robottelo.config import settings from robottelo.logging import logger @@ -103,10 +101,9 @@ def get_launches( resp.raise_for_status() # this should further filter out unfinished launches as RP API currently doesn't # support usage of the same filter type multiple times (filter.ne.status) - launches = [ + return [ launch for launch in resp.json()['content'] if launch['status'] not in ['INTERRUPTED'] ] - return launches @retry( stop=stop_after_attempt(6), diff --git a/robottelo/utils/shared_resource.py 
b/robottelo/utils/shared_resource.py new file mode 100644 index 00000000000..0ad0bd92e46 --- /dev/null +++ b/robottelo/utils/shared_resource.py @@ -0,0 +1,199 @@ +"""Allow multiple processes to communicate status on a single shared resource. + +This is useful for cases where multiple processes need to wait for all other processes to be ready +before continuing with some common action. The most common use case in this framework will likely +be to wait for all pre-upgrade setups to be ready before performing the upgrade. + +The system works by creating a file in /tmp with the name of the resource. This is a common file +where each process can communicate its status. The first process to register will be the main +watcher. The main watcher will wait for all other processes to be ready, then perform the action. +If the main watcher fails to complete the action, and the action is recoverable, another process +will take over as the main watcher and attempt to perform the action. If the action is not +recoverable, the main watcher will fail and release all other processes. + +It is recommended to use this class as a context manager, as it will automatically register and +report when the process is done. + +Example: + >>> with SharedResource("target_sat.hostname", upgrade_action, **upgrade_kwargs) as resource: + ... # Do pre-upgrade setup steps + ... resource.ready() # tell the other processes that we are ready + ... yield target_sat # give the upgraded satellite to the test + ... # Do post-upgrade cleanup steps if any +""" +import json +from pathlib import Path +import time +from uuid import uuid4 + +from broker.helpers import FileLock + + +class SharedResource: + """A class representing a shared resource. + + Attributes: + action (function): The function to be executed when the resource is ready. + action_args (tuple): The arguments to be passed to the action function. + action_kwargs (dict): The keyword arguments to be passed to the action function. + action_is_recoverable (bool): Whether the action is recoverable or not. + id (str): The unique identifier of the shared resource. + resource_file (Path): The path to the file representing the shared resource. + is_main (bool): Whether the current instance is the main watcher or not. + is_recovering (bool): Whether the current instance is recovering from an error or not. + """ + + def __init__(self, resource_name, action, *action_args, **action_kwargs): + """Initializes a new instance of the SharedResource class. + + Args: + resource_name (str): The name of the shared resource. + action (function): The function to be executed when the resource is ready. + action_args (tuple): The arguments to be passed to the action function. + action_kwargs (dict): The keyword arguments to be passed to the action function. + """ + self.resource_file = Path(f"/tmp/{resource_name}.shared") + self.lock_file = FileLock(self.resource_file) + self.id = str(uuid4().fields[-1]) + self.action = action + self.action_is_recoverable = action_kwargs.pop("action_is_recoverable", False) + self.action_args = action_args + self.action_kwargs = action_kwargs + self.is_recovering = False + + def _update_status(self, status): + """Updates the status of the shared resource. + + Args: + status (str): The new status of the shared resource.
+ """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + curr_data["statuses"][self.id] = status + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def _update_main_status(self, status): + """Updates the main status of the shared resource. + + Args: + status (str): The new main status of the shared resource. + """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + curr_data["main_status"] = status + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def _check_all_status(self, status): + """Checks if all watchers have the specified status. + + Args: + status (str): The status to check for. + + Returns: + bool: True if all watchers have the specified status, False otherwise. + """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + for watcher_id in curr_data["watchers"]: + if curr_data["statuses"].get(watcher_id) != status: + return False + return True + + def _wait_for_status(self, status): + """Waits until all watchers have the specified status. + + Args: + status (str): The status to wait for. + """ + while not self._check_all_status(status): + time.sleep(1) + + def _wait_for_main_watcher(self): + """Waits for the main watcher to finish.""" + while True: + curr_data = json.loads(self.resource_file.read_text()) + if curr_data["main_status"] != "done": + time.sleep(60) + elif curr_data["main_status"] == "action_error": + self._try_take_over() + elif curr_data["main_status"] == "error": + raise Exception(f"Error in main watcher: {curr_data['main_watcher']}") + else: + break + + def _try_take_over(self): + """Tries to take over as the main watcher.""" + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + if curr_data["main_status"] in ("action_error", "error"): + curr_data["main_status"] = "recovering" + curr_data["main_watcher"] = self.id + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + self.is_main = True + self.is_recovering = True + self.wait() + + def register(self): + """Registers the current process as a watcher.""" + with self.lock_file: + if self.resource_file.exists(): + curr_data = json.loads(self.resource_file.read_text()) + self.is_main = False + else: # First watcher to register, becomes the main watcher, and creates the file + curr_data = { + "watchers": [], + "statuses": {}, + "main_watcher": self.id, + "main_status": "waiting", + } + self.is_main = True + curr_data["watchers"].append(self.id) + curr_data["statuses"][self.id] = "pending" + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def ready(self): + """Marks the current process as ready to perform the action.""" + self._update_status("ready") + self.wait() + + def done(self): + """Marks the current process as done performing post actions.""" + self._update_status("done") + + def act(self): + """Attempt to perform the action.""" + try: + self.action(*self.action_args, **self.action_kwargs) + except Exception as err: + self._update_main_status("error") + raise err + + def wait(self): + """Top-level wait function, separating behavior between main and non-main watchers.""" + if self.is_main and not (self.is_recovering and not self.action_is_recoverable): + self._wait_for_status("ready") + self._update_main_status("acting") + self.act() + self._update_main_status("done") + else: + self._wait_for_main_watcher() + + def __enter__(self): + """Registers the current process as a watcher and returns the instance.""" + self.register() + return 
self + + def __exit__(self, exc_type, exc_value, traceback): + """Marks the current process as done and updates the main watcher if needed.""" + if exc_type is FileNotFoundError: + raise exc_value + if exc_type is None: + self.done() + if self.is_main: + self._wait_for_status("done") + self.resource_file.unlink() + else: + self._update_status("error") + if self.is_main: + self._update_main_status("error") + raise exc_value diff --git a/robottelo/utils/ssh.py b/robottelo/utils/ssh.py index c82a7ebc071..8b72bed3497 100644 --- a/robottelo/utils/ssh.py +++ b/robottelo/utils/ssh.py @@ -16,13 +16,12 @@ def get_client( from robottelo.config import settings from robottelo.hosts import ContentHost - client = ContentHost( + return ContentHost( hostname=hostname or settings.server.hostname, username=username or settings.server.ssh_username, password=password or settings.server.ssh_password, port=port or settings.server.ssh_client.port, ) - return client def command( diff --git a/robottelo/utils/vault.py b/robottelo/utils/vault.py new file mode 100644 index 00000000000..97f95755bbd --- /dev/null +++ b/robottelo/utils/vault.py @@ -0,0 +1,124 @@ +"""Hashicorp Vault Utils where vault CLI is wrapped to perform vault operations""" +import json +import os +import re +import subprocess +import sys + +from robottelo.exceptions import InvalidVaultURLForOIDC +from robottelo.logging import logger, robottelo_root_dir + + +class Vault: + HELP_TEXT = ( + "Vault CLI in not installed in your system, " + "refer link https://learn.hashicorp.com/tutorials/vault/getting-started-install to " + "install vault CLI as per your system spec!" + ) + + def __init__(self, env_file='.env'): + self.env_path = robottelo_root_dir.joinpath(env_file) + self.envdata = None + self.vault_enabled = None + + def setup(self): + if self.env_path.exists(): + self.envdata = self.env_path.read_text() + is_enabled = re.findall('^(?:.*\n)*VAULT_ENABLED_FOR_DYNACONF=(.*)', self.envdata) + if is_enabled: + self.vault_enabled = is_enabled[0] + self.export_vault_addr() + + def teardown(self): + if os.environ.get('VAULT_ADDR') is not None: + del os.environ['VAULT_ADDR'] + + def export_vault_addr(self): + vaulturl = re.findall('VAULT_URL_FOR_DYNACONF=(.*)', self.envdata)[0] + + # Set Vault CLI Env Var + os.environ['VAULT_ADDR'] = vaulturl + + # Dynaconf Vault Env Vars + if ( + self.vault_enabled + and self.vault_enabled in ['True', 'true'] + and 'localhost:8200' in vaulturl + ): + raise InvalidVaultURLForOIDC( + f"{vaulturl} doesn't support OIDC login," + "please change url to corp vault in env file!" + ) + + def exec_vault_command(self, command: str, **kwargs): + """A wrapper to execute the vault CLI commands + + :param comamnd str: The vault CLI command + :param kwargs dict: Arguments to the subprocess run command to customize the run behavior + """ + vcommand = subprocess.run(command, shell=True, capture_output=True, **kwargs) + if vcommand.returncode != 0: + verror = str(vcommand.stderr) + if vcommand.returncode == 127: + logger.error(f"Error! {self.HELP_TEXT}") + sys.exit(1) + if vcommand.stderr: + if 'Error revoking token' in verror: + logger.info("Token is alredy revoked!") + elif 'Error looking up token' in verror: + logger.info("Vault is not logged in!") + else: + logger.error(f"Error! {verror}") + return vcommand + + def login(self, **kwargs): + if ( + self.vault_enabled + and self.vault_enabled in ['True', 'true'] + and 'VAULT_SECRET_ID_FOR_DYNACONF' not in os.environ + and self.status(**kwargs).returncode != 0 + ): + logger.info( + "Warning! 
The browser is about to open for vault OIDC login, " + "close the tab once the sign-in is done!" ) + if ( + self.exec_vault_command(command="vault login -method=oidc", **kwargs).returncode + == 0 + ): + self.exec_vault_command(command="vault token renew -i 10h", **kwargs) + logger.info("Success! Vault OIDC Logged-In and extended for 10 hours!") + # Fetching tokens + token = self.exec_vault_command("vault token lookup --format json").stdout + token = json.loads(str(token.decode('UTF-8')))['data']['id'] + # Setting new token in env file + _envdata = re.sub( + '.*VAULT_TOKEN_FOR_DYNACONF=.*', + f"VAULT_TOKEN_FOR_DYNACONF={token}", + self.envdata, + ) + self.env_path.write_text(_envdata) + logger.info("Success! New OIDC token added to .env file to access secrets from vault!") + + def logout(self): + # Teardown - Setting dummy token in env file + _envdata = re.sub( + '.*VAULT_TOKEN_FOR_DYNACONF=.*', "# VAULT_TOKEN_FOR_DYNACONF=myroot", self.envdata + ) + self.env_path.write_text(_envdata) + vstatus = self.exec_vault_command('vault token revoke -self') + if vstatus.returncode == 0: + logger.info("Success! OIDC token removed from Env file successfully!") + + def status(self, **kwargs): + vstatus = self.exec_vault_command('vault token lookup', **kwargs) + if vstatus.returncode == 0: + logger.info(str(vstatus.stdout.decode('UTF-8'))) + return vstatus + + def __enter__(self): + self.setup() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.teardown() diff --git a/robottelo/utils/virtwho.py b/robottelo/utils/virtwho.py index b53480265b1..b1cac0012ca 100644 --- a/robottelo/utils/virtwho.py +++ b/robottelo/utils/virtwho.py @@ -3,11 +3,9 @@ import re import uuid -import requests -from fauxfactory import gen_integer -from fauxfactory import gen_string -from fauxfactory import gen_url +from fauxfactory import gen_integer, gen_string, gen_url from nailgun import entities +import requests from wait_for import wait_for from robottelo import ssh @@ -49,16 +47,15 @@ def get_system(system_type): 'password': getattr(settings.virtwho, system_type).guest_password, 'port': getattr(settings.virtwho, system_type).guest_port, } - elif system_type == 'satellite': + if system_type == 'satellite': return { 'hostname': settings.server.hostname, 'username': settings.server.ssh_username, 'password': settings.server.ssh_password, } - else: - raise VirtWhoError( - f'"{system_type}" system type is not supported. Please use one of {system_type_list}' - ) + raise VirtWhoError( + f'"{system_type}" system type is not supported.
Please use one of {system_type_list}' + ) def get_guest_info(hypervisor_type): @@ -117,8 +114,7 @@ def register_system(system, activation_key=None, org='Default_Organization', env ret, stdout = runcmd(cmd, system) if ret == 0 or "system has been registered" in stdout: return True - else: - raise VirtWhoError(f'Failed to register system: {system}') + raise VirtWhoError(f'Failed to register system: {system}') def virtwho_cleanup(): @@ -152,10 +148,9 @@ def get_virtwho_status(): return 'logerror' if any(key in stdout for key in running_stauts): return 'running' - elif any(key in stdout for key in stopped_status): + if any(key in stdout for key in stopped_status): return 'stopped' - else: - return 'undefined' + return 'undefined' def get_configure_id(name): @@ -166,8 +161,7 @@ def get_configure_id(name): config = VirtWhoConfig.info({'name': name}) if 'id' in config['general-information']: return config['general-information']['id'] - else: - raise VirtWhoError(f"No configure id found for {name}") + raise VirtWhoError(f"No configure id found for {name}") def get_configure_command(config_id, org=DEFAULT_ORG): @@ -200,10 +194,8 @@ def get_configure_option(option, filename): cmd = f"grep -v '^#' {filename} | grep ^{option}" ret, stdout = runcmd(cmd) if ret == 0 and option in stdout: - value = stdout.split('=')[1].strip() - return value - else: - raise VirtWhoError(f"option {option} is not exist or not be enabled in {filename}") + return stdout.split('=')[1].strip() + raise VirtWhoError(f"option {option} does not exist or is not enabled in {filename}") def get_rhsm_log(): @@ -218,7 +210,7 @@ def check_message_in_rhsm_log(message): """Check the message exist in /var/log/rhsm/rhsm.log""" wait_for( lambda: 'Host-to-guest mapping being sent to' in get_rhsm_log(), - timeout=10, + timeout=20, delay=2, ) logs = get_rhsm_log() @@ -238,7 +230,7 @@ def _get_hypervisor_mapping(hypervisor_type): """ wait_for( lambda: 'Host-to-guest mapping being sent to' in get_rhsm_log(), - timeout=10, + timeout=20, delay=2, ) logs = get_rhsm_log() @@ -266,8 +258,7 @@ def _get_hypervisor_mapping(hypervisor_type): break if hypervisor_name: return hypervisor_name, guest_name - else: - raise VirtWhoError(f"Failed to get the hypervisor_name for guest {guest_name}") + raise VirtWhoError(f"Failed to get the hypervisor_name for guest {guest_name}") def get_hypervisor_ahv_mapping(hypervisor_type): @@ -303,9 +294,9 @@ def get_hypervisor_ahv_mapping(hypervisor_type): # Always check the last json section to get the host_uuid for item in mapping: if 'entities' in item: - for item in item['entities']: - if 'host_uuid' in item: - system_uuid = item['host_uuid'] + for _item in item['entities']: + if 'host_uuid' in _item: + system_uuid = _item['host_uuid'] break message = f"Host UUID {system_uuid} found for VM: {guest_uuid}" for line in logs.split('\n'): @@ -350,6 +341,7 @@ def deploy_configure_by_command(command, hypervisor_type, debug=False, org='Defa raise VirtWhoError(f"Failed to deploy configure by {command}") if debug: return deploy_validation(hypervisor_type) + return None def deploy_configure_by_script( @@ -373,6 +365,7 @@ def deploy_configure_by_script( raise VirtWhoError(f"Failed to deploy configure by {script_filename}") if debug: return deploy_validation(hypervisor_type) + return None def deploy_configure_by_command_check(command): @@ -386,13 +379,12 @@ def deploy_configure_by_command_check(command): virtwho_cleanup() try: ret, stdout = runcmd(command) - except Exception: - raise VirtWhoError(f"Failed to deploy configure by
{command}") + except Exception as err: + raise VirtWhoError(f"Failed to deploy configure by {command}") from err else: if ret != 0 or 'Finished successfully' not in stdout: raise VirtWhoError(f"Failed to deploy configure by {command}") - else: - return 'Finished successfully' + return 'Finished successfully' def restart_virtwho_service(): @@ -412,7 +404,7 @@ def update_configure_option(option, value, config_file): :param value: set the option to the value :param config_file: path of virt-who config file """ - cmd = 'sed -i "s|^{0}.*|{0}={1}|g" {2}'.format(option, value, config_file) + cmd = f'sed -i "s|^{option}.*|{option}={value}|g" {config_file}' ret, output = runcmd(cmd) if ret != 0: raise VirtWhoError(f"Failed to set option {option} value to {value}") @@ -424,7 +416,7 @@ def delete_configure_option(option, config_file): :param option: the option you want to delete :param config_file: path of virt-who config file """ - cmd = 'sed -i "/^{0}/d" {1}; sed -i "/^#{0}/d" {1}'.format(option, config_file) + cmd = f'sed -i "/^{option}/d" {config_file}; sed -i "/^#{option}/d" {config_file}' ret, output = runcmd(cmd) if ret != 0: raise VirtWhoError(f"Failed to delete option {option}") @@ -439,11 +431,11 @@ def add_configure_option(option, value, config_file): """ try: get_configure_option(option, config_file) - except Exception: + except Exception as err: cmd = f'echo -e "\n{option}={value}" >> {config_file}' - ret, output = runcmd(cmd) + ret, _ = runcmd(cmd) if ret != 0: - raise VirtWhoError(f"Failed to add option {option}={value}") + raise VirtWhoError(f"Failed to add option {option}={value}") from err else: raise VirtWhoError(f"option {option} is already exist in {config_file}") @@ -458,9 +450,9 @@ def hypervisor_json_create(hypervisors, guests): :param guests: how many guests will be created """ hypervisors_list = [] - for i in range(hypervisors): + for _ in range(hypervisors): guest_list = [] - for c in range(guests): + for _ in range(guests): guest_list.append( { "guestId": str(uuid.uuid4()), @@ -471,8 +463,7 @@ def hypervisor_json_create(hypervisors, guests): name = str(uuid.uuid4()) hypervisor = {"guestIds": guest_list, "name": name, "hypervisorId": {"hypervisorId": name}} hypervisors_list.append(hypervisor) - mapping = {"hypervisors": hypervisors_list} - return mapping + return {"hypervisors": hypervisors_list} def create_fake_hypervisor_content(org_label, hypervisors, guests): @@ -508,7 +499,7 @@ def virtwho_package_locked(): assert "Packages are locked" in result[1] -def create_http_proxy(org, name=None, url=None, http_type='https'): +def create_http_proxy(org, location, name=None, url=None, http_type='https'): """ Creat a new http-proxy with attributes. :param name: Name of the proxy @@ -526,5 +517,21 @@ def create_http_proxy(org, name=None, url=None, http_type='https'): name=http_proxy_name, url=http_proxy_url, organization=[org.id], + location=[location.id], ).create() return http_proxy.url, http_proxy.name, http_proxy.id + + +def get_configure_command_option(deploy_type, args, org=DEFAULT_ORG): + """Return the deploy command line based on option. + :param str option: the unique id of the configure file you have created. + :param str org: the satellite organization name. 
+ """ + username, password = Base._get_username_password() + if deploy_type == 'location-id': + return f"hammer -u {username} -p {password} virt-who-config deploy --id {args['id']} --location-id '{args['location-id']}' " + if deploy_type == 'organization-title': + return f"hammer -u {username} -p {password} virt-who-config deploy --id {args['id']} --organization-title '{args['organization-title']}' " + if deploy_type == 'name': + return f"hammer -u {username} -p {password} virt-who-config deploy --name {args['name']} --organization '{org}' " + return None diff --git a/scripts/config_helpers.py b/scripts/config_helpers.py index 6176fa1cb5e..cf422588b05 100644 --- a/scripts/config_helpers.py +++ b/scripts/config_helpers.py @@ -3,8 +3,8 @@ import click import deepdiff -import yaml from logzero import logger +import yaml def merge_nested_dictionaries(original, new, overwrite=False): @@ -14,8 +14,8 @@ def merge_nested_dictionaries(original, new, overwrite=False): # if the key is not in the original, add it if key not in original: original[key] = value - # if the key is in the original, and the value is a dictionary, recurse - elif isinstance(value, dict): + # if the key is in the original, and original[key] and value are dictionaries, recurse + elif isinstance(original[key], dict) and isinstance(value, dict): # use deepdiff to check if the dictionaries are the same if deepdiff.DeepDiff(original[key], value): original[key] = merge_nested_dictionaries(original[key], value, overwrite) @@ -24,6 +24,7 @@ def merge_nested_dictionaries(original, new, overwrite=False): # if the key is in the original, and the value is a list, ask the user elif overwrite == "ask": choice_prompt = ( + "-------------------------\n" f"The current value for {key} is {original[key]}.\n" "Please choose an option:\n" "1. 
Keep the current value\n" @@ -34,7 +35,7 @@ def merge_nested_dictionaries(original, new, overwrite=False): user_choice = click.prompt(choice_prompt, type=int, default=1, show_default=True) if user_choice == 1: continue - elif user_choice == 2: + if user_choice == 2: original[key] = value elif user_choice == 3 and isinstance(value, list): original[key] = original[key] + value @@ -109,7 +110,7 @@ def merge(from_, strategy): user_choice = click.prompt(choice_prompt, type=int, default=1, show_default=True) if user_choice == 1: continue - elif user_choice == 2: + if user_choice == 2: logger.warning(f"Overwriting {real_name} with {file}") real_name.unlink() real_name.write_text(file.read_text()) diff --git a/scripts/customer_scenarios.py b/scripts/customer_scenarios.py index eb806468e21..b81137042b9 100755 --- a/scripts/customer_scenarios.py +++ b/scripts/customer_scenarios.py @@ -19,8 +19,7 @@ def make_path_list(path_list): paths = path_list.split(',') paths = [path for path in paths if any(target in path for target in targets)] return set(paths) - else: - return targets + return targets def get_bz_data(paths): @@ -31,12 +30,10 @@ def get_bz_data(paths): for test in tests: test_dict = test.to_dict() test_data = {**test_dict['tokens'], **test_dict['invalid-tokens']} - if 'bz' in test_data.keys(): - if ( - 'customerscenario' not in test_data.keys() - or test_data['customerscenario'] == 'false' - ): - path_result.append([test.name, test_data['bz']]) + if 'bz' in test_data and ( + 'customerscenario' not in test_data or test_data['customerscenario'] == 'false' + ): + path_result.append([test.name, test_data['bz']]) if path_result: result[path] = path_result return result @@ -64,8 +61,7 @@ def get_response(bzs): ) assert response.status_code == 200, 'BZ query unsuccessful' assert response.json().get('error') is not True, response.json().get('message') - bugs = response.json().get('bugs') - return bugs + return response.json().get('bugs') def query_bz(data): @@ -75,7 +71,7 @@ def query_bz(data): for test in tests: bugs = get_response(test[1]) for bug in bugs: - if 'external_bugs' in bug.keys() and len(bug['external_bugs']) > 1: + if 'external_bugs' in bug and len(bug['external_bugs']) > 1: customer_cases = [ case for case in bug['external_bugs'] diff --git a/scripts/fixture_cli.py b/scripts/fixture_cli.py new file mode 100644 index 00000000000..c68559d3c1b --- /dev/null +++ b/scripts/fixture_cli.py @@ -0,0 +1,84 @@ +from pathlib import Path + +import click +import pytest + + +def fixture_to_test(fixture_name): + """Convert a fixture name to a test name. 
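+
+    The generated tests are named with a "test_runfake_" prefix so the pytest
+    runs below can select exactly these tests with -k "test_runfake_".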
+
+    Basic Example: fixture_to_test("module_published_cv")
+    Returns: "def test_runfake_module_published_cv(module_published_cv):\n    assert True"
+
+    Parametrized Example: fixture_to_test("sat_azure:sat,puppet_sat")
+    Returns: "@pytest.mark.parametrize('sat_azure', ['sat', 'puppet_sat'], indirect=True)"
+    "\ndef test_runfake_sat_azure(sat_azure):\n    assert True"
+    """
+    if ":" not in fixture_name:
+        return f"def test_runfake_{fixture_name}({fixture_name}):\n    assert True"
+
+    fixture_name, params = fixture_name.split(":")
+    params = params.split(",")
+    return (
+        f"@pytest.mark.parametrize('{fixture_name}', {params}, indirect=True)\n"
+        f"def test_runfake_{fixture_name}({fixture_name}):\n    assert True"
+    )
+
+
+@click.command()
+@click.argument("fixtures", nargs=-1, required=True)
+@click.option(
+    "--from-file",
+    type=click.File("w"),
+    help="Run the fixtures from within a file, inheriting the file's context.",
+)
+@click.option(
+    "--verbose",
+    is_flag=True,
+    help="Toggle verbose mode (default is quiet).",
+)
+@click.option(
+    "--xdist-workers",
+    "-n",
+    type=int,
+    default=1,
+    help="Run the tests in parallel with xdist.",
+)
+def run_fixtures(fixtures, from_file, verbose, xdist_workers):
+    """Create a temporary test that depends on each fixture, then run it.
+
+    You can also run the fixtures from the context of a file, which is useful when testing fixtures
+    that don't live at a global scope.
+
+    Indirectly parametrized fixtures are also possible with this syntax: fixture_name:param1,param2,param3
+
+    Examples:
+        python scripts/fixture_cli.py module_published_cv module_subscribe_satellite
+        python scripts/fixture_cli.py module_lce --from-file tests/foreman/api/test_activationkey.py
+        python scripts/fixture_cli.py sat_azure:sat,puppet_sat
+    """
+    verbosity = "-v" if verbose else "-qq"
+    xdist_workers = str(xdist_workers)  # pytest expects a string
+    generated_tests = "import pytest\n\n" + "\n\n".join(map(fixture_to_test, fixtures))
+    if from_file:
+        from_file = Path(from_file.name)
+        # inject the test at the end of the file
+        with from_file.open("a") as f:
+            eof_pos = f.tell()
+            f.write(f"\n\n{generated_tests}")
+        pytest.main(
+            [verbosity, "-n", xdist_workers, str(from_file.resolve()), "-k", "test_runfake_"]
+        )
+        # remove the test from the file
+        with from_file.open("r+") as f:
+            f.seek(eof_pos)
+            f.truncate()
+    else:
+        temp_file = Path("test_DELETEME.py")
+        temp_file.write_text(generated_tests)
+        pytest.main([verbosity, "-n", xdist_workers, str(temp_file)])
+        temp_file.unlink()
+
+
+if __name__ == "__main__":
+    run_fixtures()
diff --git a/scripts/graph_entities.py b/scripts/graph_entities.py
index ff21877c31c..46038a3e3b0 100755
--- a/scripts/graph_entities.py
+++ b/scripts/graph_entities.py
@@ -9,8 +9,7 @@
 """
 import inspect

-from nailgun import entities
-from nailgun import entity_mixins
+from nailgun import entities, entity_mixins


 def graph():
@@ -25,9 +24,7 @@
     for entity_name, entity in entities_.items():
         # Graph out which entities this entity depends on. 
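+        # NOTE: the `A | B` union form accepted by isinstance() below requires
+        # Python 3.10+, matching the python_requires="~=3.10" bump in setup.py.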
for field_name, field in entity.get_fields().items(): - if isinstance(field, entity_mixins.OneToOneField) or isinstance( - field, entity_mixins.OneToManyField - ): + if isinstance(field, (entity_mixins.OneToOneField | entity_mixins.OneToManyField)): print( '{} -> {} [label="{}"{}]'.format( entity_name, diff --git a/scripts/polarion-test-case-upload.sh b/scripts/polarion-test-case-upload.sh index 58df170e361..0c24ba35c8d 100755 --- a/scripts/polarion-test-case-upload.sh +++ b/scripts/polarion-test-case-upload.sh @@ -68,12 +68,18 @@ from betelgeuse import default_config DEFAULT_APPROVERS_VALUE = '${POLARION_USERNAME}:approved' DEFAULT_STATUS_VALUE = 'approved' DEFAULT_SUBTYPE2_VALUE = '-' -TESTCASE_CUSTOM_FIELDS = default_config.TESTCASE_CUSTOM_FIELDS + ('customerscenario',) + ('team',) +TESTCASE_CUSTOM_FIELDS = default_config.TESTCASE_CUSTOM_FIELDS + ('customerscenario',) + ('team',) + ('markers',) +# converting TESTCASE_CUSTOM_FIELDS to list for removing tokens since the tokens are defined as defaults/mandatory in betelgeuse +TESTCASE_CUSTOM_FIELDS = list(TESTCASE_CUSTOM_FIELDS) +REMOVE_TOKEN_LIST = ['caselevel', 'upstream', 'testtype'] +TESTCASE_CUSTOM_FIELDS = tuple([token for token in TESTCASE_CUSTOM_FIELDS if token not in REMOVE_TOKEN_LIST]) + REQUIREMENT_CUSTOM_FIELDS = default_config.REQUIREMENT_CUSTOM_FIELDS + ('team',) TRANSFORM_CUSTOMERSCENARIO_VALUE = default_config._transform_to_lower DEFAULT_CUSTOMERSCENARIO_VALUE = 'false' DEFAULT_REQUIREMENT_TEAM_VALUE = None TRANSFORM_REQUIREMENT_TEAM_VALUE = default_config._transform_to_lower +MARKERS_IGNORE_LIST = ['parametrize', 'skip.*', 'usefixtures', 'rhel_ver_.*'] EOF set -x diff --git a/scripts/tokenize_customer_scenario.py b/scripts/tokenize_customer_scenario.py index 273a2075d4d..14a0cbf75a5 100644 --- a/scripts/tokenize_customer_scenario.py +++ b/scripts/tokenize_customer_scenario.py @@ -14,12 +14,9 @@ $ python scripts/tokenize_customer_scenario.py """ import codemod -from codemod import Query -from codemod import regex_suggestor -from codemod import run_interactive +from codemod import Query, regex_suggestor, run_interactive from codemod.helpers import path_filter - codemod.base.yes_to_all = True # this script can be changed to accept this list as param diff --git a/scripts/vault_login.py b/scripts/vault_login.py index 6f311bfeadc..300d81759a5 100755 --- a/scripts/vault_login.py +++ b/scripts/vault_login.py @@ -1,86 +1,14 @@ #!/usr/bin/env python # This Enables and Disables individuals OIDC token to access secrets from vault -import json -import os -import re -import subprocess import sys -from pathlib import Path - -from robottelo.constants import Colored -from robottelo.utils import export_vault_env_vars - - -HELP_TEXT = ( - "Vault CLI in not installed in your system, " - "refer link https://learn.hashicorp.com/tutorials/vault/getting-started-install to " - "install vault CLI as per your system spec!" -) - - -def _vault_command(command: str): - vcommand = subprocess.run(command, capture_output=True, shell=True) - if vcommand.returncode != 0: - verror = str(vcommand.stderr) - if vcommand.returncode == 127: - print(f"{Colored.REDDARK}Error! {HELP_TEXT}") - sys.exit(1) - elif 'Error revoking token' in verror: - print(f"{Colored.GREEN}Token is alredy revoked!") - sys.exit(0) - elif 'Error looking up token' in verror: - print(f"{Colored.YELLOW}Warning! Vault not logged in, please run 'make vault-login'!") - sys.exit(2) - else: - print(f"{Colored.REDDARK}Error! 
{verror}") - sys.exit(1) - return vcommand - - -def _vault_login(root_path, envdata): - print( - f"{Colored.WHITELIGHT}Warning! The browser is about to open for vault OIDC login, " - "close the tab once the sign-in is done!" - ) - if _vault_command(command="vault login -method=oidc").returncode == 0: - _vault_command(command="vault token renew -i 10h") - print(f"{Colored.GREEN}Success! Vault OIDC Logged-In and extended for 10 hours!") - # Fetching token - token = _vault_command("vault token lookup --format json").stdout - token = json.loads(str(token.decode('UTF-8')))['data']['id'] - # Setting new token in env file - envdata = re.sub('.*VAULT_TOKEN_FOR_DYNACONF=.*', f"VAULT_TOKEN_FOR_DYNACONF={token}", envdata) - with open(root_path, 'w') as envfile: - envfile.write(envdata) - print( - f"{Colored.GREEN}Success! New OIDC token added to .env file to access secrets from vault!" - ) - - -def _vault_logout(root_path, envdata): - # Teardown - Setting dymmy token in env file - envdata = re.sub('.*VAULT_TOKEN_FOR_DYNACONF=.*', "# VAULT_TOKEN_FOR_DYNACONF=myroot", envdata) - with open(root_path, 'w') as envfile: - envfile.write(envdata) - _vault_command('vault token revoke -self') - print(f"{Colored.GREEN}Success! OIDC token removed from Env file successfully!") - - -def _vault_status(): - vstatus = _vault_command('vault token lookup') - if vstatus.returncode == 0: - print(str(vstatus.stdout.decode('UTF-8'))) +from robottelo.utils.vault import Vault if __name__ == '__main__': - root_path = Path('.env') - envdata = root_path.read_text() - export_vault_env_vars(envdata=envdata) - if sys.argv[-1] == '--login': - _vault_login(root_path, envdata) - elif sys.argv[-1] == '--status': - _vault_status() - else: - _vault_logout(root_path, envdata) - # Unsetting VAULT URL - del os.environ['VAULT_ADDR'] + with Vault() as vclient: + if sys.argv[-1] == '--login': + vclient.login() + elif sys.argv[-1] == '--status': + vclient.status() + else: + vclient.logout() diff --git a/setup.py b/setup.py index f3a682641fc..7e0feb9f2b3 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup with open('README.rst') as f: README = f.read() @@ -17,7 +16,7 @@ include_package_data=True, license='GNU GPL v3.0', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - python_requires="~=3.9", + python_requires="~=3.10", classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', diff --git a/testimony.yaml b/testimony.yaml index 6771b38ebc4..fed691dd424 100644 --- a/testimony.yaml +++ b/testimony.yaml @@ -15,7 +15,8 @@ CaseComponent: # No spaces allowed - ActivationKeys - AlternateContentSources - - Ansible + - Ansible-ConfigurationManagement + - Ansible-RemoteExecution - AnsibleCollection - API - AuditLog @@ -23,10 +24,10 @@ CaseComponent: - BootdiskPlugin - Bootstrap - Branding + - BVT - Candlepin - - Capsule + - ForemanProxy - Capsule-Content - - Certificates - ComputeResources - ComputeResources-Azure - ComputeResources-CNV @@ -46,15 +47,14 @@ CaseComponent: - DiscoveryImage - DiscoveryPlugin - Documentation - - Dynflow - - Email - Entitlements - ErrataManagement - Fact - ForemanDebug - - ForemanMaintain + - SatelliteMaintain - Hammer - Hammer-Content + - HTTPProxy - HostCollections - HostForm - HostGroup @@ -62,11 +62,10 @@ CaseComponent: - Hosts-Content - Infobloxintegration - Infrastructure - - Installer + - Installation - InterSatelliteSync - 
katello-agent - katello-tracer - - LDAP - Leappintegration - LifecycleEnvironments - LocalizationInternationalization @@ -75,7 +74,6 @@ CaseComponent: - Networking - Notifications - OrganizationsandLocations - - Packaging - Parameters - Provisioning - ProvisioningTemplates @@ -88,22 +86,19 @@ CaseComponent: - RemoteExecution - Reporting - Repositories - - RHCloud-CloudConnector - - RHCloud-Insights - - RHCloud-Inventory + - RHCloud - rubygem-foreman-redhat_access - - satellite-change-hostname - SatelliteClone - SCAPPlugin - Search - Security - - SELinux - Settings - SubscriptionManagement - Subscriptions-virt-who - SyncPlans - TasksPlugin - TemplatesPlugin + - TestEnvironment - TFTP - Upgrades - UsersRoles @@ -120,15 +115,6 @@ CaseImportance: - Low required: true type: choice -CaseLevel: - casesensitive: true - choices: - - Component - - Integration - - System - - Acceptance - required: true - type: choice CasePosNeg: casesensitive: false choices: @@ -156,20 +142,6 @@ SubComponent: type: choice Subtype1: {} Teardown: {} -TestType: - casesensitive: false - choices: - - Functional - - Structural - required: true - type: choice -Upstream: - casesensitive: false - choices: - - 'Yes' - - 'No' - required: true - type: choice Parametrized: casesensitive: false choices: diff --git a/tests/foreman/api/test_acs.py b/tests/foreman/api/test_acs.py index 773b2b300fe..7821db07fc5 100644 --- a/tests/foreman/api/test_acs.py +++ b/tests/foreman/api/test_acs.py @@ -4,24 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: AlternateContentSources :Team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from requests.exceptions import HTTPError -from robottelo.constants.repos import PULP_FIXTURE_ROOT -from robottelo.constants.repos import PULP_SUBPATHS_COMBINED +from robottelo.constants.repos import PULP_FIXTURE_ROOT, PULP_SUBPATHS_COMBINED @pytest.mark.e2e @@ -51,7 +45,6 @@ def test_positive_CRUD_all_types( 3. ACS can be updated and read with new name. 4. ACS can be refreshed. 5. ACS can be deleted. - """ if 'rhui' in request.node.name and 'file' in request.node.name: pytest.skip('unsupported parametrize combination') @@ -128,10 +121,9 @@ def test_positive_run_bulk_actions(module_target_sat, module_yum_repo): :expectedresults: 1. All ACSes can be refreshed via bulk action. 2. Only the proper ACSes are deleted on bulk destroy. 
- """ acs_ids = [] - for i in range(3): + for _ in range(3): acs = module_target_sat.api.AlternateContentSource( name=gen_string('alpha'), alternate_content_source_type='simplified', diff --git a/tests/foreman/api/test_activationkey.py b/tests/foreman/api/test_activationkey.py index 0a4c48de8a3..ef5add381fc 100644 --- a/tests/foreman/api/test_activationkey.py +++ b/tests/foreman/api/test_activationkey.py @@ -4,36 +4,29 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ActivationKeys :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No + """ import http -import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string +from fauxfactory import gen_integer, gen_string from nailgun import client -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.config import get_credentials -from robottelo.config import user_nailgun_config -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.config import get_credentials, user_nailgun_config +from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.utils.datafactory import ( + filtered_datapoint, + invalid_names_list, + parametrized, + valid_data_list, +) @filtered_datapoint @@ -49,7 +42,7 @@ def _bad_max_hosts(): @pytest.mark.tier1 -def test_positive_create_unlimited_hosts(): +def test_positive_create_unlimited_hosts(target_sat): """Create a plain vanilla activation key. :id: 1d73b8cc-a754-4637-8bae-d9d2aaf89003 @@ -59,12 +52,12 @@ def test_positive_create_unlimited_hosts(): :CaseImportance: Critical """ - assert entities.ActivationKey().create().unlimited_hosts is True + assert target_sat.api.ActivationKey().create().unlimited_hosts is True @pytest.mark.tier1 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_create_limited_hosts(max_host): +def test_positive_create_limited_hosts(max_host, target_sat): """Create an activation key with limited hosts. :id: 9bbba620-fd98-4139-a44b-af8ce330c7a4 @@ -76,14 +69,14 @@ def test_positive_create_limited_hosts(max_host): :parametrized: yes """ - act_key = entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + act_key = target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() assert act_key.max_hosts == max_host assert act_key.unlimited_hosts is False @pytest.mark.tier1 @pytest.mark.parametrize('key_name', **parametrized(valid_data_list())) -def test_positive_create_with_name(key_name): +def test_positive_create_with_name(key_name, target_sat): """Create an activation key providing the initial name. :id: 749e0d28-640e-41e5-89d6-b92411ce73a3 @@ -94,13 +87,13 @@ def test_positive_create_with_name(key_name): :parametrized: yes """ - act_key = entities.ActivationKey(name=key_name).create() + act_key = target_sat.api.ActivationKey(name=key_name).create() assert key_name == act_key.name @pytest.mark.tier2 @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create an activation key and provide a description. 
:id: 64d93726-6f96-4a2e-ab29-eb5bfa2ff8ff @@ -109,12 +102,12 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - act_key = entities.ActivationKey(description=desc).create() + act_key = target_sat.api.ActivationKey(description=desc).create() assert desc == act_key.description @pytest.mark.tier2 -def test_negative_create_with_no_host_limit(): +def test_negative_create_with_no_host_limit(target_sat): """Create activation key without providing limitation for hosts number :id: a9e756e1-886d-4f0d-b685-36ce4247517d @@ -124,12 +117,12 @@ def test_negative_create_with_no_host_limit(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ActivationKey(unlimited_hosts=False).create() + target_sat.api.ActivationKey(unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_create_with_invalid_host_limit(max_host): +def test_negative_create_with_invalid_host_limit(max_host, target_sat): """Create activation key with invalid limit values for hosts number. :id: c018b177-2074-4f1a-a7e0-9f38d6c9a1a6 @@ -141,12 +134,12 @@ def test_negative_create_with_invalid_host_limit(max_host): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create activation key providing an invalid name. :id: 5f7051be-0320-4d37-9085-6904025ad909 @@ -158,12 +151,12 @@ def test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(name=name).create() + target_sat.api.ActivationKey(name=name).create() @pytest.mark.tier2 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_update_limited_host(max_host): +def test_positive_update_limited_host(max_host, target_sat): """Create activation key then update it to limited hosts. :id: 34ca8303-8135-4694-9cf7-b20f8b4b0a1e @@ -173,18 +166,18 @@ def test_positive_update_limited_host(max_host): :parametrized: yes """ # unlimited_hosts defaults to True. - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': max_host, 'unlimited_hosts': False} for key, value in want.items(): setattr(act_key, key, value) act_key = act_key.update(want.keys()) - actual = {attr: getattr(act_key, attr) for attr in want.keys()} + actual = {attr: getattr(act_key, attr) for attr in want} assert want == actual @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat, module_org): """Create activation key providing the initial name, then update its name to another valid name. 
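The hunks above and below all apply one refactor: direct nailgun `entities` calls become calls through the `target_sat.api` wrapper exposed by the target_sat/module_target_sat fixtures, usually scoped to an explicit organization. A minimal before/after sketch of the pattern (names as used in this patch):

    # before: the entity is created against the globally configured server
    act_key = entities.ActivationKey().create()

    # after: the entity is created through the Satellite fixture's API wrapper,
    # bound to that Satellite and scoped to an explicit organization
    act_key = target_sat.api.ActivationKey(organization=module_org).create()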
@@ -195,14 +188,16 @@ def test_positive_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() - updated = entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) + act_key = target_sat.api.ActivationKey(organization=module_org).create() + updated = target_sat.api.ActivationKey( + id=act_key.id, organization=module_org, name=new_name + ).update(['name']) assert new_name == updated.name @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_update_limit(max_host): +def test_negative_update_limit(max_host, target_sat): """Create activation key then update its limit to invalid value. :id: 0f857d2f-81ed-4b8b-b26e-34b4f294edbc @@ -217,20 +212,20 @@ def test_negative_update_limit(max_host): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': act_key.max_hosts, 'unlimited_hosts': act_key.unlimited_hosts} act_key.max_hosts = max_host act_key.unlimited_hosts = False with pytest.raises(HTTPError): act_key.update(want.keys()) act_key = act_key.read() - actual = {attr: getattr(act_key, attr) for attr in want.keys()} + actual = {attr: getattr(act_key, attr) for attr in want} assert want == actual @pytest.mark.tier3 @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat, module_org): """Create activation key then update its name to an invalid name. :id: da85a32c-942b-4ab8-a133-36b028208c4d @@ -242,16 +237,18 @@ def test_negative_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey(organization=module_org).create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) - new_key = entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id, organization=module_org, name=new_name).update( + ['name'] + ) + new_key = target_sat.api.ActivationKey(id=act_key.id).read() assert new_key.name != new_name assert new_key.name == act_key.name @pytest.mark.tier3 -def test_negative_update_max_hosts(): +def test_negative_update_max_hosts(target_sat, module_org): """Create an activation key with ``max_hosts == 1``, then update that field with a string value. @@ -261,24 +258,24 @@ def test_negative_update_max_hosts(): :CaseImportance: Low """ - act_key = entities.ActivationKey(max_hosts=1).create() + act_key = target_sat.api.ActivationKey(max_hosts=1, organization=module_org).create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, max_hosts='foo').update(['max_hosts']) + target_sat.api.ActivationKey( + id=act_key.id, organization=module_org, max_hosts='foo' + ).update(['max_hosts']) assert act_key.read().max_hosts == 1 @pytest.mark.tier2 -def test_positive_get_releases_status_code(): +def test_positive_get_releases_status_code(target_sat): """Get an activation key's releases. Check response format. 
:id: e1ea4797-8d92-4bec-ae6b-7a26599825ab :expectedresults: HTTP 200 is returned with an ``application/json`` content-type - - :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() path = act_key.path('releases') response = client.get(path, auth=get_credentials(), verify=False) status_code = http.client.OK @@ -287,23 +284,21 @@ def test_positive_get_releases_status_code(): @pytest.mark.tier2 -def test_positive_get_releases_content(): +def test_positive_get_releases_content(target_sat): """Get an activation key's releases. Check response contents. :id: 2fec3d71-33e9-40e5-b934-90b03afc26a1 :expectedresults: A list of results is returned. - - :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() response = client.get(act_key.path('releases'), auth=get_credentials(), verify=False).json() - assert 'results' in response.keys() - assert type(response['results']) == list + assert 'results' in response + assert isinstance(response['results'], list) @pytest.mark.tier2 -def test_positive_add_host_collections(module_org): +def test_positive_add_host_collections(module_org, module_target_sat): """Associate an activation key with several host collections. :id: 1538808c-621e-4cf9-9b9b-840c5dd54644 @@ -316,28 +311,30 @@ def test_positive_add_host_collections(module_org): collections and reading that activation key, the correct host collections are listed. - :CaseLevel: Integration - :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 # Give activation key one host collection. - act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 1 # Give activation key second host collection. - act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 2 @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_host_collection(module_org): +def test_positive_remove_host_collection(module_org, module_target_sat): """Disassociate host collection from the activation key :id: 31992ac4-fe55-45bb-bd17-a191928ec2ab @@ -351,15 +348,13 @@ def test_positive_remove_host_collection(module_org): 3. Disassociating host collection from the activation key actually removes it from the list - :CaseLevel: Integration - :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() # Associate host collection with activation key. 
act_key.add_host_collection(data={'host_collection_ids': [host_collection.id]}) @@ -371,7 +366,7 @@ def test_positive_remove_host_collection(module_org): @pytest.mark.tier1 -def test_positive_update_auto_attach(): +def test_positive_update_auto_attach(target_sat, module_org): """Create an activation key, then update the auto_attach field with the inverse boolean value. @@ -381,17 +376,17 @@ def test_positive_update_auto_attach(): :CaseImportance: Critical """ - act_key = entities.ActivationKey().create() - act_key_2 = entities.ActivationKey(id=act_key.id, auto_attach=(not act_key.auto_attach)).update( - ['auto_attach'] - ) + act_key = target_sat.api.ActivationKey(organization=module_org).create() + act_key_2 = target_sat.api.ActivationKey( + id=act_key.id, organization=module_org, auto_attach=(not act_key.auto_attach) + ).update(['auto_attach']) assert act_key.auto_attach != act_key_2.auto_attach @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete(name): +def test_positive_delete(name, target_sat): """Create activation key and then delete it. :id: aa28d8fb-e07d-45fa-b43a-fc90c706d633 @@ -402,10 +397,10 @@ def test_positive_delete(name): :parametrized: yes """ - act_key = entities.ActivationKey(name=name).create() + act_key = target_sat.api.ActivationKey(name=name).create() act_key.delete() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id).read() @pytest.mark.tier2 @@ -444,8 +439,6 @@ def test_positive_fetch_product_content( :expectedresults: Both Red Hat and custom product subscriptions are assigned as Activation Key's product content - :CaseLevel: Integration - :CaseImportance: Critical """ module_target_sat.upload_manifest(module_org.id, session_entitlement_manifest.content) @@ -499,14 +492,12 @@ def test_positive_add_future_subscription(): :CaseAutomation: NotAutomated - :CaseLevel: Integration - :CaseImportance: Critical """ @pytest.mark.tier1 -def test_positive_search_by_org(): +def test_positive_search_by_org(target_sat): """Search for all activation keys in an organization. 
     :id: aedba598-2e47-44a8-826c-4dc304ba00be

     :expectedresults: Only activation keys for the organization are returned.

     :CaseImportance: Critical
     """
-    org = entities.Organization().create()
-    act_key = entities.ActivationKey(organization=org).create()
-    keys = entities.ActivationKey(organization=org).search()
+    org = target_sat.api.Organization().create()
+    act_key = target_sat.api.ActivationKey(organization=org).create()
+    keys = target_sat.api.ActivationKey(organization=org).search()
     assert len(keys) == 1
     assert act_key.id == keys[0].id
diff --git a/tests/foreman/api/test_ansible.py b/tests/foreman/api/test_ansible.py
index c3afd6362cb..435501d0696 100644
--- a/tests/foreman/api/test_ansible.py
+++ b/tests/foreman/api/test_ansible.py
@@ -4,122 +4,468 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
-:CaseComponent: Ansible
+:CaseComponent: Ansible-ConfigurationManagement

 :Team: Rocket

-:TestType: Functional
-
-:CaseImportance: High
+:CaseImportance: Critical

-:Upstream: No
 """
+from fauxfactory import gen_string
 import pytest
+from wait_for import wait_for

-from robottelo.config import settings
+from robottelo.config import settings, user_nailgun_config
+from robottelo.utils.issue_handlers import is_open


-@pytest.mark.e2e
-def test_fetch_and_sync_ansible_playbooks(target_sat):
+@pytest.fixture
+def filtered_user(target_sat, module_org, module_location):
     """
-    Test Ansible Playbooks api for fetching and syncing playbooks
-
-    :id: 17b4e767-1494-4960-bc60-f31a0495c09f
+    :steps:
+        1. Create a role with a filtered host view
+        2. Create a user with that role
+        3. Set up a host
+    """
+    role = target_sat.api.Role(
+        name=gen_string('alpha'), location=[module_location], organization=[module_org]
+    ).create()
+    # assign view_hosts (with a filter, to test BZ 1699188),
+    # view_hostgroups, view_facts permissions to the role
+    permission_hosts = target_sat.api.Permission().search(query={'search': 'name="view_hosts"'})
+    permission_hostgroups = target_sat.api.Permission().search(
+        query={'search': 'name="view_hostgroups"'}
+    )
+    permission_facts = target_sat.api.Permission().search(query={'search': 'name="view_facts"'})
+    target_sat.api.Filter(
+        permission=permission_hosts, search='name != nonexistent', role=role
+    ).create()
+    target_sat.api.Filter(permission=permission_hostgroups, role=role).create()
+    target_sat.api.Filter(permission=permission_facts, role=role).create()

-    :customerscenario: true
+    password = gen_string('alpha')
+    user = target_sat.api.User(
+        role=[role], password=password, location=[module_location], organization=[module_org]
+    ).create()

-    :Steps:
+    return user, password

-        1. Install ansible collection with playbooks.
-        2. Try to fetch the playbooks via api.
-        3. Sync the playbooks.
-        4. Assert the count of playbooks fetched and synced are equal.

-    :expectedresults:
-        1. Playbooks should be fetched and synced successfully. 
+
+@pytest.mark.upgrade
+class TestAnsibleCfgMgmt:
+    """Test class for Configuration Management with Ansible
+
+    :CaseComponent: Ansible-ConfigurationManagement
+    """
+
+    @pytest.mark.e2e
+    def test_fetch_and_sync_ansible_playbooks(self, target_sat):
+        """
+        Test Ansible Playbooks api for fetching and syncing playbooks
+
+        :id: 17b4e767-1494-4960-bc60-f31a0495c09f
+
+        :steps:
+            1. Install ansible collection with playbooks.
+            2. Try to fetch the playbooks via api.
+            3. Sync the playbooks.
+            4. Assert the count of playbooks fetched and synced are equal.
+
+        :expectedresults:
+            1. Playbooks should be fetched and synced successfully.
+
+        :BZ: 2115686
+
+        :customerscenario: true
+        """
+        target_sat.execute(
+            "ansible-galaxy collection install -p /usr/share/ansible/collections "
+            "xprazak2.forklift_collection"
+        )
+        proxy_id = target_sat.nailgun_smart_proxy.id
+        playbook_fetch = target_sat.api.AnsiblePlaybooks().fetch(data={'proxy_id': proxy_id})
+        playbooks_count = len(playbook_fetch['results']['playbooks_names'])
+        playbook_sync = target_sat.api.AnsiblePlaybooks().sync(data={'proxy_id': proxy_id})
+        assert playbook_sync['action'] == "Sync playbooks"
+
+        target_sat.wait_for_tasks(
+            search_query=(f'id = {playbook_sync["id"]}'),
+            poll_timeout=100,
+        )
+        task_details = target_sat.api.ForemanTask().search(
+            query={'search': f'id = {playbook_sync["id"]}'}
+        )
+        assert task_details[0].result == 'success'
+        assert len(task_details[0].output['result']['created']) == playbooks_count
+
+    @pytest.mark.e2e
+    @pytest.mark.tier2
+    def test_add_and_remove_ansible_role_hostgroup(self, target_sat):
+        """
+        Test add and remove functionality for ansible roles in hostgroup via API
+
+        :id: 7672cf86-fa31-11ed-855a-0fd307d2d66b
+
+        :steps:
+            1. Create a hostgroup and a nested hostgroup
+            2. Sync a few ansible roles
+            3. Assign a few ansible roles to the host group
+            4. Add an ansible role to the host group
+            5. Add some ansible roles to the nested hostgroup
+            6. Remove the added ansible roles from the parent and nested hostgroup
+
+        :expectedresults:
+            1. Ansible role assign/add/remove functionality should work as expected in API
+
+        :BZ: 2164400
+        """
+        ROLE_NAMES = [
+            'theforeman.foreman_scap_client',
+            'redhat.satellite.hostgroups',
+            'RedHatInsights.insights-client',
+            'redhat.satellite.compute_resources',
+        ]
+        hg = target_sat.api.HostGroup(name=gen_string('alpha')).create()
+        hg_nested = target_sat.api.HostGroup(name=gen_string('alpha'), parent=hg).create()
+        proxy_id = target_sat.nailgun_smart_proxy.id
+        target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': ROLE_NAMES})
+        ROLES = [
+            target_sat.api.AnsibleRoles().search(query={'search': f'name={role}'})[0].id
+            for role in ROLE_NAMES
+        ]
+        # Assign first 2 roles to HG and verify it
+        target_sat.api.HostGroup(id=hg.id).assign_ansible_roles(
+            data={'ansible_role_ids': ROLES[:2]}
+        )
+        for r1, r2 in zip(
+            target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:2], strict=True
+        ):
+            assert r1['name'] == r2
+
+        # Add next role from list to HG and verify it
+        target_sat.api.HostGroup(id=hg.id).add_ansible_role(data={'ansible_role_id': ROLES[2]})
+        for r1, r2 in zip(
+            target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True
+        ):
+            assert r1['name'] == r2
+
+        # Add the next role to the nested HG and verify it lists the directly
+        # assigned role along with the roles inherited from the parent HG.
+        # Also, ensure the parent HG does not contain the roles assigned to nested HGs
+        target_sat.api.HostGroup(id=hg_nested.id).add_ansible_role(
+            data={'ansible_role_id': ROLES[3]}
+        )
+        for r1, r2 in zip(
+            target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles(),
+            [ROLE_NAMES[-1]] + ROLE_NAMES[:-1],
+            strict=True,
+        ):
+            assert r1['name'] == r2
+
+        for r1, r2 in zip(
+            target_sat.api.HostGroup(id=hg.id).list_ansible_roles(), ROLE_NAMES[:3], strict=True
+        ):
+            assert r1['name'] == r2
+
+        # Remove roles assigned one by one from HG and nested HG
+        for role in ROLES[:3]:
+            target_sat.api.HostGroup(id=hg.id).remove_ansible_role(data={'ansible_role_id': role})
+        hg_roles = target_sat.api.HostGroup(id=hg.id).list_ansible_roles()
+        assert len(hg_roles) == 0
+
+        for role in ROLES:
+            target_sat.api.HostGroup(id=hg_nested.id).remove_ansible_role(
+                data={'ansible_role_id': role}
+            )
+        hg_nested_roles = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles()
+        assert len(hg_nested_roles) == 0
+
+    @pytest.mark.e2e
+    @pytest.mark.tier2
+    def test_positive_ansible_roles_inherited_from_hostgroup(
+        self, request, target_sat, module_org, module_location
+    ):
+        """Verify ansible roles inheritance functionality for host with parent/nested hostgroup via API
+
+        :id: 7672cf86-fa31-11ed-855a-0fd307d2d66g
+
+        :steps:
+            1. Create a host, hostgroup and nested hostgroup
+            2. Sync a few ansible roles
+            3. Assign a few ansible roles to the host, hostgroup, nested hostgroup and verify it.
+            4. Update host to be in parent/nested hostgroup and verify roles assigned
+
+        :expectedresults:
+            1. Hosts in parent/nested hostgroups must have direct and indirect roles correctly assigned.
+
+        :BZ: 2187967
+
+        :customerscenario: true
+        """
+        ROLE_NAMES = [
+            'theforeman.foreman_scap_client',
+            'RedHatInsights.insights-client',
+            'redhat.satellite.compute_resources',
+        ]
+        proxy_id = target_sat.nailgun_smart_proxy.id
+        host = target_sat.api.Host(organization=module_org, location=module_location).create()
+        hg = target_sat.api.HostGroup(name=gen_string('alpha'), organization=[module_org]).create()
+        hg_nested = target_sat.api.HostGroup(
+            name=gen_string('alpha'), parent=hg, organization=[module_org]
+        ).create()
+
+        @request.addfinalizer
+        def _finalize():
+            host.delete()
+            hg_nested.delete()
+            hg.delete()
+
+        target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': ROLE_NAMES})
+        ROLES = [
+            target_sat.api.AnsibleRoles().search(query={'search': f'name={role}'})[0].id
+            for role in ROLE_NAMES
+        ]
+
+        # Assign roles to Host/Hostgroup/Nested Hostgroup and verify it
+        target_sat.api.Host(id=host.id).add_ansible_role(data={'ansible_role_id': ROLES[0]})
+        assert ROLE_NAMES[0] == target_sat.api.Host(id=host.id).list_ansible_roles()[0]['name']
+
+        target_sat.api.HostGroup(id=hg.id).add_ansible_role(data={'ansible_role_id': ROLES[1]})
+        assert ROLE_NAMES[1] == target_sat.api.HostGroup(id=hg.id).list_ansible_roles()[0]['name']
+
+        target_sat.api.HostGroup(id=hg_nested.id).add_ansible_role(
+            data={'ansible_role_id': ROLES[2]}
+        )
+        listroles = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles()
+        assert ROLE_NAMES[2] == listroles[0]['name']
+        assert listroles[0]['directly_assigned']
+        assert ROLE_NAMES[1] == listroles[1]['name']
+        assert not listroles[1]['directly_assigned']
+
+        # Update host to be in nested hostgroup and verify roles assigned
+        host.hostgroup = hg_nested
+        host = host.update(['hostgroup'])
+        listroles_host = target_sat.api.Host(id=host.id).list_ansible_roles()
+        assert ROLE_NAMES[0] == listroles_host[0]['name']
+        assert listroles_host[0]['directly_assigned']
+        assert ROLE_NAMES[1] == listroles_host[1]['name']
+        assert not listroles_host[1]['directly_assigned']
+        assert ROLE_NAMES[2] == listroles_host[2]['name']
+        assert not listroles_host[2]['directly_assigned']
+        # Verify the nested hostgroup doesn't contain the roles assigned to the host
+        listroles_nested_hg = target_sat.api.HostGroup(id=hg_nested.id).list_ansible_roles()
+        assert ROLE_NAMES[0] not in [role['name'] for role in listroles_nested_hg]
+        assert ROLE_NAMES[2] == listroles_nested_hg[0]['name']
+        assert ROLE_NAMES[1] == listroles_nested_hg[1]['name']
+
+        # Update host to be in parent hostgroup and verify roles assigned
+        host.hostgroup = hg
+        host = host.update(['hostgroup'])
+        listroles = target_sat.api.Host(id=host.id).list_ansible_roles()
+        assert ROLE_NAMES[0] == listroles[0]['name']
+        assert listroles[0]['directly_assigned']
+        assert ROLE_NAMES[1] == listroles[1]['name']
+        assert not listroles[1]['directly_assigned']
+        # Verify the parent hostgroup doesn't contain the roles assigned to the host
+        listroles_hg = target_sat.api.HostGroup(id=hg.id).list_ansible_roles()
+        assert ROLE_NAMES[0] not in [role['name'] for role in 
listroles_hg] + assert ROLE_NAMES[1] == listroles_hg[0]['name'] + + @pytest.mark.rhel_ver_match('[78]') + @pytest.mark.tier2 + def test_positive_read_facts_with_filter( + self, target_sat, rex_contenthost, filtered_user, module_org, module_location + ): + """Read host's Ansible facts as a user with a role that has host filter + + :id: 483d5faf-7a4c-4cb7-b14f-369768ad99b0 + + :steps: + 1. Run Ansible roles on a host + 2. Using API, read Ansible facts of that host + + :expectedresults: Ansible facts returned + + :BZ: 1699188 + + :customerscenario: true + """ + user, password = filtered_user + host = rex_contenthost.nailgun_host + host.organization = module_org + host.location = module_location + host.update(['organization', 'location']) + + # gather ansible facts by running ansible roles on the host + host.play_ansible_roles() + if is_open('BZ:2216471'): + wait_for( + lambda: len(rex_contenthost.nailgun_host.get_facts()) > 0, + timeout=30, + delay=2, + ) + user_cfg = user_nailgun_config(user.login, password) + # get facts through API + user_facts = ( + target_sat.api.Host(server_config=user_cfg) + .search(query={'search': f'name={rex_contenthost.hostname}'})[0] + .get_facts() + ) + assert 'subtotal' in user_facts + assert user_facts['subtotal'] == 1 + assert 'results' in user_facts + assert rex_contenthost.hostname in user_facts['results'] + assert len(user_facts['results'][rex_contenthost.hostname]) > 0 + + +class TestAnsibleREX: + """Test class for remote execution via Ansible + + :CaseComponent: Ansible-RemoteExecution """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - if rhel_contenthost.os_version.major <= 7: - rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) - assert rhel_contenthost.execute('yum install -y insights-client').status == 0 - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, force=True) - assert rhel_contenthost.subscribed - rhel_contenthost.add_rex_key(satellite=target_sat) - proxy_id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) - target_sat.cli.Host.ansible_roles_assign({'id': target_host.id, 'ansible-roles': SELECTED_ROLE}) - host_roles = target_host.list_ansible_roles() - assert host_roles[0]['name'] == SELECTED_ROLE - assert target_host.name == rhel_contenthost.hostname - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 + + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('[^6].*') + def test_positive_ansible_job_on_host( + self, target_sat, module_org, module_location, module_ak_with_synced_repo, rhel_contenthost + ): + """Test successful execution of Ansible Job on host. + + :id: c8dcdc54-cb98-4b24-bff9-049a6cc36acb + + :steps: + 1. Register a content host with satellite + 2. Import a role into satellite + 3. Assign that role to a host + 4. Assert that the role was assigned to the host successfully + 5. 
Run the Ansible playbook associated with that role
+            6. Check if the job is executed.
+
+        :expectedresults:
+            1. Host should be assigned the proper role.
+            2. Job execution must be successful.
+
+        :BZ: 2164400
+        """
+        SELECTED_ROLE = 'RedHatInsights.insights-client'
+        if rhel_contenthost.os_version.major <= 7:
+            rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os)
+            assert rhel_contenthost.execute('yum install -y insights-client').status == 0
+        result = rhel_contenthost.register(
+            module_org, module_location, module_ak_with_synced_repo.name, target_sat
+        )
+        assert result.status == 0, f'Failed to register host: {result.stderr}'
+        proxy_id = target_sat.nailgun_smart_proxy.id
+        target_host = rhel_contenthost.nailgun_host
+        target_sat.api.AnsibleRoles().sync(
+            data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}
+        )
+        role_id = (
+            target_sat.api.AnsibleRoles().search(query={'search': f'name={SELECTED_ROLE}'})[0].id
+        )
+        target_sat.api.Host(id=target_host.id).add_ansible_role(data={'ansible_role_id': role_id})
+        host_roles = target_host.list_ansible_roles()
+        assert host_roles[0]['name'] == SELECTED_ROLE
+        assert target_host.name == rhel_contenthost.hostname
+
+        template_id = (
+            target_sat.api.JobTemplate()
+            .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0]
+            .id
+        )
+        job = target_sat.api.JobInvocation().run(
+            synchronous=False,
+            data={
+                'job_template_id': template_id,
+                'targeting_type': 'static_query',
+                'search_query': f'name = {rhel_contenthost.hostname}',
+            },
+        )
+        target_sat.wait_for_tasks(
+            f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000
+        )
+        result = target_sat.api.JobInvocation(id=job['id']).read()
+        assert result.succeeded == 1
+        target_sat.api.Host(id=target_host.id).remove_ansible_role(
+            data={'ansible_role_id': role_id}
+        )
+        host_roles = target_host.list_ansible_roles()
+        assert len(host_roles) == 0
+
+    @pytest.mark.no_containers
+    def test_positive_ansible_job_on_multiple_host(
+        self,
+        target_sat,
+        module_org,
+        rhel9_contenthost,
+        rhel8_contenthost,
+        rhel7_contenthost,
+        module_location,
+        module_ak_with_synced_repo,
+    ):
+        """Test execution of Ansible job on multiple hosts simultaneously.
+
+        :id: 9369feef-466c-40d3-9d0d-65520d7f21ef
+
+        :customerscenario: true
+
+        :steps:
+            1. Register multiple content hosts with satellite
+            2. Import a role into satellite
+            3. Assign that role to all hosts
+            4. Trigger an ansible job targeting all hosts in a single query
+            5. Check the passing and failing of individual hosts
+            6. Check that a job failing on one host marks the whole job as failed.
+
+        :expectedresults:
+            1. A job failing on a single host must mark the overall job result as failed. 
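+            2. The role runs succeed on the rhel8/rhel9 hosts and fail on the
+               rhel7 host, so the invocation reports 2 successes, 1 failure and
+               an overall 'failed' status (per the asserts below).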
+ + :BZ: 2167396, 2190464, 2184117 + """ + hosts = [rhel9_contenthost, rhel8_contenthost, rhel7_contenthost] + SELECTED_ROLE = 'RedHatInsights.insights-client' + for host in hosts: + result = host.register( + module_org, module_location, module_ak_with_synced_repo.name, target_sat + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + proxy_id = target_sat.nailgun_smart_proxy.id + target_host = host.nailgun_host + target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + role_id = ( + target_sat.api.AnsibleRoles() + .search(query={'search': f'name={SELECTED_ROLE}'})[0] + .id + ) + target_sat.api.Host(id=target_host.id).add_ansible_role( + data={'ansible_role_id': role_id} + ) + host_roles = target_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name ^ ({hosts[0].hostname} && {hosts[1].hostname} ' + f'&& {hosts[2].hostname})', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', + poll_timeout=1000, + must_succeed=False, + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 2 # SELECTED_ROLE working on rhel8/rhel9 clients + assert result.failed == 1 # SELECTED_ROLE failing on rhel7 client + assert result.status_label == 'failed' diff --git a/tests/foreman/api/test_architecture.py b/tests/foreman/api/test_architecture.py index cb34a1513ec..362abbfae32 100644 --- a/tests/foreman/api/test_architecture.py +++ b/tests/foreman/api/test_architecture.py @@ -4,30 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_choice -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_names_list, + parametrized, + valid_data_list, +) @pytest.mark.tier1 -def test_positive_CRUD(default_os): +def test_positive_CRUD(default_os, target_sat): """Create a new Architecture with several attributes, update the name and delete the Architecture itself. 
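The negative tests in this file all follow a single parametrized shape; a minimal self-contained sketch of that pattern (the invalid names below are illustrative stand-ins for robottelo.utils.datafactory's invalid_names_list):

    import pytest
    from requests.exceptions import HTTPError

    @pytest.mark.parametrize('name', ['', ' ', 'x' * 300])  # illustrative invalid names
    def test_negative_create_with_invalid_name(name, target_sat):
        # the API must reject the invalid name, so creation raises HTTPError
        with pytest.raises(HTTPError):
            target_sat.api.Architecture(name=name).create()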
@@ -41,13 +37,13 @@ def test_positive_CRUD(default_os): # Create name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(name=name, operatingsystem=[default_os]).create() + arch = target_sat.api.Architecture(name=name, operatingsystem=[default_os]).create() assert {default_os.id} == {os.id for os in arch.operatingsystem} assert name == arch.name # Update name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(id=arch.id, name=name).update(['name']) + arch = target_sat.api.Architecture(id=arch.id, name=name).update(['name']) assert name == arch.name # Delete @@ -58,7 +54,7 @@ def test_positive_CRUD(default_os): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create architecture providing an invalid initial name. :id: 0fa6377d-063a-4e24-b606-b342e0d9108b @@ -72,12 +68,12 @@ def test_negative_create_with_invalid_name(name): :BZ: 1401519 """ with pytest.raises(HTTPError): - entities.Architecture(name=name).create() + target_sat.api.Architecture(name=name).create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_update_with_invalid_name(name, module_architecture): +def test_negative_update_with_invalid_name(name, module_architecture, module_target_sat): """Update architecture's name to an invalid name. :id: cb27b69b-14e0-42d0-9e44-e09d68324803 @@ -89,6 +85,6 @@ def test_negative_update_with_invalid_name(name, module_architecture): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Architecture(id=module_architecture.id, name=name).update(['name']) - arch = entities.Architecture(id=module_architecture.id).read() + module_target_sat.api.Architecture(id=module_architecture.id, name=name).update(['name']) + arch = module_target_sat.api.Architecture(id=module_architecture.id).read() assert arch.name != name diff --git a/tests/foreman/api/test_audit.py b/tests/foreman/api/test_audit.py index 5169e44dbca..a90eda83884 100644 --- a/tests/foreman/api/test_audit.py +++ b/tests/foreman/api/test_audit.py @@ -1,30 +1,24 @@ """Tests for audit functionality. 
-:Requirement: Audit +:Requirement: AuditLog :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: AuditLog :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities from robottelo.utils.datafactory import gen_string @pytest.mark.e2e @pytest.mark.tier1 -def test_positive_create_by_type(): +def test_positive_create_by_type(target_sat): """Create entities of different types and check audit logs for these events using entity type as search criteria @@ -40,53 +34,64 @@ def test_positive_create_by_type(): :CaseImportance: Medium """ for entity_item in [ - {'entity': entities.Architecture()}, + {'entity': target_sat.api.Architecture(), 'entity_type': 'architecture'}, { - 'entity': entities.AuthSourceLDAP(), + 'entity': target_sat.api.AuthSourceLDAP(), 'entity_type': 'auth_source', 'value_template': 'LDAP-{entity.name}', }, - {'entity': entities.ComputeProfile(), 'entity_type': 'compute_profile'}, + {'entity': target_sat.api.ComputeProfile(), 'entity_type': 'compute_profile'}, { - 'entity': entities.LibvirtComputeResource(), + 'entity': target_sat.api.LibvirtComputeResource(), 'entity_type': 'compute_resource', 'value_template': '{entity.name} (Libvirt)', }, - {'entity': entities.Domain()}, - {'entity': entities.Host()}, - {'entity': entities.HostGroup()}, - {'entity': entities.Image(compute_resource=entities.LibvirtComputeResource().create())}, - {'entity': entities.Location()}, - {'entity': entities.Media(), 'entity_type': 'medium'}, + {'entity': target_sat.api.Domain(), 'entity_type': 'domain'}, + {'entity': target_sat.api.Host(), 'entity_type': 'host'}, + {'entity': target_sat.api.HostGroup(), 'entity_type': 'hostgroup'}, { - 'entity': entities.OperatingSystem(), + 'entity': target_sat.api.Image( + compute_resource=target_sat.api.LibvirtComputeResource().create() + ), + 'entity_type': 'image', + }, + {'entity': target_sat.api.Location(), 'entity_type': 'location'}, + {'entity': target_sat.api.Media(), 'entity_type': 'medium'}, + { + 'entity': target_sat.api.OperatingSystem(), 'entity_type': 'os', 'value_template': '{entity.name} {entity.major}', }, - {'entity': entities.PartitionTable(), 'entity_type': 'ptable'}, - {'entity': entities.Role()}, + {'entity': target_sat.api.PartitionTable(), 'entity_type': 'ptable'}, + {'entity': target_sat.api.Role(), 'entity_type': 'role'}, { - 'entity': entities.Subnet(), + 'entity': target_sat.api.Subnet(), + 'entity_type': 'subnet', 'value_template': '{entity.name} ({entity.network}/{entity.cidr})', }, - {'entity': entities.ProvisioningTemplate(), 'entity_type': 'provisioning_template'}, - {'entity': entities.User(), 'value_template': '{entity.login}'}, - {'entity': entities.UserGroup()}, - {'entity': entities.ContentView(), 'entity_type': 'katello/content_view'}, - {'entity': entities.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, - {'entity': entities.ActivationKey(), 'entity_type': 'katello/activation_key'}, - {'entity': entities.HostCollection(), 'entity_type': 'katello/host_collection'}, - {'entity': entities.Product(), 'entity_type': 'katello/product'}, + {'entity': target_sat.api.ProvisioningTemplate(), 'entity_type': 'provisioning_template'}, { - 'entity': entities.SyncPlan(organization=entities.Organization(id=1)), + 'entity': target_sat.api.User(), + 'value_template': '{entity.login}', + 'entity_type': 'user', + }, + {'entity': target_sat.api.UserGroup(), 'entity_type': 'usergroup'}, + {'entity': target_sat.api.ContentView(), 'entity_type': 
'katello/content_view'}, + {'entity': target_sat.api.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, + {'entity': target_sat.api.ActivationKey(), 'entity_type': 'katello/activation_key'}, + {'entity': target_sat.api.HostCollection(), 'entity_type': 'katello/host_collection'}, + {'entity': target_sat.api.Product(), 'entity_type': 'katello/product'}, + { + 'entity': target_sat.api.SyncPlan(organization=target_sat.api.Organization(id=1)), 'entity_type': 'katello/sync_plan', }, ]: created_entity = entity_item['entity'].create() - entity_type = entity_item.get('entity_type', created_entity.__class__.__name__.lower()) value_template = entity_item.get('value_template', '{entity.name}') entity_value = value_template.format(entity=created_entity) - audits = entities.Audit().search(query={'search': f'type={entity_type}'}) + audits = target_sat.api.Audit().search( + query={'search': f'type={entity_item["entity_type"]}'} + ) entity_audits = [entry for entry in audits if entry.auditable_name == entity_value] assert entity_audits, ( f'audit not found by name "{entity_value}" for entity: ' @@ -99,7 +104,7 @@ def test_positive_create_by_type(): @pytest.mark.tier1 -def test_positive_update_by_type(): +def test_positive_update_by_type(target_sat): """Update some entities of different types and check audit logs for these events using entity type as search criteria @@ -111,21 +116,19 @@ def test_positive_update_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + {'entity': target_sat.api.Architecture(), 'entity_type': 'architecture'}, + {'entity': target_sat.api.Domain(), 'entity_type': 'domain'}, + {'entity': target_sat.api.HostGroup(), 'entity_type': 'hostgroup'}, + {'entity': target_sat.api.Location(), 'entity_type': 'location'}, + {'entity': target_sat.api.Role(), 'entity_type': 'role'}, + {'entity': target_sat.api.UserGroup(), 'entity_type': 'usergroup'}, ]: - created_entity = entity.create() + created_entity = entity['entity'].create() name = created_entity.name new_name = gen_string('alpha') created_entity.name = new_name created_entity = created_entity.update(['name']) - audits = entities.Audit().search( - query={'search': f'type={created_entity.__class__.__name__.lower()}'} - ) + audits = target_sat.api.Audit().search(query={'search': f'type={entity["entity_type"]}'}) entity_audits = [entry for entry in audits if entry.auditable_name == name] assert entity_audits, f'audit not found by name "{name}"' audit = entity_audits[0] @@ -136,7 +139,7 @@ def test_positive_update_by_type(): @pytest.mark.tier1 -def test_positive_delete_by_type(): +def test_positive_delete_by_type(target_sat): """Delete some entities of different types and check audit logs for these events using entity type as search criteria @@ -148,19 +151,16 @@ def test_positive_delete_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.Host(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + {'entity': target_sat.api.Architecture(), 'entity_type': 'architecture'}, + {'entity': target_sat.api.Domain(), 'entity_type': 'domain'}, + {'entity': target_sat.api.HostGroup(), 'entity_type': 'hostgroup'}, + {'entity': target_sat.api.Location(), 'entity_type': 'location'}, + {'entity': target_sat.api.Role(), 'entity_type': 'role'}, + {'entity': target_sat.api.UserGroup(), 'entity_type': 
'usergroup'}, ]: - created_entity = entity.create() + created_entity = entity['entity'].create() created_entity.delete() - audits = entities.Audit().search( - query={'search': f'type={created_entity.__class__.__name__.lower()}'} - ) + audits = target_sat.api.Audit().search(query={'search': f'type={entity["entity_type"]}'}) entity_audits = [entry for entry in audits if entry.auditable_name == created_entity.name] assert entity_audits, f'audit not found by name "{created_entity.name}"' audit = entity_audits[0] diff --git a/tests/foreman/api/test_bookmarks.py b/tests/foreman/api/test_bookmarks.py index 91c8ef2df8a..5d5833ef563 100644 --- a/tests/foreman/api/test_bookmarks.py +++ b/tests/foreman/api/test_bookmarks.py @@ -4,44 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Search :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.constants import BOOKMARK_ENTITIES -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import valid_data_list - +from robottelo.constants import BOOKMARK_ENTITIES_SELECTION +from robottelo.utils.datafactory import invalid_values_list, valid_data_list # List of unique bookmark controller values, preserving order -CONTROLLERS = list(dict.fromkeys(entity['controller'] for entity in BOOKMARK_ENTITIES)) +CONTROLLERS = list(dict.fromkeys(entity['controller'] for entity in BOOKMARK_ENTITIES_SELECTION)) @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_name(controller): +def test_positive_create_with_name(controller, target_sat): """Create a bookmark :id: aeef0944-379a-4a27-902d-aa5969dbd441 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a random name and valid controller. 2. List the bookmarks. @@ -53,21 +45,21 @@ def test_positive_create_with_name(controller): :CaseImportance: Critical """ name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, name=name, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, name=name, public=False).create() assert bm.controller == controller assert bm.name == name @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_query(controller): +def test_positive_create_with_query(controller, target_sat): """Create a bookmark :id: 9fb6d485-92b5-43ea-b776-012c13734100 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a random query and valid controller. 2. List the bookmarks. @@ -79,22 +71,22 @@ def test_positive_create_with_query(controller): :CaseImportance: Critical """ query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, query=query).create() + bm = target_sat.api.Bookmark(controller=controller, query=query).create() assert bm.controller == controller assert bm.query == query @pytest.mark.tier1 -@pytest.mark.parametrize('public', (True, False)) +@pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_public(controller, public): +def test_positive_create_public(controller, public, target_sat): """Create a public bookmark :id: 511b9bcf-0661-4e44-b1bc-475a1c207aa9 :parametrized: yes - :Steps: + :steps: 1. 
Create a bookmark with a valid controller and public attribute True or False. 2. List the bookmarks. @@ -105,21 +97,21 @@ def test_positive_create_public(controller, public): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller, public=public).create() + bm = target_sat.api.Bookmark(controller=controller, public=public).create() assert bm.controller == controller assert bm.public == public @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_with_invalid_name(controller): +def test_negative_create_with_invalid_name(controller, target_sat): """Create a bookmark with invalid name :id: 9a79c561-8225-43fc-8ec7-b6858e9665e2 :parametrized: yes - :Steps: + :steps: 1. Attempt to create a bookmark with an invalid name. 2. List the bookmarks. @@ -133,21 +125,21 @@ def test_negative_create_with_invalid_name(controller): """ name = random.choice(invalid_values_list()) with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=False).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, public=False).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_empty_query(controller): +def test_negative_create_empty_query(controller, target_sat): """Create a bookmark with empty query :id: 674d569f-6f86-43ba-b9cc-f43e05e8ab1c :parametrized: yes - :Steps: + :steps: 1. Attempt to create a bookmark with a random name, valid controller, and empty query. 2. List the bookmarks. @@ -161,21 +153,21 @@ def test_negative_create_empty_query(controller): """ name = gen_string('alpha') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, query='').create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, query='').create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_same_name(controller): +def test_negative_create_same_name(controller, target_sat): """Create bookmarks with the same names :id: f78f6e97-da77-4a61-95c2-622c439d325d :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a random name and valid controller. 2. Attempt to create a second bookmark, using the same name as the previous bookmark. 
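The negative bookmark tests above all follow one pattern: the create call must raise, and a follow-up search must prove that nothing was persisted. A minimal sketch of that pattern, assuming the same target_sat fixture these tests use; the helper name assert_create_rejected is illustrative, not part of robottelo:

import pytest
from requests.exceptions import HTTPError


def assert_create_rejected(target_sat, controller, **kwargs):
    # the create call is expected to be rejected by the API
    with pytest.raises(HTTPError):
        target_sat.api.Bookmark(controller=controller, **kwargs).create()
    # and the failed create must leave no record behind
    name = kwargs.get('name', '')
    assert not target_sat.api.Bookmark().search(query={'search': f'name="{name}"'})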
@@ -189,23 +181,23 @@ def test_negative_create_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() + target_sat.api.Bookmark(controller=controller, name=name).create() with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 1 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_null_public(controller): +def test_negative_create_null_public(controller, target_sat): """Create a bookmark omitting the public parameter :id: 0a4cb5ea-912b-445e-a874-b345e43d3eac :parametrized: yes - :Steps: + :steps: 1. Attempt to create a bookmark with a random name and valid controller, with public attribute set to None. @@ -222,21 +214,21 @@ def test_negative_create_null_public(controller): """ name = gen_string('alphanumeric') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=None).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, public=None).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_name(controller): +def test_positive_update_name(controller, target_sat): """Update a bookmark :id: 1cde270a-26fb-4cff-bdff-89fef17a7624 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a valid controller. 2. Update the bookmark with a random name. @@ -248,7 +240,7 @@ def test_positive_update_name(controller): :CaseImportance: Critical """ new_name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name bm = bm.update(['name']) assert bm.name == new_name @@ -256,14 +248,14 @@ def test_positive_update_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_same_name(controller): +def test_negative_update_same_name(controller, target_sat): """Update a bookmark with name already taken :id: 6becf121-2bea-4f7e-98f4-338bd88b8f4b :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a random name and valid controller. 2. Create a second bookmark for the same controller. 
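Every test in this file is parametrized over CONTROLLERS, the de-duplicated, order-preserving list built at the top of the module with dict.fromkeys. A standalone illustration of that idiom; the sample entries are made up:

# dict.fromkeys keeps only the first occurrence of each key, in insertion order
sample_entities = [{'controller': 'hosts'}, {'controller': 'dashboard'}, {'controller': 'hosts'}]
controllers = list(dict.fromkeys(e['controller'] for e in sample_entities))
assert controllers == ['hosts', 'dashboard']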
@@ -276,8 +268,8 @@ def test_negative_update_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() - bm = entities.Bookmark(controller=controller).create() + target_sat.api.Bookmark(controller=controller, name=name).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.name = name with pytest.raises(HTTPError): bm.update(['name']) @@ -287,14 +279,14 @@ def test_negative_update_same_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_invalid_name(controller): +def test_negative_update_invalid_name(controller, target_sat): """Update a bookmark with an invalid name :id: 479795bb-aeed-45b3-a7e3-d3449c808087 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with valid controller. 2. Attempt to update the bookmark with an invalid name. @@ -306,7 +298,7 @@ def test_negative_update_invalid_name(controller): :CaseImportance: Critical """ new_name = random.choice(invalid_values_list()) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name with pytest.raises(HTTPError): bm.update(['name']) @@ -316,14 +308,14 @@ def test_negative_update_invalid_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_query(controller): +def test_positive_update_query(controller, target_sat): """Update a bookmark query :id: 92a31de2-bebf-4396-94f5-adf59f8d66a5 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark with a valid controller. 2. Update the bookmark's query with a random value. @@ -335,7 +327,7 @@ def test_positive_update_query(controller): :CaseImportance: Critical """ new_query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = new_query bm = bm.update(['query']) assert bm.query == new_query @@ -343,14 +335,14 @@ def test_positive_update_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_empty_query(controller): +def test_negative_update_empty_query(controller, target_sat): """Update a bookmark with an empty query :id: 948602d3-532a-47fe-b313-91e3fab809bf :parametrized: yes - :Steps: + :steps: 1. Create a bookmark for a valid controller. 2. Attempt to update the query to an empty value. @@ -361,7 +353,7 @@ def test_negative_update_empty_query(controller): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = '' with pytest.raises(HTTPError): bm.update(['query']) @@ -370,16 +362,16 @@ def test_negative_update_empty_query(controller): @pytest.mark.tier1 -@pytest.mark.parametrize('public', (True, False)) +@pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_public(controller, public): +def test_positive_update_public(controller, public, target_sat): """Update a bookmark public state to private and vice versa :id: 2717360d-37c4-4bb9-bce1-b1edabdf11b3 :parametrized: yes - :Steps: + :steps: 1. Create a bookmark for a valid controller. 2. Update the bookmark's public attribute. 
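The update tests rely on nailgun's partial-update idiom: mutate an attribute on the local entity object, then call update() with only the names of the fields to send. A short sketch under the same assumptions (the target_sat fixture from these tests, 'hosts' as a stand-in controller value):

def toggle_public(target_sat):
    # create a private bookmark, then flip it to public via a partial update
    bm = target_sat.api.Bookmark(controller='hosts', public=False).create()
    bm.public = True
    bm = bm.update(['public'])  # only the named field is written back
    assert bm.public is True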
@@ -391,7 +383,7 @@ def test_positive_update_public(controller, public):

     :CaseImportance: Critical
     """
-    bm = entities.Bookmark(controller=controller, public=not public).create()
+    bm = target_sat.api.Bookmark(controller=controller, public=not public).create()
     assert bm.public != public
     bm.public = public
     bm = bm.update(['public'])
diff --git a/tests/foreman/api/test_capsule.py b/tests/foreman/api/test_capsule.py
new file mode 100644
index 00000000000..86aaa089922
--- /dev/null
+++ b/tests/foreman/api/test_capsule.py
@@ -0,0 +1,220 @@
+"""Tests for the ``smart_proxies`` paths.
+
+:Requirement: Smartproxy
+
+:CaseAutomation: Automated
+
+:CaseComponent: ForemanProxy
+
+:Team: Platform
+
+:CaseImportance: Critical
+
+"""
+from fauxfactory import gen_string, gen_url
+import pytest
+from requests import HTTPError
+
+from robottelo.config import user_nailgun_config
+
+
+@pytest.mark.e2e
+@pytest.mark.upgrade
+@pytest.mark.tier1
+def test_positive_update_capsule(request, pytestconfig, target_sat, module_capsule_configured):
+    """Update various capsule properties
+
+    :id: a3d3eaa9-ed8d-42e6-9c83-20251e5ca9af
+
+    :steps:
+        1. Get deployed capsule from fixture
+        2. Refresh features
+        3. Update capsule organization
+        4. Update capsule location
+        5. Update capsule name
+
+    :expectedresults: All capsule properties are updated
+
+    :BZ: 2077824
+
+    :customerscenario: true
+    """
+    new_name = f'{gen_string("alpha")}-{module_capsule_configured.hostname}'
+    capsule = target_sat.api.SmartProxy().search(
+        query={'search': f'name = {module_capsule_configured.hostname}'}
+    )[0]
+
+    # refresh features
+    features = capsule.refresh()
+    module_capsule_configured.run_installer_arg('enable-foreman-proxy-plugin-openscap')
+    features_new = capsule.refresh()
+    assert len(features_new["features"]) == len(features["features"]) + 1
+    assert 'Openscap' in [feature["name"] for feature in features_new["features"]]
+
+    # update organizations
+    organizations = [target_sat.api.Organization().create() for _ in range(2)]
+    capsule.organization = organizations
+    capsule = capsule.update(['organization'])
+    assert {org.id for org in capsule.organization} == {org.id for org in organizations}
+
+    # update locations
+    locations = [target_sat.api.Location().create() for _ in range(2)]
+    capsule.location = locations
+    capsule = capsule.update(['location'])
+    assert {loc.id for loc in capsule.location} == {loc.id for loc in locations}
+
+    # update name
+    capsule.name = new_name
+    capsule = capsule.update(['name'])
+    assert capsule.name == new_name
+
+    @request.addfinalizer
+    def _finalize():
+        # Updating the hostname back
+        if (
+            cap := target_sat.api.SmartProxy().search(query={'search': f'name = {new_name}'})
+        ) and pytestconfig.option.n_minus:
+            cap = cap[0]
+            cap.name = module_capsule_configured.hostname
+            cap.update(['name'])
+
+    # searching for non-default capsule BZ#2077824
+    capsules = target_sat.api.SmartProxy().search(query={'search': 'id != 1'})
+    assert len(capsules) > 0
+    assert capsule.url in [cps.url for cps in capsules]
+    assert capsule.name in [cps.name for cps in capsules]
+
+
+@pytest.mark.skip_if_not_set('fake_capsules')
+@pytest.mark.tier1
+def test_negative_create_with_url(target_sat):
+    """Capsule creation with random URL
+
+    :id: e48a6260-97e0-4234-a69c-77bbbcde85d6
+
+    :expectedresults: Proxy is not created
+    """
+    # Create a random proxy
+    with pytest.raises(HTTPError) as context:
+        target_sat.api.SmartProxy(url=gen_url(scheme='https')).create()
+    assert 'Unable to communicate' in
context.value.response.text + + +@pytest.mark.skip_if_not_set('fake_capsules') +@pytest.mark.tier1 +@pytest.mark.upgrade +def test_positive_delete(target_sat): + """Capsule deletion + + :id: 872bf12e-736d-43d1-87cf-2923966b59d0 + + :expectedresults: Capsule is deleted + + :BZ: 1398695 + """ + new_port = target_sat.available_capsule_port + with target_sat.default_url_on_new_port(9090, new_port) as url: + proxy = target_sat.api.SmartProxy(url=url).create() + proxy.delete() + with pytest.raises(HTTPError): + proxy.read() + + +@pytest.mark.skip_if_not_set('fake_capsules') +@pytest.mark.tier1 +def test_positive_update_url(request, target_sat): + """Capsule url updated + + :id: 0305fd54-4e0c-4dd9-a537-d342c3dc867e + + :expectedresults: Capsule has the url updated + """ + # Create fake capsule with name + name = gen_string('alpha') + port = target_sat.available_capsule_port + with target_sat.default_url_on_new_port(9090, port) as url: + proxy = target_sat.api.SmartProxy(url=url, name=name).create() + assert proxy.name == name + # Open another tunnel to update url + new_port = target_sat.available_capsule_port + with target_sat.default_url_on_new_port(9090, new_port) as url: + proxy.url = url + proxy = proxy.update(['url']) + assert proxy.url == url + + +@pytest.mark.skip_if_not_set('fake_capsules') +@pytest.mark.tier2 +@pytest.mark.upgrade +def test_positive_import_puppet_classes( + request, + session_puppet_enabled_sat, + puppet_proxy_port_range, + module_puppet_org, + module_puppet_loc, +): + """Import puppet classes from proxy for admin and non-admin user + + :id: 385efd1b-6146-47bf-babf-0127ce5955ed + + :expectedresults: Puppet classes are imported from proxy + + :CaseComponent: Puppet + + :BZ: 1398695, 2142555 + + :customerscenario: true + """ + puppet_sat = session_puppet_enabled_sat + update_msg = ( + 'Successfully updated environment and puppetclasses from the on-disk puppet installation' + ) + no_update_msg = 'No changes to your environments detected' + # Create role, add permissions and create non-admin user + user_login = gen_string('alpha') + user_password = gen_string('alpha') + role = puppet_sat.api.Role().create() + puppet_sat.api_factory.create_role_permissions( + role, + { + 'ForemanPuppet::Puppetclass': [ + 'view_puppetclasses', + 'create_puppetclasses', + 'import_puppetclasses', + ] + }, + ) + user = puppet_sat.api.User( + role=[role], + admin=True, + login=user_login, + password=user_password, + organization=[module_puppet_org], + location=[module_puppet_loc], + ).create() + request.addfinalizer(user.delete) + request.addfinalizer(role.delete) + + new_port = puppet_sat.available_capsule_port + with puppet_sat.default_url_on_new_port(9090, new_port) as url: + proxy = puppet_sat.api.SmartProxy(url=url).create() + + result = proxy.import_puppetclasses() + assert result['message'] in [update_msg, no_update_msg] + # Import puppetclasses with environment + result = proxy.import_puppetclasses(environment='production') + assert result['message'] in [update_msg, no_update_msg] + + # Non-Admin user with access to import_puppetclasses + user_cfg = user_nailgun_config(user_login, user_password) + user_cfg.url = f'https://{puppet_sat.hostname}' + user_proxy = puppet_sat.api.SmartProxy(server_config=user_cfg, id=proxy.id).read() + + result = user_proxy.import_puppetclasses() + assert result['message'] in [update_msg, no_update_msg] + # Import puppetclasses with environment + result = user_proxy.import_puppetclasses(environment='production') + assert result['message'] in [update_msg, 
no_update_msg] + + request.addfinalizer(puppet_sat.api.SmartProxy(id=proxy.id).delete) diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index b75784f6ed5..3fc0f8e7e3f 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -5,34 +5,46 @@ :CaseAutomation: Automated -:CaseLevel: System - :CaseComponent: Capsule-Content :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ + +from datetime import datetime, timedelta import re -from datetime import datetime from time import sleep -import pytest from nailgun import client -from nailgun import entities from nailgun.entity_mixins import call_entity_method_with_timeout +import pytest -from robottelo import constants from robottelo.config import settings -from robottelo.constants import DataFile +from robottelo.constants import ( + CONTAINER_CLIENTS, + ENVIRONMENT, + FAKE_1_YUM_REPOS_COUNT, + FAKE_3_YUM_REPO_RPMS, + FAKE_3_YUM_REPOS_COUNT, + FAKE_FILE_LARGE_COUNT, + FAKE_FILE_LARGE_URL, + FAKE_FILE_NEW_NAME, + KICKSTART_CONTENT, + PRDS, + REPOS, + REPOSET, + RH_CONTAINER_REGISTRY_HUB, + RPM_TO_UPLOAD, + DataFile, +) from robottelo.constants.repos import ANSIBLE_GALAXY -from robottelo.content_info import get_repo_files_by_url -from robottelo.content_info import get_repomd -from robottelo.content_info import get_repomd_revision +from robottelo.content_info import ( + get_repo_files_by_url, + get_repomd, + get_repomd_revision, +) from robottelo.utils.issue_handlers import is_open @@ -42,42 +54,15 @@ class TestCapsuleContentManagement: interactions and use capsule. """ - def update_capsule_download_policy(self, module_capsule_configured, download_policy): - """Updates capsule's download policy to desired value""" - proxy = entities.SmartProxy(id=module_capsule_configured.nailgun_capsule.id).read() - proxy.download_policy = download_policy - proxy.update(['download_policy']) - - @pytest.mark.tier3 - @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') - def test_positive_insights_puppet_package_availability(self, module_capsule_configured): - """Check `redhat-access-insights-puppet` package availability for - capsule - - :BZ: 1315844 - - :id: a31b0e21-aa5d-44e2-a408-5e01b79db3a1 - - :CaseComponent: RHCloud-Insights - - :Team: Platform - - :customerscenario: true - - :expectedresults: `redhat-access-insights-puppet` package is delivered - in capsule repo and is available for installation on capsule via - yum - """ - package_name = 'redhat-access-insights-puppet' - result = module_capsule_configured.run(f'yum list {package_name} | grep @capsule') - if result.status != 0: - result = module_capsule_configured.run(f'yum list available | grep {package_name}') - assert result.status == 0 - @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_uploaded_content_library_sync( - self, module_capsule_configured, function_org, function_product, function_lce_library + self, + module_capsule_configured, + function_org, + function_product, + function_lce_library, + target_sat, ): """Ensure custom repo with no upstream url and manually uploaded content after publishing to Library is synchronized to capsule @@ -91,7 +76,7 @@ def test_positive_uploaded_content_library_sync( :expectedresults: custom content is present on external capsule """ - repo = entities.Repository(product=function_product, url=None).create() + repo = target_sat.api.Repository(product=function_product, 
url=None).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce_library.id} @@ -102,7 +87,7 @@ def test_positive_uploaded_content_library_sync( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Upload custom content into the repo with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -110,14 +95,14 @@ def test_positive_uploaded_content_library_sync( assert repo.read().content_counts['rpm'] == 1 + timestamp = datetime.utcnow().replace(microsecond=0) # Publish new version of the content view cv.publish() + # query sync status as publish invokes sync, task succeeds + module_capsule_configured.wait_for_sync(start_time=timestamp) cv = cv.read() - assert len(cv.version) == 1 - module_capsule_configured.wait_for_sync() - # Verify the RPM published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -128,12 +113,12 @@ def test_positive_uploaded_content_library_sync( ) caps_files = get_repo_files_by_url(caps_repo_url) assert len(caps_files) == 1 - assert caps_files[0] == constants.RPM_TO_UPLOAD + assert caps_files[0] == RPM_TO_UPLOAD @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_checksum_sync( - self, module_capsule_configured, function_org, function_product, function_lce + self, module_capsule_configured, function_org, function_product, function_lce, target_sat ): """Synchronize repository to capsule, update repository's checksum type, trigger capsule sync and make sure checksum type was updated on @@ -151,7 +136,7 @@ def test_positive_checksum_sync( :CaseImportance: Critical """ # Create repository with sha256 checksum type - repo = entities.Repository( + repo = target_sat.api.Repository( product=function_product, checksum_type='sha256', mirroring_policy='additive', @@ -167,22 +152,22 @@ def test_positive_checksum_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync, publish and promote a repo - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() repo.sync() repo = repo.read() cv.publish() cv = cv.read() - assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() + # promote to capsule lce, invoking sync tasks cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type is sha256, not sha1 on capsule repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -204,18 +189,17 @@ def test_positive_checksum_sync( repo.sync() cv.publish() cv = cv.read() - assert len(cv.version) == 2 cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - 
module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type has updated to sha1 on capsule repomd = get_repomd(repo_url) checksum_types = re.findall(r'(?<=checksum type=").*?(?=")', repomd) @@ -227,7 +211,12 @@ def test_positive_checksum_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_updated_repo( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Sync a custom repo with no upstream url but uploaded content to the Capsule via promoted CV, update content of the repo, publish and promote the CV again, resync @@ -255,7 +244,7 @@ def test_positive_sync_updated_repo( :BZ: 2025494 """ - repo = entities.Repository(url=None, product=function_product).create() + repo = target_sat.api.Repository(url=None, product=function_product).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -273,18 +262,19 @@ def test_positive_sync_updated_repo( assert repo.read().content_counts['rpm'] == 1 # Create, publish and promote CV with the repository to the Capsule's LCE - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Upload more content to the repository with open(DataFile.SRPM_TO_UPLOAD, 'rb') as handle: repo.upload_content(files={'content': handle}) @@ -296,13 +286,15 @@ def test_positive_sync_updated_repo( cv = cv.read() assert len(cv.version) == 2 + cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check the content is synced on the Capsule side properly sat_repo_url = target_sat.get_published_repo_url( org=function_org.label, @@ -327,7 +319,12 @@ def test_positive_sync_updated_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_capsule_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create repository, add it to lifecycle environment, assign lifecycle environment with a capsule, sync repository, sync it once again, update @@ -351,7 +348,7 @@ def test_positive_capsule_sync( capsule """ repo_url = settings.repos.yum_1.url - repo = entities.Repository(product=function_product, url=repo_url).create() + repo = target_sat.api.Repository(product=function_product, url=repo_url).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce.id} @@ -362,21 +359,29 @@ def test_positive_capsule_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in 
result['results']]

         # Create a content view with the repository
-        cv = entities.ContentView(organization=function_org, repository=[repo]).create()
+        cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create()
         # Sync repository
         repo.sync()
         repo = repo.read()
         # Publish new version of the content view
         cv.publish()
         cv = cv.read()
-        assert len(cv.version) == 1
         cvv = cv.version[-1].read()
-        # Promote content view to lifecycle environment
+
+        # prior to trigger (promoting), assert no active sync tasks
+        active_tasks = module_capsule_configured.nailgun_capsule.content_get_sync(
+            synchronous=False, timeout=600
+        )['active_sync_tasks']
+        assert len(active_tasks) == 0
+
+        # Promote content view to lifecycle environment,
+        # invoking capsule sync task(s)
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
+        # wait for and validate the invoked task(s)
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cvv = cvv.read()
-        assert len(cvv.environment) == 2

         # Content of the published content view in
@@ -384,8 +389,6 @@
         # repository
         assert repo.content_counts['rpm'] == cvv.package_count

-        module_capsule_configured.wait_for_sync()
-
         # Assert that the content published on the capsule is exactly the
         # same as in repository on satellite
         sat_repo_url = target_sat.get_published_repo_url(
@@ -420,14 +423,14 @@
         cv = cv.read()
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
-        # Promote new content view version to lifecycle environment
+        # Promote new content view version to lifecycle environment,
+        # capsule sync task(s) invoked and succeed
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
-
-        module_capsule_configured.wait_for_sync()
-
         # Assert that the value of repomd revision of repository in
         # lifecycle environment on the capsule has not changed
         new_lce_revision_capsule = get_repomd_revision(caps_repo_url)
@@ -443,21 +446,22 @@
         cv = cv.read()
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
+
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2

         # Assert that packages count in the repository is updated
-        assert repo.content_counts['rpm'] == (constants.FAKE_1_YUM_REPOS_COUNT + 1)
+        assert repo.content_counts['rpm'] == (FAKE_1_YUM_REPOS_COUNT + 1)
         # Assert that the content of the published content view in
         # lifecycle environment is exactly the same as content of the
         # repository
         assert repo.content_counts['rpm'] == cvv.package_count

-        module_capsule_configured.wait_for_sync()
-
         # Assert that the content published on the capsule is exactly the
         # same as in the repository
         sat_files = get_repo_files_by_url(sat_repo_url)
@@ -467,7 +471,7 @@
     @pytest.mark.tier4
     @pytest.mark.skip_if_not_set('capsule', 'clients')
     def test_positive_iso_library_sync(
-        self, module_capsule_configured, module_entitlement_manifest_org, module_target_sat
+        self, module_capsule_configured, module_sca_manifest_org, module_target_sat
     ):
         """Ensure RH repo with ISOs after publishing to Library is
         synchronized to capsule automatically
@@ -483,18 +487,18 @@ def
test_positive_iso_library_sync( # Enable & sync RH repository with ISOs rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhsc'], - repo=constants.REPOS['rhsc7_iso']['name'], - reposet=constants.REPOSET['rhsc7_iso'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhsc'], + repo=REPOS['rhsc7_iso']['name'], + reposet=REPOSET['rhsc7_iso'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization - lce = entities.LifecycleEnvironment(organization=module_entitlement_manifest_org).search( - query={'search': f'name={constants.ENVIRONMENT}'} - )[0] + lce = module_target_sat.api.LifecycleEnvironment( + organization=module_sca_manifest_org + ).search(query={'search': f'name={ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -506,24 +510,24 @@ def test_positive_iso_library_sync( assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView( - organization=module_entitlement_manifest_org, repository=[rh_repo] + cv = module_target_sat.api.ContentView( + organization=module_sca_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view + timestamp = datetime.utcnow() cv.publish() - cv = cv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cv = cv.read() assert len(cv.version) == 1 # Verify ISOs are present on satellite sat_isos = get_repo_files_by_url(rh_repo.full_path, extension='iso') assert len(sat_isos) == 4 - module_capsule_configured.wait_for_sync() - # Verify all the ISOs are present on capsule caps_path = ( - f'{module_capsule_configured.url}/pulp/content/{module_entitlement_manifest_org.label}' + f'{module_capsule_configured.url}/pulp/content/{module_sca_manifest_org.label}' f'/{lce.label}/{cv.label}/content/dist/rhel/server/7/7Server/x86_64/sat-capsule/6.4/' 'iso/' ) @@ -534,7 +538,12 @@ def test_positive_iso_library_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_on_demand_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with 'on_demand' policy, add it to a CV, promote to an 'on_demand' Capsule's LCE, download a published package, @@ -551,9 +560,9 @@ def test_positive_on_demand_sync( the original package from the upstream repo """ repo_url = settings.repos.yum_3.url - packages_count = constants.FAKE_3_YUM_REPOS_COUNT - package = constants.FAKE_3_YUM_REPO_RPMS[0] - repo = entities.Repository( + packages_count = FAKE_3_YUM_REPOS_COUNT + package = FAKE_3_YUM_REPO_RPMS[0] + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, @@ -569,10 +578,10 @@ def test_positive_on_demand_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Update capsule's download policy to on_demand - self.update_capsule_download_policy(module_capsule_configured, 'on_demand') + 
module_capsule_configured.update_download_policy('on_demand') # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -584,13 +593,13 @@ def test_positive_on_demand_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify packages on Capsule match the source caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -614,7 +623,12 @@ def test_positive_on_demand_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_update_with_immediate_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with on_demand download policy, associate it with capsule, sync repo, update download policy to immediate, sync once @@ -630,15 +644,15 @@ def test_positive_update_with_immediate_sync( filesystem contains valid links to packages """ repo_url = settings.repos.yum_1.url - packages_count = constants.FAKE_1_YUM_REPOS_COUNT - repo = entities.Repository( + packages_count = FAKE_1_YUM_REPOS_COUNT + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, url=repo_url, ).create() # Update capsule's download policy to on_demand to match repository's policy - self.update_capsule_download_policy(module_capsule_configured, 'on_demand') + module_capsule_configured.update_download_policy('on_demand') # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce.id} @@ -649,7 +663,7 @@ def test_positive_update_with_immediate_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -661,13 +675,13 @@ def test_positive_update_with_immediate_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Update download policy to 'immediate' repo.download_policy = 'immediate' repo = repo.update(['download_policy']) @@ -675,7 +689,7 @@ def test_positive_update_with_immediate_sync( assert repo.download_policy == 'immediate' # Update capsule's download policy as well - self.update_capsule_download_policy(module_capsule_configured, 'immediate') + module_capsule_configured.update_download_policy('immediate') # Sync repository once again repo.sync() @@ -689,13 +703,13 @@ def test_positive_update_with_immediate_sync( 
cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify the count of RPMs published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -736,7 +750,7 @@ def test_positive_capsule_pub_url_accessible(self, module_capsule_configured): @pytest.mark.skip_if_not_set('capsule', 'clients') @pytest.mark.parametrize('distro', ['rhel7', 'rhel8_bos', 'rhel9_bos']) def test_positive_sync_kickstart_repo( - self, target_sat, module_capsule_configured, function_entitlement_manifest_org, distro + self, target_sat, module_capsule_configured, function_sca_manifest_org, distro ): """Sync kickstart repository to the capsule. @@ -757,14 +771,14 @@ def test_positive_sync_kickstart_repo( """ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['kickstart'][distro]['product'], - reposet=constants.REPOS['kickstart'][distro]['reposet'], - repo=constants.REPOS['kickstart'][distro]['name'], - releasever=constants.REPOS['kickstart'][distro]['version'], + org_id=function_sca_manifest_org.id, + product=REPOS['kickstart'][distro]['product'], + reposet=REPOS['kickstart'][distro]['reposet'], + repo=REPOS['kickstart'][distro]['name'], + releasever=REPOS['kickstart'][distro]['version'], ) - repo = entities.Repository(id=repo_id).read() - lce = entities.LifecycleEnvironment(organization=function_entitlement_manifest_org).create() + repo = target_sat.api.Repository(id=repo_id).read() + lce = target_sat.api.LifecycleEnvironment(organization=function_sca_manifest_org).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -775,11 +789,11 @@ def test_positive_sync_kickstart_repo( assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Update capsule's download policy to on_demand - self.update_capsule_download_policy(module_capsule_configured, 'on_demand') + module_capsule_configured.update_download_policy('on_demand') # Create a content view with the repository - cv = entities.ContentView( - organization=function_entitlement_manifest_org, repository=[repo] + cv = target_sat.api.ContentView( + organization=function_sca_manifest_org, repository=[repo] ).create() # Sync repository repo.sync(timeout='10m') @@ -792,26 +806,26 @@ def test_positive_sync_kickstart_repo( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check for kickstart content on SAT and CAPS tail = ( - f'rhel/server/7/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' + f'rhel/server/7/{REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' if distro == 'rhel7' - else f'{distro.split("_")[0]}/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 + else 
f'{distro.split("_")[0]}/{REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 ) url_base = ( - f'pulp/content/{function_entitlement_manifest_org.label}/{lce.label}/{cv.label}/' + f'pulp/content/{function_sca_manifest_org.label}/{lce.label}/{cv.label}/' f'content/dist/{tail}' ) # Check kickstart specific files - for file in constants.KICKSTART_CONTENT: + for file in KICKSTART_CONTENT: sat_file = target_sat.md5_by_url(f'{target_sat.url}/{url_base}/{file}') caps_file = target_sat.md5_by_url(f'{module_capsule_configured.url}/{url_base}/{file}') assert sat_file == caps_file @@ -866,7 +880,7 @@ def test_positive_sync_container_repo_end_to_end( repos = [] for ups_name in upstream_names: - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='docker', docker_upstream_name=ups_name, product=function_product, @@ -884,19 +898,20 @@ def test_positive_sync_container_repo_end_to_end( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories - cv = entities.ContentView(organization=function_org, repository=repos).create() + cv = target_sat.api.ContentView(organization=function_org, repository=repos).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Pull the images from capsule to the content host repo_paths = [ ( @@ -906,7 +921,7 @@ def test_positive_sync_container_repo_end_to_end( for repo in repos ] - for con_client in constants.CONTAINER_CLIENTS: + for con_client in CONTAINER_CLIENTS: result = container_contenthost.execute( f'{con_client} login -u {settings.server.admin_username}' f' -p {settings.server.admin_password} {module_capsule_configured.hostname}' @@ -956,14 +971,13 @@ def test_positive_sync_container_repo_end_to_end( ) assert result.status == 0 - @pytest.mark.skip_if_open("BZ:2121583") @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_collection_repo( self, + request, target_sat, module_capsule_configured, - rhel7_contenthost, function_product, function_lce_library, ): @@ -994,7 +1008,7 @@ def test_positive_sync_collection_repo( - name: theforeman.operations version: "0.1.0" ''' - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='ansible_collection', ansible_collection_requirements=requirements, product=function_product, @@ -1010,25 +1024,16 @@ def test_positive_sync_collection_repo( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync the repo + timestamp = datetime.utcnow() repo.sync(timeout=600) + + module_capsule_configured.wait_for_sync(start_time=timestamp) repo = repo.read() assert repo.content_counts['ansible_collection'] == 2 - module_capsule_configured.wait_for_sync() - - # Configure the content host to fetch collections from capsule - rhel7_contenthost.install_katello_ca(module_capsule_configured) - rhel7_contenthost.create_custom_repos( - **{ - 'server': settings.repos.rhel7_os, - 'ansible': settings.repos.ansible_repo, - } - ) - result = rhel7_contenthost.execute('yum -y install ansible') - assert result.status == 0 - repo_path = repo.full_path.replace(target_sat.hostname, module_capsule_configured.hostname) 
coll_path = './collections' + cfg_path = './ansible.cfg' cfg = ( '[defaults]\n' f'collections_paths = {coll_path}\n\n' @@ -1037,16 +1042,18 @@ def test_positive_sync_collection_repo( '[galaxy_server.capsule_galaxy]\n' f'url={repo_path}\n' ) - rhel7_contenthost.execute(f'echo "{cfg}" > ./ansible.cfg') + + request.addfinalizer(lambda: target_sat.execute(f'rm -rf {cfg_path} {coll_path}')) # Try to install collections from the Capsule - result = rhel7_contenthost.execute( + target_sat.execute(f'echo "{cfg}" > {cfg_path}') + result = target_sat.execute( 'ansible-galaxy collection install theforeman.foreman theforeman.operations' ) assert result.status == 0 assert 'error' not in result.stdout.lower() - result = rhel7_contenthost.execute(f'ls {coll_path}/ansible_collections/theforeman/') + result = target_sat.execute(f'ls {coll_path}/ansible_collections/theforeman/') assert result.status == 0 assert 'foreman' in result.stdout assert 'operations' in result.stdout @@ -1074,10 +1081,10 @@ def test_positive_sync_file_repo( :BZ: 1985122 """ - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='file', product=function_product, - url=constants.FAKE_FILE_LARGE_URL, + url=FAKE_FILE_LARGE_URL, ).create() repo.sync() @@ -1094,19 +1101,20 @@ def test_positive_sync_file_repo( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Run one more sync, check for status (BZ#1985122) sync_status = module_capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success' @@ -1128,8 +1136,8 @@ def test_positive_sync_file_repo( ) sat_files = get_repo_files_by_url(sat_repo_url, extension='iso') caps_files = get_repo_files_by_url(caps_repo_url, extension='iso') - assert len(sat_files) == len(caps_files) == constants.FAKE_FILE_LARGE_COUNT + 1 - assert constants.FAKE_FILE_NEW_NAME in caps_files + assert len(sat_files) == len(caps_files) == FAKE_FILE_LARGE_COUNT + 1 + assert FAKE_FILE_NEW_NAME in caps_files assert sat_files == caps_files for file in sat_files: @@ -1140,7 +1148,7 @@ def test_positive_sync_file_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_CV_to_multiple_LCEs( - self, target_sat, module_capsule_configured, module_manifest_org + self, target_sat, module_capsule_configured, module_sca_manifest_org ): """Synchronize a CV to multiple LCEs at the same time. All sync tasks should succeed. @@ -1165,19 +1173,19 @@ def test_positive_sync_CV_to_multiple_LCEs( # Sync a repository to the Satellite. 
repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_manifest_org.id, - product=constants.PRDS['rhel'], - repo=constants.REPOS['rhel7_extra']['name'], - reposet=constants.REPOSET['rhel7_extra'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhel'], + repo=REPOS['rhel7_extra']['name'], + reposet=REPOSET['rhel7_extra'], releasever=None, ) repo = target_sat.api.Repository(id=repo_id).read() repo.sync() # Create two LCEs, assign them to the Capsule. - lce1 = target_sat.api.LifecycleEnvironment(organization=module_manifest_org).create() + lce1 = target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() lce2 = target_sat.api.LifecycleEnvironment( - organization=module_manifest_org, prior=lce1 + organization=module_sca_manifest_org, prior=lce1 ).create() module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': [lce1.id, lce2.id]} @@ -1185,11 +1193,11 @@ def test_positive_sync_CV_to_multiple_LCEs( result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments() # there can and will be LCEs from other tests and orgs, but len() >= 2 assert len(result['results']) >= 2 - assert lce1.id and lce2.id in [capsule_lce['id'] for capsule_lce in result['results']] + assert {lce1.id, lce2.id}.issubset([capsule_lce['id'] for capsule_lce in result['results']]) # Create a Content View, add the repository and publish it. cv = target_sat.api.ContentView( - organization=module_manifest_org, repository=[repo] + organization=module_sca_manifest_org, repository=[repo] ).create() cv.publish() cv = cv.read() @@ -1197,16 +1205,20 @@ def test_positive_sync_CV_to_multiple_LCEs( # Promote the CV to both Capsule's LCEs without waiting for Capsule sync task completion. cvv = cv.version[-1].read() + assert len(cvv.environment) == 1 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce1.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce2.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 3 - # Check all sync tasks finished without errors. - module_capsule_configured.wait_for_sync() - @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_capsule_sync_status_persists( @@ -1224,7 +1236,7 @@ def test_positive_capsule_sync_status_persists( 4. Publish CV 5. Promote to lifecycle env 6. Sync Capsule - 7. Delete the task using foreman-rake console + 7. Delete all sync tasks using foreman-rake console 8. Verify the status of capsule is still synced :bz: 1956985 @@ -1247,32 +1259,198 @@ def test_positive_capsule_sync_status_persists( cv = cv.read() cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() - timestamp = (datetime.utcnow()).strftime('%Y-%m-%d %H:%M') - module_capsule_configured.wait_for_sync() + module_capsule_configured.wait_for_sync(start_time=timestamp) - search_result = target_sat.wait_for_tasks( - search_query='label = Actions::Katello::CapsuleContent::Sync' - f' and organization_id = {function_org.id}' - f' and started_at >= "{timestamp}"', - search_rate=15, - max_tries=5, - ) - # Delete the task using UUID (search_result[0].id) + # Delete all capsule sync tasks so that we fall back for audits. 
task_result = target_sat.execute( - f"""echo "ForemanTasks::Task.find( - '{search_result[0].id}').destroy!" | foreman-rake console""" + """echo "ForemanTasks::Task.where(action:'Synchronize capsule """ + f"""\\'{module_capsule_configured.hostname}\\'').delete_all" | foreman-rake console""" ) assert task_result.status == 0 - # Ensure task record was deleted. + + # Ensure task records were deleted. task_result = target_sat.execute( - f"""echo "ForemanTasks::Task.find('{search_result[0].id}')" | foreman-rake console""" + """echo "ForemanTasks::Task.where(action:'Synchronize capsule """ + f"""\\'{module_capsule_configured.hostname}\\'')" | foreman-rake console""" ) assert task_result.status == 0 - assert 'RecordNotFound' in task_result.stdout + assert '[]' in task_result.stdout # Check sync status again, and ensure last_sync_time is still correct sync_status = module_capsule_configured.nailgun_capsule.content_get_sync() - assert sync_status['last_sync_time'] >= timestamp + assert ( + datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC') >= timestamp + ) + + @pytest.mark.tier4 + @pytest.mark.skip_if_not_set('capsule') + def test_positive_remove_capsule_orphans( + self, + target_sat, + pytestconfig, + capsule_configured, + function_sca_manifest_org, + function_lce_library, + ): + """Synchronize RPM content to the capsule, disassociate the capsule from the content + source and resync, run orphan cleanup and ensure the RPM artifacts were removed. + + :id: 7089a36e-ea68-47ad-86ac-9945b732b0c4 + + :setup: + 1. A blank external capsule that has not been synced yet, with immediate download policy. + + :steps: + 1. Enable RHST repo and sync it to the Library LCE. + 2. Set immediate download policy to the capsule, assign it the Library LCE and sync it. + Ensure the RPM artifacts were created. + 3. Remove the Library LCE from the capsule and resync it. + 4. Run orphan cleanup for the capsule. + 5. Ensure the artifacts were removed. + + :expectedresults: + 1. RPM artifacts are created after capsule sync. + 2. RPM artifacts are removed after orphan cleanup. + + :customerscenario: true + + :BZ: 22043089, 2211962 + + """ + if pytestconfig.option.n_minus: + pytest.skip('Test cannot be run on n-minus setups session-scoped capsule') + # Enable RHST repo and sync it to the Library LCE. + repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch='x86_64', + org_id=function_sca_manifest_org.id, + product=REPOS['rhst8']['product'], + repo=REPOS['rhst8']['name'], + reposet=REPOSET['rhst8'], + ) + repo = target_sat.api.Repository(id=repo_id).read() + repo.sync() + + # Set immediate download policy to the capsule, assign it the Library LCE and sync it. + proxy = capsule_configured.nailgun_smart_proxy.read() + proxy.download_policy = 'immediate' + proxy.update(['download_policy']) + + capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': function_lce_library.id} + ) + result = capsule_configured.nailgun_capsule.content_lifecycle_environments() + assert len(result['results']) == 1 + assert result['results'][0]['id'] == function_lce_library.id + + sync_status = capsule_configured.nailgun_capsule.content_sync() + assert sync_status['result'] == 'success', 'Capsule sync task failed.' + + # Ensure the RPM artifacts were created. + result = capsule_configured.execute( + 'ls /var/lib/pulp/media/artifact/*/* | xargs file | grep RPM' + ) + assert not result.status, 'RPM artifacts are missing after capsule sync.'
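# The `ls /var/lib/pulp/media/artifact/*/* | xargs file | grep RPM` pipeline
# exits 0 only if `file` identifies at least one Pulp artifact as an RPM, so
# the shell exit status doubles as the assertion: zero here proves the
# artifacts exist, and the mirrored check after orphan cleanup below expects
# a non-zero status once they are gone.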
+ + # Remove the Library LCE from the capsule and resync it. + capsule_configured.nailgun_capsule.content_delete_lifecycle_environment( + data={'environment_id': function_lce_library.id} + ) + sync_status = capsule_configured.nailgun_capsule.content_sync() + assert sync_status['result'] == 'success', 'Capsule sync task failed.' + + # datetime string (local time) to search for proper task. + timestamp = (datetime.now().replace(microsecond=0) - timedelta(seconds=1)).strftime( + '%B %d, %Y at %I:%M:%S %p' + ) + # Run orphan cleanup for the capsule. + target_sat.execute( + 'foreman-rake katello:delete_orphaned_content RAILS_ENV=production ' + f'SMART_PROXY_ID={capsule_configured.nailgun_capsule.id}' + ) + target_sat.wait_for_tasks( + search_query=( + 'label = Actions::Katello::OrphanCleanup::RemoveOrphans' + f' and started_at >= "{timestamp}"' + ), + search_rate=5, + max_tries=10, + ) + + # Ensure the artifacts were removed. + result = capsule_configured.execute( + 'ls /var/lib/pulp/media/artifact/*/* | xargs file | grep RPM' + ) + assert result.status, 'RPM artifacts are still present. They should be gone.' + + @pytest.mark.skip_if_not_set('capsule') + def test_positive_capsule_sync_openstack_container_repos( + self, + module_target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, + ): + """Synchronize openstack container repositories to capsule + + :id: 23e64385-7f34-4ab9-bd63-72306e5a4de0 + + :setup: + 1. A blank external capsule that has not been synced yet. + + :steps: + 1. Enable and sync openstack container repos. + + :expectedresults: + 1. container repos should sync on capsule. + + :customerscenario: true + + :BZ: 2154734 + + """ + upstream_names = [ + 'rhosp13/openstack-cinder-api', + 'rhosp13/openstack-neutron-server', + 'rhosp13/openstack-neutron-dhcp-agent', + 'rhosp13/openstack-nova-api', + ] + repos = [] + + for ups_name in upstream_names: + repo = module_target_sat.api.Repository( + content_type='docker', + docker_upstream_name=ups_name, + product=function_product, + url=RH_CONTAINER_REGISTRY_HUB, + upstream_username=settings.subscription.rhn_username, + upstream_password=settings.subscription.rhn_password, + ).create() + repo.sync(timeout=1800) + repos.append(repo) + + # Associate LCE with the capsule + module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': function_lce.id} + ) + result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments() + assert len(result['results']) + assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] + + # Create and publish a content view with all repositories + cv = module_target_sat.api.ContentView(organization=function_org, repository=repos).create() + cv.publish() + cv = cv.read() + assert len(cv.version) == 1 + + # Promote the latest CV version into capsule's LCE + cvv = cv.version[-1].read() + timestamp = datetime.utcnow() + cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() + assert len(cvv.environment) == 2 diff --git a/tests/foreman/api/test_classparameters.py b/tests/foreman/api/test_classparameters.py index fb4d3da4f2f..931a11edec2 100644 --- a/tests/foreman/api/test_classparameters.py +++ b/tests/foreman/api/test_classparameters.py @@ -4,30 +4,22 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Puppet :CaseImportance: Medium :Team: Rocket -:TestType: Functional - -:Upstream: No """ import json 
from random import choice +from fauxfactory import gen_boolean, gen_integer, gen_string import pytest -from fauxfactory import gen_boolean -from fauxfactory import gen_integer -from fauxfactory import gen_string from requests import HTTPError from robottelo.config import settings -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import parametrized +from robottelo.utils.datafactory import filtered_datapoint, parametrized @filtered_datapoint @@ -140,10 +132,10 @@ def test_negative_update_parameter_type(self, test_data, module_puppet): 2. Error raised for invalid default value. """ sc_param = module_puppet['sc_params'].pop() + sc_param.override = True + sc_param.parameter_type = test_data['sc_type'] + sc_param.default_value = test_data['value'] with pytest.raises(HTTPError) as context: - sc_param.override = True - sc_param.parameter_type = test_data['sc_type'] - sc_param.default_value = test_data['value'] sc_param.update(['override', 'parameter_type', 'default_value']) assert sc_param.read().default_value != test_data['value'] assert 'Validation failed: Default value is invalid' in context.value.response.text @@ -373,9 +365,9 @@ def test_negative_validate_matcher_and_default_value( session_puppet_enabled_sat.api.OverrideValue( smart_class_parameter=sc_param, match='domain=example.com', value=gen_string('alpha') ).create() + sc_param.parameter_type = 'boolean' + sc_param.default_value = gen_string('alpha') with pytest.raises(HTTPError) as context: - sc_param.parameter_type = 'boolean' - sc_param.default_value = gen_string('alpha') sc_param.update(['parameter_type', 'default_value']) assert ( 'Validation failed: Default value is invalid, Lookup values is invalid' diff --git a/tests/foreman/api/test_computeprofile.py b/tests/foreman/api/test_computeprofile.py index 4cb695a9290..6d374bc8b34 100644 --- a/tests/foreman/api/test_computeprofile.py +++ b/tests/foreman/api/test_computeprofile.py @@ -4,30 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create new Compute Profile using different names :id: 97d04911-9368-4674-92c7-1e3ff114bc18 @@ -36,17 +32,15 @@ def test_positive_create_with_name(name): :CaseImportance: Critical - :CaseLevel: Component - :parametrized: yes """ - profile = entities.ComputeProfile(name=name).create() + profile = target_sat.api.ComputeProfile(name=name).create() assert name == profile.name @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create(name): +def test_negative_create(name, target_sat): """Attempt to create Compute Profile using invalid names only :id: 2d34a1fd-70a5-4e59-b2e2-86fbfe8e31ab @@ -55,17 +49,15 @@ def test_negative_create(name): :CaseImportance: Critical - :CaseLevel: Component - :parametrized: yes """ with pytest.raises(HTTPError): - entities.ComputeProfile(name=name).create() + 
target_sat.api.ComputeProfile(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat): """Update selected Compute Profile entity using proper names :id: c79193d7-2e0f-4ed9-b947-05feeddabfda @@ -74,19 +66,17 @@ def test_positive_update_name(new_name): :CaseImportance: Critical - :CaseLevel: Component - :parametrized: yes """ - profile = entities.ComputeProfile().create() - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + profile = target_sat.api.ComputeProfile().create() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = target_sat.api.ComputeProfile(id=profile.id).read() assert new_name == updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat): """Attempt to update Compute Profile entity using invalid names only :id: 042b40d5-a78b-4e65-b5cb-5b270b800b37 @@ -95,20 +85,18 @@ def test_negative_update_name(new_name): :CaseImportance: Critical - :CaseLevel: Component - :parametrized: yes """ - profile = entities.ComputeProfile().create() + profile = target_sat.api.ComputeProfile().create() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = target_sat.api.ComputeProfile(id=profile.id).read() assert new_name != updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_delete(new_name): +def test_positive_delete(new_name, target_sat): """Delete Compute Profile entity :id: 0a620e23-7ba6-4178-af7a-fd1e332f478f @@ -117,11 +105,9 @@ def test_positive_delete(new_name): :CaseImportance: Critical - :CaseLevel: Component - :parametrized: yes """ - profile = entities.ComputeProfile(name=new_name).create() + profile = target_sat.api.ComputeProfile(name=new_name).create() profile.delete() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id).read() diff --git a/tests/foreman/api/test_computeresource_azurerm.py b/tests/foreman/api/test_computeresource_azurerm.py index 89501467e70..47bf8313066 100644 --- a/tests/foreman/api/test_computeresource_azurerm.py +++ b/tests/foreman/api/test_computeresource_azurerm.py @@ -4,28 +4,25 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ComputeResources-Azure :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings -from robottelo.constants import AZURERM_FILE_URI -from robottelo.constants import AZURERM_PLATFORM_DEFAULT -from robottelo.constants import AZURERM_PREMIUM_OS_Disk -from robottelo.constants import AZURERM_RHEL7_FT_CUSTOM_IMG_URN -from robottelo.constants import AZURERM_RHEL7_UD_IMG_URN -from robottelo.constants import AZURERM_VM_SIZE_DEFAULT +from robottelo.constants import ( + AZURERM_FILE_URI, + AZURERM_PLATFORM_DEFAULT, + AZURERM_RHEL7_FT_CUSTOM_IMG_URN, + AZURERM_RHEL7_UD_IMG_URN, + AZURERM_VM_SIZE_DEFAULT, + 
AZURERM_PREMIUM_OS_Disk, +) class TestAzureRMComputeResourceTestCase: @@ -51,7 +48,6 @@ def test_positive_crud_azurerm_cr( :CaseImportance: Critical - :CaseLevel: Component """ # Create CR cr_name = gen_string('alpha') @@ -106,7 +102,6 @@ def test_positive_create_cloud_init_image( :expectedresults: Cloud init image should be added in AzureRM CR along with username - :CaseLevel: Integration """ assert module_azurerm_cloudimg.architecture.id == sat_azure_default_architecture.id @@ -126,7 +121,6 @@ def test_positive_check_available_networks(self, azurermclient, module_azurerm_c :expectedresults: All the networks from AzureRM CR should be available. - :CaseLevel: Integration """ cr_nws = module_azurerm_cr.available_networks() portal_nws = azurermclient.list_network() @@ -141,8 +135,6 @@ def test_gov_cloud_regions_from_azure_compute_resources(self): :CaseImportance: Medium - :CaseLevel: Acceptance - :CaseAutomation: ManualOnly :steps: @@ -265,9 +257,7 @@ def test_positive_azurerm_host_provisioned(self, class_host_ft, azureclient_host :id: ff27905f-fa3c-43ac-b969-9525b32f75f5 - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1. Create a AzureRM Compute Resource and provision host. @@ -298,8 +288,6 @@ def test_positive_azurerm_host_power_on_off(self, class_host_ft, azureclient_hos :id: 9ced29d7-d866-4d0c-ac27-78753b5b5a94 - :CaseLevel: System - :steps: 1. Create a AzureRM Compute Resource. 2. Provision a Host on Azure Cloud using above CR. @@ -419,9 +407,7 @@ def test_positive_azurerm_ud_host_provisioned(self, class_host_ud, azureclient_h :id: df496d7c-3443-4afe-b807-5bbfc90e866e - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1. Create a AzureRM Compute Resource and provision host. 
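One pattern worth noting across these files: direct `nailgun.entities` imports are dropped in favor of entity accessors on the Satellite fixtures (`target_sat.api.*`, `module_target_sat.api.*`, `class_target_sat.api.*`), so every API object is created against an explicit Satellite instance. A minimal sketch of the converted shape, assuming robottelo's standard `target_sat` fixture; the test name and profile name here are illustrative only:

import pytest


@pytest.mark.tier1
def test_positive_create_profile_sketch(request, target_sat):
    # Entity classes hang off target_sat.api rather than the global
    # nailgun.entities module, binding the request to this Satellite.
    profile = target_sat.api.ComputeProfile(name='demo-profile').create()
    # Converted tests register cleanup on the fixture request where needed.
    request.addfinalizer(profile.delete)
    assert profile.name == 'demo-profile'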
@@ -572,8 +558,6 @@ def test_positive_azurerm_custom_image_host_provisioned( :id: b5be5128-ad49-4dbd-a660-3e38ce012327 - :CaseLevel: System - :CaseImportance: Critical :steps: @@ -596,4 +580,4 @@ def test_positive_azurerm_custom_image_host_provisioned( # Azure cloud assert self.hostname.lower() == azureclient_host.name - assert AZURERM_VM_SIZE_DEFAULT == azureclient_host.type + assert azureclient_host.type == AZURERM_VM_SIZE_DEFAULT diff --git a/tests/foreman/api/test_computeresource_gce.py b/tests/foreman/api/test_computeresource_gce.py index 74a532ccb47..2d088f4f2a2 100644 --- a/tests/foreman/api/test_computeresource_gce.py +++ b/tests/foreman/api/test_computeresource_gce.py @@ -7,27 +7,20 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-GCE :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings -from robottelo.constants import VALID_GCE_ZONES - -RHEL_CLOUD_PROJECTS = ['rhel-cloud', 'rhel-sap-cloud'] +from robottelo.constants import GCE_RHEL_CLOUD_PROJECTS, VALID_GCE_ZONES @pytest.mark.skip_if_not_set('gce') @@ -45,7 +38,6 @@ def test_positive_crud_gce_cr(self, sat_gce, sat_gce_org, sat_gce_loc, gce_cert) :CaseImportance: Critical - :CaseLevel: Component """ cr_name = gen_string('alpha') # Testing Create @@ -85,11 +77,10 @@ def test_positive_check_available_images(self, module_gce_compute, googleclient) :expectedresults: RHEL images from GCP are available to select in GCE CR - :CaseLevel: Integration """ satgce_images = module_gce_compute.available_images() googleclient_images = googleclient.list_templates( - include_public=True, public_projects=RHEL_CLOUD_PROJECTS + include_public=True, public_projects=GCE_RHEL_CLOUD_PROJECTS ) googleclient_image_names = [img.name for img in googleclient_images] # Validating GCE_CR images in Google CR @@ -107,7 +98,6 @@ def test_positive_check_available_networks(self, module_gce_compute, googleclien :expectedresults: All the networks from Google CR should be available to select in GCE CR - :CaseLevel: Integration """ gceavailable_networks = module_gce_compute.available_networks() satgce_networks = [net['name'] for net in gceavailable_networks['results']] @@ -124,7 +114,6 @@ def test_positive_check_available_flavors(self, module_gce_compute): :expectedresults: All the flavors from Google CR should be available to select in GCE Host - :CaseLevel: Integration """ satgce_flavors = int(module_gce_compute.available_flavors()['total']) assert satgce_flavors > 1 @@ -145,7 +134,6 @@ def test_positive_create_finish_template_image( :expectedresults: Finish template image should be added in GCE CR along with username - :CaseLevel: Integration """ assert module_gce_finishimg.compute_resource.id == module_gce_compute.id assert module_gce_finishimg.uuid == gce_latest_rhel_uuid @@ -164,7 +152,6 @@ def test_positive_create_cloud_init_image( :expectedresults: Cloud init image should be added in GCE CR along with username - :CaseLevel: Integration """ assert module_gce_cloudimg.compute_resource.id == module_gce_compute.id assert module_gce_cloudimg.uuid == gce_custom_cloudinit_uuid @@ -238,15 +225,14 @@ def google_host(self, googleclient): @pytest.mark.e2e @pytest.mark.tier1 + @pytest.mark.build_sanity @pytest.mark.parametrize('sat_gce', ['sat', 'puppet_sat'], indirect=True) def test_positive_gce_host_provisioned(self, class_host, google_host): """Host can be provisioned on Google Cloud :id: 
889975f2-56ca-4584-95a7-21c513969630 - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1. Create a GCE Compute Resource @@ -270,8 +256,6 @@ def test_positive_gce_host_power_on_off(self, class_host, google_host): :id: b622c6fc-c45e-431d-8de3-9d0237873998 - :CaseLevel: System - :steps: 1. Create a GCE Compute Resource 2. Create a Hostgroup with all the Global and Foreman entities but diff --git a/tests/foreman/api/test_computeresource_libvirt.py b/tests/foreman/api/test_computeresource_libvirt.py index 97fb2d4a88c..ad95fb9f202 100644 --- a/tests/foreman/api/test_computeresource_libvirt.py +++ b/tests/foreman/api/test_computeresource_libvirt.py @@ -8,144 +8,119 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-libvirt :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from random import randint - -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.config import settings -from robottelo.constants import LIBVIRT_RESOURCE_URL -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.constants import FOREMAN_PROVIDERS, LIBVIRT_RESOURCE_URL +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) -pytestmark = [ - pytest.mark.skip_if_not_set('libvirt'), -] +pytestmark = [pytest.mark.skip_if_not_set('libvirt')] +LIBVIRT_URL = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname -@pytest.fixture(scope="module") -def setup(): - """Set up organization and location for tests.""" - setupEntities = type("", (), {})() - setupEntities.org = entities.Organization().create() - setupEntities.loc = entities.Location(organization=[setupEntities.org]).create() - setupEntities.current_libvirt_url = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname - return setupEntities +@pytest.mark.e2e +def test_positive_crud_libvirt_cr(module_target_sat, module_org, module_location): + """CRUD compute resource libvirt -@pytest.mark.tier1 -@pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_name(setup, name): - """Create compute resources with different names - - :id: 1e545c56-2f53-44c1-a17e-38c83f8fe0c1 + :id: 1e545c56-2f53-44c1-a17e-38c83f8fe0c2 :expectedresults: Compute resources are created with expected names :CaseImportance: Critical - - :CaseLevel: Component - - :parametrized: yes """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], - name=name, - organization=[setup.org], - url=setup.current_libvirt_url, - ).create() - assert compresource.name == name - - -@pytest.mark.tier1 -@pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_create_with_description(setup, description): - """Create compute resources with different descriptions - - :id: 1fa5b35d-ee47-452b-bb5f-4a4ca321f992 - - :expectedresults: Compute resources are created with expected - descriptions - - :CaseImportance: Critical - - :CaseLevel: Component + name = gen_string('alphanumeric') + description = gen_string('alphanumeric') + display_type = 'spice' + libvirt_url = 'qemu+tcp://libvirt.example.com:16509/system' - :parametrized: yes - """ - compresource = entities.LibvirtComputeResource( + cr = module_target_sat.api.LibvirtComputeResource( + name=name, 
description=description, - location=[setup.loc], - organization=[setup.org], - url=setup.current_libvirt_url, - ).create() - assert compresource.description == description - - -@pytest.mark.tier2 -@pytest.mark.parametrize('display_type', ['spice', 'vnc']) -def test_positive_create_libvirt_with_display_type(setup, display_type): - """Create a libvirt compute resources with different values of - 'display_type' parameter - - :id: 76380f31-e217-4ff1-ac6b-20f41e59f133 - - :expectedresults: Compute resources are created with expected - display_type value - - :CaseImportance: High - - :CaseLevel: Component - - :parametrized: yes - """ - compresource = entities.LibvirtComputeResource( + provider=FOREMAN_PROVIDERS['libvirt'], display_type=display_type, - location=[setup.loc], - organization=[setup.org], - url=setup.current_libvirt_url, + organization=[module_org], + location=[module_location], + url=libvirt_url, ).create() - assert compresource.display_type == display_type + assert cr.name == name + assert cr.description == description + assert cr.provider == FOREMAN_PROVIDERS['libvirt'] + assert cr.display_type == display_type + assert cr.url == libvirt_url + + # Update + new_name = gen_string('alphanumeric') + new_description = gen_string('alphanumeric') + new_display_type = 'vnc' + new_org = module_target_sat.api.Organization().create() + new_loc = module_target_sat.api.Location(organization=[new_org]).create() + cr.name = new_name + cr.description = new_description + cr.display_type = new_display_type + cr.url = LIBVIRT_URL + cr.organization = [new_org] + cr.location = [new_loc] + cr.update(['name', 'description', 'display_type', 'url', 'organization', 'location']) + + # READ + updated_cr = module_target_sat.api.LibvirtComputeResource(id=cr.id).read() + assert updated_cr.name == new_name + assert updated_cr.description == new_description + assert updated_cr.display_type == new_display_type + assert updated_cr.url == LIBVIRT_URL + assert updated_cr.organization[0].id == new_org.id + assert updated_cr.location[0].id == new_loc.id + # DELETE + updated_cr.delete() + assert not module_target_sat.api.LibvirtComputeResource().search( + query={'search': f'name={new_name}'} + ) @pytest.mark.tier1 -def test_positive_create_with_provider(setup): - """Create compute resources with different providers. 
Testing only - Libvirt and Docker as other providers require valid credentials +@pytest.mark.parametrize('name', **parametrized(valid_data_list())) +def test_positive_create_with_name_description( + name, request, module_target_sat, module_org, module_location +): + """Create compute resources with different names and descriptions - :id: f61c66c9-15f8-4b00-9e53-7ebfb09397cc + :id: 1e545c56-2f53-44c1-a17e-38c83f8fe0c1 - :expectedresults: Compute resources are created with expected providers + :expectedresults: Compute resources are created with expected names and descriptions :CaseImportance: Critical - :CaseLevel: Component + :parametrized: yes """ - entity = entities.LibvirtComputeResource() - entity.location = [setup.loc] - entity.organization = [setup.org] - result = entity.create() - assert result.provider == entity.provider + compresource = module_target_sat.api.LibvirtComputeResource( + name=name, + description=name, + organization=[module_org], + location=[module_location], + url=LIBVIRT_URL, + ).create() + request.addfinalizer(compresource.delete) + assert compresource.name == name + assert compresource.description == name @pytest.mark.tier2 -def test_positive_create_with_locs(setup): - """Create a compute resource with multiple locations +def test_positive_create_with_orgs_and_locs(request, module_target_sat): + """Create a compute resource with multiple organizations and locations :id: c6c6c6f7-50ca-4f38-8126-eb95359d7cbb @@ -153,251 +128,20 @@ def test_positive_create_with_locs(setup): locations assigned :CaseImportance: High - - :CaseLevel: Integration """ - locs = [entities.Location(organization=[setup.org]).create() for _ in range(randint(3, 5))] - compresource = entities.LibvirtComputeResource( - location=locs, organization=[setup.org], url=setup.current_libvirt_url - ).create() - assert {loc.name for loc in locs} == {loc.read().name for loc in compresource.location} - - -@pytest.mark.tier2 -def test_positive_create_with_orgs(setup): - """Create a compute resource with multiple organizations - - :id: 2f6e5019-6353-477e-a81f-2a551afc7556 - - :expectedresults: A compute resource is created with expected multiple - organizations assigned - - :CaseImportance: High - - :CaseLevel: Integration - """ - orgs = [entities.Organization().create() for _ in range(randint(3, 5))] - compresource = entities.LibvirtComputeResource( - organization=orgs, url=setup.current_libvirt_url + orgs = [module_target_sat.api.Organization().create() for _ in range(2)] + locs = [module_target_sat.api.Location(organization=[org]).create() for org in orgs] + compresource = module_target_sat.api.LibvirtComputeResource( + location=locs, organization=orgs, url=LIBVIRT_URL ).create() + request.addfinalizer(compresource.delete) assert {org.name for org in orgs} == {org.read().name for org in compresource.organization} - - -@pytest.mark.tier1 -@pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_update_name(setup, new_name): - """Update a compute resource with different names - - :id: 60f08418-b1a2-445e-9cd6-dbc92a33b57a - - :expectedresults: Compute resource is updated with expected names - - :CaseImportance: Critical - - :CaseLevel: Component - - :parametrized: yes - """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url - ).create() - compresource.name = new_name - compresource = compresource.update(['name']) - assert compresource.name == new_name - - -@pytest.mark.tier2 
-@pytest.mark.parametrize('new_description', **parametrized(valid_data_list())) -def test_positive_update_description(setup, new_description): - """Update a compute resource with different descriptions - - :id: aac5dc53-8709-441b-b360-28b8efd3f63f - - :expectedresults: Compute resource is updated with expected - descriptions - - :CaseImportance: High - - :CaseLevel: Component - - :parametrized: yes - """ - compresource = entities.LibvirtComputeResource( - description=gen_string('alpha'), - location=[setup.loc], - organization=[setup.org], - url=setup.current_libvirt_url, - ).create() - compresource.description = new_description - compresource = compresource.update(['description']) - assert compresource.description == new_description - - -@pytest.mark.tier2 -@pytest.mark.parametrize('display_type', ['spice', 'vnc']) -def test_positive_update_libvirt_display_type(setup, display_type): - """Update a libvirt compute resource with different values of - 'display_type' parameter - - :id: 0cbf08ac-acc4-476a-b389-271cea2b6cda - - :expectedresults: Compute resource is updated with expected - display_type value - - :CaseImportance: High - - :CaseLevel: Component - - :parametrized: yes - """ - compresource = entities.LibvirtComputeResource( - display_type='VNC', - location=[setup.loc], - organization=[setup.org], - url=setup.current_libvirt_url, - ).create() - compresource.display_type = display_type - compresource = compresource.update(['display_type']) - assert compresource.display_type == display_type - - -@pytest.mark.tier2 -def test_positive_update_url(setup): - """Update a compute resource's url field - - :id: 259aa060-ed9e-4ed5-91e1-7fb0a3592879 - - :expectedresults: Compute resource is updated with expected url - - :CaseImportance: High - - :CaseLevel: Component - """ - new_url = 'qemu+tcp://localhost:16509/system' - - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url - ).create() - compresource.url = new_url - compresource = compresource.update(['url']) - assert compresource.url == new_url - - -@pytest.mark.tier2 -def test_positive_update_loc(setup): - """Update a compute resource's location - - :id: 57e96c7c-da9e-4400-af80-c374cd6b3d4a - - :expectedresults: Compute resource is updated with expected location - - :CaseImportance: High - - :CaseLevel: Integration - """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url - ).create() - new_loc = entities.Location(organization=[setup.org]).create() - compresource.location = [new_loc] - compresource = compresource.update(['location']) - assert len(compresource.location) == 1 - assert compresource.location[0].id == new_loc.id - - -@pytest.mark.tier2 -def test_positive_update_locs(setup): - """Update a compute resource with new multiple locations - - :id: cda9f501-2879-4cb0-a017-51ee795232f1 - - :expectedresults: Compute resource is updated with expected locations - - :CaseImportance: High - - :CaseLevel: Integration - """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url - ).create() - new_locs = [entities.Location(organization=[setup.org]).create() for _ in range(randint(3, 5))] - compresource.location = new_locs - compresource = compresource.update(['location']) - assert {location.id for location in compresource.location} == { - location.id for location in new_locs - } - - -@pytest.mark.tier2 -def 
test_positive_update_org(setup): - """Update a compute resource's organization - - :id: 430b64a2-7f64-4344-a73b-1b47d8dfa6cb - - :expectedresults: Compute resource is updated with expected - organization - - :CaseImportance: High - - :CaseLevel: Integration - """ - compresource = entities.LibvirtComputeResource( - organization=[setup.org], url=setup.current_libvirt_url - ).create() - new_org = entities.Organization().create() - compresource.organization = [new_org] - compresource = compresource.update(['organization']) - assert len(compresource.organization) == 1 - assert compresource.organization[0].id == new_org.id - - -@pytest.mark.tier2 -def test_positive_update_orgs(setup): - """Update a compute resource with new multiple organizations - - :id: 2c759ad5-d115-46d9-8365-712c0bb39a1d - - :expectedresults: Compute resource is updated with expected - organizations - - :CaseImportance: High - - :CaseLevel: Integration - """ - compresource = entities.LibvirtComputeResource( - organization=[setup.org], url=setup.current_libvirt_url - ).create() - new_orgs = [entities.Organization().create() for _ in range(randint(3, 5))] - compresource.organization = new_orgs - compresource = compresource.update(['organization']) - assert {organization.id for organization in compresource.organization} == { - organization.id for organization in new_orgs - } - - -@pytest.mark.tier1 -def test_positive_delete(setup): - """Delete a compute resource - - :id: 0117a4f1-e2c2-44aa-8919-453166aeebbc - - :expectedresults: Compute resources is successfully deleted - - :CaseImportance: Critical - - :CaseLevel: Component - """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url - ).create() - compresource.delete() - with pytest.raises(HTTPError): - compresource.read() + assert {loc.name for loc in locs} == {loc.read().name for loc in compresource.location} @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_invalid_name(setup, name): +def test_negative_create_with_invalid_name(name, module_target_sat, module_org, module_location): """Attempt to create compute resources with invalid names :id: f73bf838-3ffd-46d3-869c-81b334b47b13 @@ -406,21 +150,19 @@ def test_negative_create_with_invalid_name(setup, name): :CaseImportance: High - :CaseLevel: Component - :parametrized: yes """ with pytest.raises(HTTPError): - entities.LibvirtComputeResource( - location=[setup.loc], + module_target_sat.api.LibvirtComputeResource( name=name, - organization=[setup.org], - url=setup.current_libvirt_url, + organization=[module_org], + location=[module_location], + url=LIBVIRT_URL, ).create() @pytest.mark.tier2 -def test_negative_create_with_same_name(setup): +def test_negative_create_with_same_name(request, module_target_sat, module_org, module_location): """Attempt to create a compute resource with already existing name :id: 9376e25c-2aa8-4d99-83aa-2eec160c030e @@ -428,25 +170,25 @@ def test_negative_create_with_same_name(setup): :expectedresults: Compute resources is not created :CaseImportance: High - - :CaseLevel: Component """ name = gen_string('alphanumeric') - entities.LibvirtComputeResource( - location=[setup.loc], name=name, organization=[setup.org], url=setup.current_libvirt_url + cr = module_target_sat.api.LibvirtComputeResource( + location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(cr.delete) + assert cr.name == name with 
pytest.raises(HTTPError): - entities.LibvirtComputeResource( - location=[setup.loc], + module_target_sat.api.LibvirtComputeResource( name=name, - organization=[setup.org], - url=setup.current_libvirt_url, + organization=[module_org], + location=[module_location], + url=LIBVIRT_URL, ).create() @pytest.mark.tier2 @pytest.mark.parametrize('url', **parametrized({'random': gen_string('alpha'), 'empty': ''})) -def test_negative_create_with_url(setup, url): +def test_negative_create_with_url(module_target_sat, module_org, module_location, url): """Attempt to create compute resources with invalid url :id: 37e9bf39-382e-4f02-af54-d3a17e285c2a @@ -455,19 +197,19 @@ def test_negative_create_with_url(setup, url): :CaseImportance: High - :CaseLevel: Component - :parametrized: yes """ with pytest.raises(HTTPError): - entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=url + module_target_sat.api.LibvirtComputeResource( + location=[module_location], organization=[module_org], url=url ).create() @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) -def test_negative_update_invalid_name(setup, new_name): +def test_negative_update_invalid_name( + request, module_target_sat, module_org, module_location, new_name +): """Attempt to update compute resource with invalid names :id: a6554c1f-e52f-4614-9fc3-2127ced31470 @@ -476,22 +218,21 @@ def test_negative_update_invalid_name(setup, new_name): :CaseImportance: High - :CaseLevel: Component - :parametrized: yes """ name = gen_string('alphanumeric') - compresource = entities.LibvirtComputeResource( - location=[setup.loc], name=name, organization=[setup.org], url=setup.current_libvirt_url + compresource = module_target_sat.api.LibvirtComputeResource( + location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(compresource.delete) + compresource.name = new_name with pytest.raises(HTTPError): - compresource.name = new_name compresource.update(['name']) assert compresource.read().name == name @pytest.mark.tier2 -def test_negative_update_same_name(setup): +def test_negative_update_same_name(request, module_target_sat, module_org, module_location): """Attempt to update a compute resource with already existing name :id: 4d7c5eb0-b8cb-414f-aa10-fe464a164ab4 @@ -499,25 +240,25 @@ def test_negative_update_same_name(setup): :expectedresults: Compute resources is not updated :CaseImportance: High - - :CaseLevel: Component """ name = gen_string('alphanumeric') - entities.LibvirtComputeResource( - location=[setup.loc], name=name, organization=[setup.org], url=setup.current_libvirt_url + compresource = module_target_sat.api.LibvirtComputeResource( + location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() - new_compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url + request.addfinalizer(compresource.delete) + new_compresource = module_target_sat.api.LibvirtComputeResource( + location=[module_location], organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(new_compresource.delete) + new_compresource.name = name with pytest.raises(HTTPError): - new_compresource.name = name new_compresource.update(['name']) assert new_compresource.read().name != name @pytest.mark.tier2 @pytest.mark.parametrize('url', **parametrized({'random': gen_string('alpha'), 'empty': ''})) -def test_negative_update_url(setup, url): +def 
test_negative_update_url(url, request, module_target_sat, module_org, module_location): """Attempt to update a compute resource with invalid url :id: b5256090-2ceb-4976-b54e-60d60419fe50 @@ -526,14 +267,13 @@ def test_negative_update_url(setup, url): :CaseImportance: High - :CaseLevel: Component - :parametrized: yes """ - compresource = entities.LibvirtComputeResource( - location=[setup.loc], organization=[setup.org], url=setup.current_libvirt_url + compresource = module_target_sat.api.LibvirtComputeResource( + location=[module_location], organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(compresource.delete) + compresource.url = url with pytest.raises(HTTPError): - compresource.url = url compresource.update(['url']) assert compresource.read().url != url diff --git a/tests/foreman/api/test_contentcredentials.py b/tests/foreman/api/test_contentcredentials.py index 71053b2900f..3f088ae6253 100644 --- a/tests/foreman/api/test_contentcredentials.py +++ b/tests/foreman/api/test_contentcredentials.py @@ -4,36 +4,32 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentCredentials :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from copy import copy -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests import HTTPError from robottelo.constants import DataFile -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) key_content = DataFile.VALID_GPG_KEY_FILE.read_text() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a GPG key with valid name. :id: 741d969b-28ef-481f-bcf7-ed4cd920b030 @@ -44,12 +40,12 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, name=name).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert name == gpg_key.name @pytest.mark.tier1 -def test_positive_create_with_content(module_org): +def test_positive_create_with_content(module_org, module_target_sat): """Create a GPG key with valid name and valid gpg key text. :id: cfa6690e-fed7-49cf-94f9-fd2deed941c0 @@ -58,13 +54,13 @@ def test_positive_create_with_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() assert key_content == gpg_key.content @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_name(module_org, name): +def test_negative_create_name(module_org, name, module_target_sat): """Attempt to create GPG key with invalid names only. 
:id: 904a3ed0-7d50-495e-a700-b4f1ae913599 @@ -76,13 +72,13 @@ def test_negative_create_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(module_org, module_target_sat): """Attempt to create a GPG key providing a name of already existent entity @@ -93,15 +89,15 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_content(module_org): +def test_negative_create_with_content(module_org, module_target_sat): """Attempt to create GPG key with empty content. :id: fc79c840-6bcb-4d97-9145-c0008d5b028d @@ -111,14 +107,14 @@ def test_negative_create_with_content(module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, content='').create() + module_target_sat.api.GPGKey(organization=module_org, content='').create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Update GPG key name to another valid name. :id: 9868025d-5346-42c9-b850-916ce37a9541 @@ -129,14 +125,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name gpg_key = gpg_key.update(['name']) assert new_name == gpg_key.name @pytest.mark.tier1 -def test_positive_update_content(module_org): +def test_positive_update_content(module_org, module_target_sat): """Update GPG key content text to another valid one. 
:id: 62fdaf55-c931-4be6-9857-68cc816046ad @@ -145,7 +141,7 @@ def test_positive_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -156,7 +152,7 @@ def test_positive_update_content(module_org): @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, new_name): +def test_negative_update_name(module_org, new_name, module_target_sat): """Attempt to update GPG key name to invalid one :id: 1a43f610-8969-4f08-967f-fb6af0fca31b @@ -167,7 +163,7 @@ def test_negative_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name with pytest.raises(HTTPError) as error: gpg_key.update(['name']) @@ -176,7 +172,7 @@ def test_negative_update_name(module_org, new_name): @pytest.mark.tier1 -def test_negative_update_same_name(module_org): +def test_negative_update_same_name(module_org, module_target_sat): """Attempt to update GPG key name to the name of existing GPG key entity @@ -187,8 +183,8 @@ def test_negative_update_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alpha') - entities.GPGKey(organization=module_org, name=name).create() - new_gpg_key = entities.GPGKey(organization=module_org).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() + new_gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() new_gpg_key.name = name with pytest.raises(HTTPError) as error: new_gpg_key.update(['name']) @@ -197,7 +193,7 @@ def test_negative_update_same_name(module_org): @pytest.mark.tier1 -def test_negative_update_content(module_org): +def test_negative_update_content(module_org, module_target_sat): """Attempt to update GPG key content to invalid one :id: fee30ef8-370a-4fdd-9e45-e7ab95dade8b @@ -206,7 +202,7 @@ def test_negative_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() gpg_key.content = '' with pytest.raises(HTTPError) as error: gpg_key.update(['content']) @@ -216,7 +212,7 @@ def test_negative_update_content(module_org): @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Create a GPG key with different names and then delete it. 
:id: b06d211f-2827-40f7-b627-8b1fbaee2eb4 @@ -225,7 +221,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.delete() with pytest.raises(HTTPError): gpg_key.read() diff --git a/tests/foreman/api/test_contentview.py b/tests/foreman/api/test_contentview.py index 88e9940cb0b..03c95999417 100644 --- a/tests/foreman/api/test_contentview.py +++ b/tests/foreman/api/test_contentview.py @@ -4,41 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random +from fauxfactory import gen_integer, gen_string, gen_utf8 import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string -from fauxfactory import gen_utf8 -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.config import settings -from robottelo.config import user_nailgun_config -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CUSTOM_RPM_SHA_512_FEED_COUNT -from robottelo.constants import FILTER_ERRATA_TYPE -from robottelo.constants import PERMISSIONS -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.constants.repos import CUSTOM_RPM_SHA_512 -from robottelo.constants.repos import FEDORA_OSTREE_REPO -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.config import settings, user_nailgun_config +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CUSTOM_RPM_SHA_512_FEED_COUNT, + FILTER_ERRATA_TYPE, + PERMISSIONS, + PRDS, + REPOS, + REPOSET, + DataFile, +) +from robottelo.constants.repos import CUSTOM_RPM_SHA_512, FEDORA_OSTREE_REPO +from robottelo.utils.datafactory import ( + invalid_names_list, + parametrized, + valid_data_list, +) # Some tests repeatedly publish content views or promote content view versions. # How many times should that be done? 
A higher number means a more interesting @@ -47,8 +42,8 @@ @pytest.fixture(scope='class') -def class_cv(module_org): - return entities.ContentView(organization=module_org).create() +def class_cv(module_org, class_target_sat): + return class_target_sat.api.ContentView(organization=module_org).create() @pytest.fixture(scope='class') @@ -64,22 +59,22 @@ def class_promoted_cv(class_published_cv, module_lce): @pytest.fixture(scope='class') -def class_cloned_cv(class_cv): - copied_cv_id = entities.ContentView(id=class_cv.id).copy( +def class_cloned_cv(class_cv, class_target_sat): + copied_cv_id = class_target_sat.api.ContentView(id=class_cv.id).copy( data={'name': gen_string('alpha', gen_integer(3, 30))} )['id'] - return entities.ContentView(id=copied_cv_id).read() + return class_target_sat.api.ContentView(id=copied_cv_id).read() @pytest.fixture(scope='class') -def class_published_cloned_cv(class_cloned_cv): +def class_published_cloned_cv(class_cloned_cv, class_target_sat): class_cloned_cv.publish() - return entities.ContentView(id=class_cloned_cv.id).read() + return class_target_sat.api.ContentView(id=class_cloned_cv.id).read() @pytest.fixture -def content_view(module_org): - return entities.ContentView(organization=module_org).create() +def content_view(module_org, module_target_sat): + return module_target_sat.api.ContentView(organization=module_org).create() def apply_package_filter(content_view, repo, package, target_sat, inclusion=True): @@ -92,7 +87,7 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True :return list : list of content view versions """ - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[repo] ).create() cv_filter_rule = target_sat.api.ContentViewFilterRule( @@ -101,14 +96,15 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True assert cv_filter.id == cv_filter_rule.content_view_filter.id content_view.publish() content_view = content_view.read() - content_view_version_info = content_view.version[0].read() - return content_view_version_info + return content_view.version[0].read() class TestContentView: @pytest.mark.upgrade @pytest.mark.tier3 - def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, module_org): + def test_positive_subscribe_host( + self, class_cv, class_promoted_cv, module_lce, module_org, module_target_sat + ): """Subscribe a host to a content view :id: b5a08369-bf92-48ab-b9aa-10f5b9774b79 @@ -116,8 +112,6 @@ def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, :expectedresults: It is possible to create a host and set its 'content_view_id' facet attribute - :CaseLevel: Integration - :CaseAutomation: Automated :CaseImportance: High @@ -129,7 +123,7 @@ def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, # Check that no host associated to just created content view assert class_cv.content_host_count == 0 assert len(class_promoted_cv.version) == 1 - host = entities.Host( + host = module_target_sat.api.Host( content_facet_attributes={ 'content_view_id': class_cv.id, 'lifecycle_environment_id': module_lce.id, @@ -151,15 +145,15 @@ def test_positive_clone_within_same_env(self, class_published_cloned_cv, module_ :expectedresults: Cloned content view can be published and promoted to the same environment as the original content view - :CaseLevel: Integration - :CaseImportance: High """ 
class_published_cloned_cv.read().version[0].promote(data={'environment_ids': module_lce.id}) @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_clone_with_diff_env(self, module_org, class_published_cloned_cv): + def test_positive_clone_with_diff_env( + self, module_org, class_published_cloned_cv, module_target_sat + ): """attempt to create, publish and promote new content view based on existing view but promoted to a different environment @@ -169,28 +163,24 @@ def test_positive_clone_with_diff_env(self, module_org, class_published_cloned_c :expectedresults: Cloned content view can be published and promoted to a different environment as the original content view - :CaseLevel: Integration - :CaseImportance: Medium """ - le_clone = entities.LifecycleEnvironment(organization=module_org).create() + le_clone = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() class_published_cloned_cv.read().version[0].promote(data={'environment_ids': le_clone.id}) @pytest.mark.tier2 - def test_positive_add_custom_content(self, module_product, module_org): + def test_positive_add_custom_content(self, module_product, module_org, module_target_sat): """Associate custom content in a view :id: db452e0c-0c17-40f2-bab4-8467e7a875f1 :expectedresults: Custom content assigned and present in content view - :CaseLevel: Integration - :CaseImportance: Critical """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() - content_view = entities.ContentView(organization=module_org.id).create() + content_view = module_target_sat.api.ContentView(organization=module_org.id).create() assert len(content_view.repository) == 0 content_view.repository = [yum_repo] content_view = content_view.update(['repository']) @@ -201,18 +191,18 @@ def test_positive_add_custom_content(self, module_product, module_org): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_add_custom_module_streams(self, content_view, module_product, module_org): + def test_positive_add_custom_module_streams( + self, content_view, module_product, module_org, module_target_sat + ): """Associate custom content (module streams) in a view :id: 9e4821cb-293a-4d84-bd1f-bb9fff36b143 :expectedresults: Custom content (module streams) assigned and present in content view - :CaseLevel: Integration - :CaseImportance: High """ - yum_repo = entities.Repository( + yum_repo = module_target_sat.api.Repository( product=module_product, url=settings.repos.module_stream_1.url ).create() yum_repo.sync() @@ -225,7 +215,9 @@ def test_positive_add_custom_module_streams(self, content_view, module_product, assert repo.content_counts['module_stream'] == 7 @pytest.mark.tier2 - def test_negative_add_dupe_repos(self, content_view, module_product, module_org): + def test_negative_add_dupe_repos( + self, content_view, module_product, module_org, module_target_sat + ): """Attempt to associate the same repo multiple times within a content view @@ -233,15 +225,13 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) :expectedresults: User cannot add repos multiple times to the view - :CaseLevel: Integration - :CaseImportance: Low """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() assert len(content_view.repository) == 0 + content_view.repository = 
[yum_repo, yum_repo] with pytest.raises(HTTPError): - content_view.repository = [yum_repo, yum_repo] content_view.update(['repository']) assert len(content_view.read().repository) == 0 @@ -250,15 +240,13 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_add_sha512_rpm(self, content_view, module_org): + def test_positive_add_sha512_rpm(self, content_view, module_org, module_target_sat): """Associate sha512 RPM content in a view :id: 1f473b02-5e2b-41ff-a706-c0635abc2476 :expectedresults: Custom sha512 assigned and present in content view - :CaseLevel: Integration - :CaseComponent: Pulp :team: Rocket @@ -269,8 +257,10 @@ def test_positive_add_sha512_rpm(self, content_view, module_org): :BZ: 1639406 """ - product = entities.Product(organization=module_org).create() - yum_sha512_repo = entities.Repository(product=product, url=CUSTOM_RPM_SHA_512).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_sha512_repo = module_target_sat.api.Repository( + product=product, url=CUSTOM_RPM_SHA_512 + ).create() yum_sha512_repo.sync() repo_content = yum_sha512_repo.read() # Assert that the repository content was properly synced @@ -294,7 +284,7 @@ class TestContentViewCreate: @pytest.mark.parametrize('composite', [True, False]) @pytest.mark.tier1 - def test_positive_create_composite(self, composite): + def test_positive_create_composite(self, composite, target_sat): """Create composite and non-composite content views. :id: 4a3b616d-53ab-4396-9a50-916d6c42a401 @@ -306,11 +296,11 @@ def test_positive_create_composite(self, composite): :CaseImportance: Critical """ - assert entities.ContentView(composite=composite).create().composite == composite + assert target_sat.api.ContentView(composite=composite).create().composite == composite @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, name, target_sat): """Create empty content-view with random names. :id: 80d36498-2e71-4aa9-b696-f0a45e86267f @@ -321,11 +311,11 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - assert entities.ContentView(name=name).create().name == name + assert target_sat.api.ContentView(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_description(self, desc): + def test_positive_create_with_description(self, desc, target_sat): """Create empty content view with random description. 
:id: 068e3e7c-34ac-47cb-a1bb-904d12c74cc7 @@ -336,10 +326,10 @@ def test_positive_create_with_description(self, desc): :CaseImportance: High """ - assert entities.ContentView(description=desc).create().description == desc + assert target_sat.api.ContentView(description=desc).create().description == desc @pytest.mark.tier1 - def test_positive_clone(self, content_view, module_org): + def test_positive_clone(self, content_view, module_org, module_target_sat): """Create a content view by copying an existing one :id: ee03dc63-e2b0-4a89-a828-2910405279ff @@ -348,7 +338,7 @@ def test_positive_clone(self, content_view, module_org): :CaseImportance: Critical """ - cloned_cv = entities.ContentView( + cloned_cv = module_target_sat.api.ContentView( id=content_view.copy(data={'name': gen_string('alpha', gen_integer(3, 30))})['id'] ).read_json() cv_origin = content_view.read_json() @@ -360,7 +350,7 @@ def test_positive_clone(self, content_view, module_org): @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Create content view providing an invalid name. :id: 261376ca-7d12-41b6-9c36-5f284865243e @@ -372,26 +362,25 @@ def test_negative_create_with_invalid_name(self, name): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.ContentView(name=name).create() + target_sat.api.ContentView(name=name).create() class TestContentViewPublishPromote: """Tests for publishing and promoting content views.""" - @pytest.mark.skipif( - (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' - ) @pytest.fixture(scope='class', autouse=True) - def class_setup(self, request, module_product): + def class_setup(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.yum_repo = entities.Repository(product=module_product).create() + request.cls.yum_repo = class_target_sat.api.Repository(product=module_product).create() self.yum_repo.sync() - request.cls.swid_repo = entities.Repository( + request.cls.swid_repo = class_target_sat.api.Repository( product=module_product, url=settings.repos.swid_tag.url ).create() self.swid_repo.sync() - def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): + def add_content_views_to_composite( + self, module_target_sat, composite_cv, module_org, cv_amount=1 + ): """Add necessary number of content views to the composite one :param composite_cv: Composite content view object @@ -399,7 +388,7 @@ def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): """ cv_versions = [] for _ in range(cv_amount): - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() cv_versions.append(content_view.read().version[0]) composite_cv.component = cv_versions @@ -416,8 +405,6 @@ def test_positive_publish_with_content_multiple(self, content_view, module_org): content view can be published several times, and each content view version has at least one package. 
- :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.yum_repo] @@ -434,7 +421,7 @@ def test_positive_publish_with_content_multiple(self, content_view, module_org): assert cvv.read_json()['package_count'] > 0 @pytest.mark.tier2 - def test_positive_publish_composite_multiple_content_once(self, module_org): + def test_positive_publish_composite_multiple_content_once(self, module_org, module_target_sat): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view once. @@ -444,20 +431,22 @@ def test_positive_publish_composite_multiple_content_once(self, module_org): :expectedresults: Composite content view is published and corresponding version is assigned to it. - :CaseLevel: Integration - :CaseImportance: Critical """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() - self.add_content_views_to_composite(composite_cv, module_org, random.randint(2, 3)) + self.add_content_views_to_composite( + module_target_sat, composite_cv, module_org, random.randint(2, 3) + ) composite_cv.publish() assert len(composite_cv.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_composite_multiple_content_multiple(self, module_org): + def test_positive_publish_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view several times. @@ -467,11 +456,9 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): :expectedresults: Composite content view is published several times and corresponding versions are assigned to it. - :CaseLevel: Integration - :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -482,7 +469,7 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): assert len(composite_cv.read().version) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_yum_multiple(self, content_view, module_org): + def test_positive_promote_with_yum_multiple(self, content_view, module_org, module_target_sat): """Give a content view a yum repo, publish it once and promote the content view version ``REPEAT + 1`` times. @@ -492,8 +479,6 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): version is in ``REPEAT + 1`` lifecycle environments and it has at least one package. - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.yum_repo] @@ -503,7 +488,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): # Promote the content view version. for _ in range(REPEAT): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id}) # Everything's done - check some content view attributes... 
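The mechanical change running through these hunks is the swap from the module-level `nailgun.entities` namespace to a Satellite-scoped `*_target_sat.api` handle (`target_sat`, `module_target_sat`, `class_target_sat`, matching the fixture scope), so every entity is created against an explicit server rather than whatever global server configuration nailgun was initialized with. A minimal sketch of the difference, assuming `module_target_sat.api` exposes the same entity classes as `nailgun.entities`, pre-bound to one Satellite:

# Sketch only; `module_target_sat` stands in for robottelo's real satellite
# fixtures, and `sat.api.ContentView` is assumed to mirror
# nailgun.entities.ContentView bound to that satellite's server config.
from nailgun import entities


def create_cv_old_style(module_org):
    # Old style: the entity talks to the globally configured server.
    return entities.ContentView(organization=module_org).create()


def create_cv_new_style(module_org, module_target_sat):
    # New style: the satellite object carries its own server config, so the
    # same test can target several satellites without cross-talk.
    return module_target_sat.api.ContentView(organization=module_org).create()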
@@ -517,7 +502,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): assert cvv_attrs['package_count'] > 0 @pytest.mark.tier2 - def test_positive_add_to_composite(self, content_view, module_org): + def test_positive_add_to_composite(self, content_view, module_org, module_target_sat): """Create normal content view, publish and add it to a new composite content view @@ -526,8 +511,6 @@ def test_positive_add_to_composite(self, content_view, module_org): :expectedresults: Content view can be created and assigned to composite one through content view versions mechanism - :CaseLevel: Integration - :CaseImportance: Critical """ content_view.repository = [self.yum_repo] @@ -535,7 +518,7 @@ def test_positive_add_to_composite(self, content_view, module_org): content_view.publish() content_view = content_view.read() - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -548,7 +531,9 @@ def test_positive_add_to_composite(self, content_view, module_org): assert composite_cv.component[0].read().content_view.id == content_view.id @pytest.mark.tier2 - def test_negative_add_components_to_composite(self, content_view, module_org): + def test_negative_add_components_to_composite( + self, content_view, module_org, module_target_sat + ): """Attempt to associate components in a non-composite content view @@ -556,15 +541,13 @@ def test_negative_add_components_to_composite(self, content_view, module_org): :expectedresults: User cannot add components to the view - :CaseLevel: Integration - :CaseImportance: Low """ content_view.repository = [self.yum_repo] content_view.update(['repository']) content_view.publish() content_view = content_view.read() - non_composite_cv = entities.ContentView( + non_composite_cv = module_target_sat.api.ContentView( composite=False, organization=module_org, ).create() @@ -575,7 +558,9 @@ def test_negative_add_components_to_composite(self, content_view, module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_once(self, module_lce, module_org): + def test_positive_promote_composite_multiple_content_once( + self, module_lce, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view once. @@ -585,11 +570,9 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu :expectedresults: Composite content view version points to ``Library + 1`` lifecycle environments after the promotions. - :CaseLevel: Integration - :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -602,7 +585,9 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_multiple(self, module_org): + def test_positive_promote_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view ``Library + random`` times. @@ -612,11 +597,9 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): :expectedresults: Composite content view version points to ``Library + random`` lifecycle environments after the promotions. 
- :CaseLevel: Integration - :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -626,7 +609,7 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): envs_amount = random.randint(2, 3) for _ in range(envs_amount): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() composite_cv.version[0].promote(data={'environment_ids': lce.id}) composite_cv = composite_cv.read() assert len(composite_cv.version) == 1 @@ -641,8 +624,6 @@ def test_positive_promote_out_of_sequence(self, content_view, module_org): :expectedresults: Content view promoted out of sequence properly - :CaseLevel: Integration - :CaseImportance: Medium """ for _ in range(REPEAT): @@ -670,15 +651,13 @@ def test_positive_promote_out_of_sequence(self, content_view, module_org): @pytest.mark.tier3 @pytest.mark.pit_server - def test_positive_publish_multiple_repos(self, content_view, module_org): + def test_positive_publish_multiple_repos(self, content_view, module_org, module_target_sat): """Attempt to publish a content view with multiple YUM repos. :id: 5557a33b-7a6f-45f5-9fe4-23a704ed9e21 :expectedresults: Content view publish should not raise an exception. - :CaseLevel: Integration - :CaseComponent: Pulp :team: Rocket @@ -689,9 +668,9 @@ def test_positive_publish_multiple_repos(self, content_view, module_org): :BZ: 1651930 """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(10): - repo = entities.Repository(product=product).create() + repo = module_target_sat.api.Repository(product=product).create() repo.sync() content_view.repository.append(repo) content_view = content_view.update(['repository']) @@ -720,13 +699,13 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): :CaseImportance: Medium """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository( content_type='yum', product=product, url=settings.repos.module_stream_1.url ).create() repo.sync() - content_view_1 = entities.ContentView(organization=module_org).create() - content_view_2 = entities.ContentView(organization=module_org).create() + content_view_1 = target_sat.api.ContentView(organization=module_org).create() + content_view_2 = target_sat.api.ContentView(organization=module_org).create() # create content views with same repo and different filter for content_view, package in [(content_view_1, 'camel'), (content_view_2, 'cow')]: @@ -736,7 +715,7 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): assert content_view_info.package_count == 35 # create composite content view with these two published content views - comp_content_view = entities.ContentView( + comp_content_view = target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -750,12 +729,106 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): comp_content_view_info = comp_content_view.version[0].read() assert comp_content_view_info.package_count == 36 + @pytest.mark.e2e + @pytest.mark.tier2 + def test_ccv_audit_scenarios(self, module_org, target_sat): + """Check for various scenarios where a composite content view 
or its component + content views needs_publish flags should be set to true and that they properly + get set and unset + + :id: cdd94ab8-da31-40ac-ab81-02472517e9bf + + :steps: + + 1. Create a ccv + 2. Add some content views to the composite view + 3. Remove a content view from the composite view + 4. Add a CV that has `latest` set to true to the composite view + 5. Publish a new version of that CV + 6. Publish a new version of another CV in the CCV, and update it to latest + + :expectedresults: When appropriate, a CCV's and its component CVs' needs_publish flags get + set or unset + + :CaseImportance: High + """ + composite_cv = target_sat.api.ContentView(composite=True).create() + # Needs_publish is set to True on creation + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + # Add some content_views to the composite view + self.add_content_views_to_composite(target_sat, composite_cv, module_org, 2) + # Needs_publish should be set to True when a component CV is added/removed + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + component_cvs = composite_cv.read().component + # Remove a component cv, should need publish now + component_cvs.pop(1) + composite_cv.component = component_cvs + composite_cv = composite_cv.update(['component']) + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + # add a CV that has `latest` set to true + new_cv = target_sat.api.ContentView().create() + new_cv.publish() + composite_cv.content_view_component[0].add( + data={"components": [{"content_view_id": new_cv.id, "latest": "true"}]} + ) + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + # a new version of a component cv that has "latest" - needs publish + new_cv.publish() + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + # a component CV was changed to "always update" when ccv has old version - needs publish + # update composite_cv after changes + composite_cv = composite_cv.read() + # get the first CV that was added, which has 1 version + old_component = composite_cv.content_view_component[0].read() + old_component.content_view.publish() + old_component.latest = True + old_component = old_component.update(['latest']) + # set latest to true and see if CV needs publish + assert composite_cv.read().needs_publish + composite_cv.publish() + assert not composite_cv.read().needs_publish + + @pytest.mark.tier2 + def test_check_needs_publish_flag(self, target_sat): + """Check that the publish_only_if_needed option in the API works as intended (defaults to + false, can be overridden to true, and if so gives the appropriate message + if the CV's needs_publish flag is set to false) + + :id: 6e4aa845-db08-4cc3-a960-ea64fb20f50c + + :expectedresults: The publish_only_if_needed flag works as intended, and defaults + to false + + :CaseImportance: High + """ + cv = target_sat.api.ContentView().create() + assert cv.publish() + assert not cv.read().needs_publish + with pytest.raises(HTTPError): + assert ( + cv.publish(data={'publish_only_if_needed': True})['displayMessage'] + == """ + Content view does not need a publish since there are no audited changes since the + last publish.
Pass check_needs_publish parameter as false if you don't want to check + if content view needs a publish.""" + ) + class TestContentViewUpdate: """Tests for updating content views.""" @pytest.mark.parametrize( - 'key, value', + ('key', 'value'), **(lambda x: {'argvalues': list(x.items()), 'ids': list(x.keys())})( {'description': gen_utf8(), 'name': gen_utf8()} ), @@ -778,7 +851,7 @@ def test_positive_update_attributes(self, module_cv, key, value): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_update_name(self, module_cv, new_name): + def test_positive_update_name(self, module_cv, new_name, module_target_sat): """Create content view providing the initial name, then update its name to another valid name. @@ -792,12 +865,12 @@ def test_positive_update_name(self, module_cv, new_name): """ module_cv.name = new_name module_cv.update(['name']) - updated = entities.ContentView(id=module_cv.id).read() + updated = module_target_sat.api.ContentView(id=module_cv.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_update_name(self, module_cv, new_name): + def test_negative_update_name(self, module_cv, new_name, module_target_sat): """Create content view then update its name to an invalid name. @@ -809,8 +882,8 @@ def test_negative_update_name(self, module_cv, new_name): :CaseImportance: Critical """ + module_cv.name = new_name with pytest.raises(HTTPError): - module_cv.name = new_name module_cv.update(['name']) cv = module_cv.read() assert cv.name != new_name @@ -821,7 +894,7 @@ class TestContentViewDelete: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_delete(self, content_view, name): + def test_positive_delete(self, content_view, name, target_sat): """Create content view and then delete it. 
:id: d582f1b3-8118-4e78-a639-237c6f9d27c6 @@ -834,7 +907,7 @@ def test_positive_delete(self, content_view, name): """ content_view.delete() with pytest.raises(HTTPError): - entities.ContentView(id=content_view.id).read() + target_sat.api.ContentView(id=content_view.id).read() @pytest.mark.run_in_one_thread @@ -843,11 +916,11 @@ class TestContentViewRedHatContent: @pytest.fixture(scope='class', autouse=True) def initiate_testclass( - self, request, module_cv, module_entitlement_manifest_org, module_target_sat + self, request, module_cv, module_entitlement_manifest_org, class_target_sat ): """Set up organization, product and repositories for tests.""" - repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + repo_id = class_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_entitlement_manifest_org.id, product=PRDS['rhel'], @@ -855,7 +928,7 @@ def initiate_testclass( reposet=REPOSET['rhst7'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() module_cv.repository = [self.repo] module_cv.update(['repository']) @@ -869,15 +942,13 @@ def test_positive_add_rh(self): :expectedresults: RH Content assigned and present in a view - :CaseLevel: Integration - :CaseImportance: High """ assert len(self.yumcv.repository) == 1 assert self.yumcv.repository[0].read().name == REPOS['rhst7']['name'] @pytest.mark.tier2 - def test_positive_add_rh_custom_spin(self): + def test_positive_add_rh_custom_spin(self, target_sat): """Associate Red Hat content in a view and filter it using rule :id: 30c3103d-9503-4501-8117-1f2d25353215 @@ -885,12 +956,10 @@ def test_positive_add_rh_custom_spin(self): :expectedresults: Filtered RH content is available and can be seen in a view - :CaseLevel: Integration - :CaseImportance: High """ # content_view ← cv_filter - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=self.yumcv, inclusion='true', name=gen_string('alphanumeric'), @@ -898,13 +967,13 @@ def test_positive_add_rh_custom_spin(self): assert self.yumcv.id == cv_filter.content_view.id # content_view ← cv_filter ← cv_filter_rule - cv_filter_rule = entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, name=gen_string('alphanumeric'), version='1.0' ).create() assert cv_filter.id == cv_filter_rule.content_view_filter.id @pytest.mark.tier2 - def test_positive_update_rh_custom_spin(self): + def test_positive_update_rh_custom_spin(self, target_sat): """Edit content views for a custom rh spin. 
For example, modify a filter @@ -913,16 +982,14 @@ :expectedresults: edited content view save is successful and info is updated - :CaseLevel: Integration - :CaseImportance: High """ - cvf = entities.ErratumContentViewFilter( + cvf = target_sat.api.ErratumContentViewFilter( content_view=self.yumcv, ).create() assert self.yumcv.id == cvf.content_view.id - cv_filter_rule = entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cvf, types=[FILTER_ERRATA_TYPE['enhancement']] ).create() assert cv_filter_rule.types == [FILTER_ERRATA_TYPE['enhancement']] @@ -939,8 +1006,6 @@ def test_positive_publish_rh(self, module_org, content_view): :expectedresults: Content view can be published - :CaseLevel: Integration - :CaseImportance: Critical """ content_view.repository = [self.repo] @@ -949,7 +1014,7 @@ assert len(content_view.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_rh_custom_spin(self, module_org, content_view): + def test_positive_publish_rh_custom_spin(self, module_org, content_view, module_target_sat): """Attempt to publish a content view containing Red Hat spin - i.e., contains filters. @@ -957,13 +1022,11 @@ :expectedresults: Content view can be published - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() @@ -977,8 +1040,6 @@ :expectedresults: Content view can be promoted - :CaseLevel: Integration - :CaseImportance: Critical """ content_view.repository = [self.repo] @@ -994,7 +1055,7 @@ @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_rh_custom_spin(self, content_view, module_lce): + def test_positive_promote_rh_custom_spin(self, content_view, module_lce, module_target_sat): """Attempt to promote a content view containing Red Hat spin - i.e., contains filters. @@ -1002,19 +1063,103 @@ :expectedresults: Content view can be promoted - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() content_view.read().version[0].promote(data={'environment_ids': module_lce.id}) assert len(content_view.read().version[0].read().environment) == 2 + + @pytest.mark.e2e + @pytest.mark.tier2 + def test_cv_audit_scenarios(self, module_product, target_sat): + """Check for various scenarios where a content view's needs_publish flag + should be set to true and that it properly gets set and unset + + :id: 48b0ce35-f76b-447e-a465-d9ce70cbb20e + + :steps: + + 1. Create a CV + 2. Add a filter to the CV + 3. Add a rule to the filter + 4. Delete that rule + 5. Delete that filter + 6. Add a repo to the CV + 7. Delete content from the repo + 8. Add content to the repo + 9.
Sync the repo + + :expectedresults: All of the above steps should result in the CV needing to be + published + + :CaseImportance: High + """ + # needs_publish is set to true when created + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # needs_publish is set to true when a filter is added/updated/deleted + cv_filter = target_sat.api.RPMContentViewFilter( + content_view=self.yumcv, inclusion='true', name=gen_string('alphanumeric') + ).create() + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # Adding a rule should set needs_publish to true + cvf_rule = target_sat.api.ContentViewFilterRule( + content_view_filter=cv_filter, name=gen_string('alphanumeric'), version='1.0' + ).create() + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # Deleting a rule should set needs_publish to true + cvf_rule.delete() + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # Deleting a filter should set needs_publish to true + cv_filter.delete() + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # needs_publish is set to true whenever repositories are interacted with on the CV + # add a repo to the CV, needs_publish should be set to true + repo_url = settings.repos.yum_0.url + repo = target_sat.api.Repository( + download_policy='immediate', + mirroring_policy='mirror_complete', + product=module_product, + url=repo_url, + ).create() + repo.sync() + self.yumcv.repository = [repo] + self.yumcv = self.yumcv.update(['repository']) + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # needs_publish is set to true when repository content is removed + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) + repo.remove_content(data={'ids': [package.id for package in packages]}) + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # needs_publish is set to true whenever repo content is added + with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: + repo.upload_content(files={'content': handle}) + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + # needs_publish is set to true whenever a repo is synced + repo.sync() + assert self.yumcv.read().needs_publish + self.yumcv.publish() + assert not self.yumcv.read().needs_publish + @pytest.mark.tier2 def test_positive_admin_user_actions( @@ -1035,8 +1180,6 @@ :expectedresults: The user can Read, Modify, Delete, Publish, Promote the content views - :CaseLevel: Integration - :CaseImportance: Critical """ user_login = gen_string('alpha') @@ -1095,8 +1238,6 @@ def test_positive_readonly_user_actions(target_sat, function_role, content_view, :expectedresults: User with read-only role for content view can view the repository in the content view - :CaseLevel: Integration - :CaseImportance: Critical """ user_login = gen_string('alpha') @@ -1161,8 +1302,6 @@ :BZ: 1922134 - :CaseLevel: Integration - :CaseImportance: Critical """ user_login = gen_string('alpha') @@ -1170,7 +1309,7 @@ # create a role with content views read only permissions target_sat.api.Filter(
organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( filters={'name': 'view_content_views'}, query={'search': 'resource_type="Katello::ContentView"'}, ), @@ -1179,7 +1318,7 @@ def test_negative_readonly_user_actions( # create environment permissions and assign it to our role target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( query={'search': 'resource_type="Katello::KTEnvironment"'} ), role=function_role, @@ -1238,8 +1377,6 @@ def test_negative_non_readonly_user_actions(target_sat, content_view, function_r :expectedresults: the user can perform different operations against content view, but not read it - :CaseLevel: Integration - :CaseImportance: Critical """ user_login = gen_string('alpha') @@ -1280,13 +1417,10 @@ def test_negative_non_readonly_user_actions(target_sat, content_view, function_r class TestOstreeContentView: """Tests for ostree contents in content views.""" - @pytest.mark.skipif( - (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' - ) @pytest.fixture(scope='class', autouse=True) - def initiate_testclass(self, request, module_product): + def initiate_testclass(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.ostree_repo = entities.Repository( + request.cls.ostree_repo = class_target_sat.api.Repository( product=module_product, content_type='ostree', url=FEDORA_OSTREE_REPO, @@ -1294,13 +1428,13 @@ def initiate_testclass(self, request, module_product): ).create() self.ostree_repo.sync() # Create new yum repository - request.cls.yum_repo = entities.Repository( + request.cls.yum_repo = class_target_sat.api.Repository( url=settings.repos.yum_1.url, product=module_product, ).create() self.yum_repo.sync() # Create new docker repository - request.cls.docker_repo = entities.Repository( + request.cls.docker_repo = class_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product, @@ -1317,8 +1451,6 @@ def test_positive_add_custom_ostree_content(self, content_view): :expectedresults: Custom ostree content assigned and present in content view - :CaseLevel: Integration - :CaseImportance: High """ assert len(content_view.repository) == 0 @@ -1336,8 +1468,6 @@ def test_positive_publish_custom_ostree(self, content_view): :expectedresults: Content-view with Custom ostree published successfully - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.ostree_repo] @@ -1354,8 +1484,6 @@ def test_positive_promote_custom_ostree(self, content_view, module_lce): :expectedresults: Content-view with custom ostree contents promoted successfully - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.ostree_repo] @@ -1376,8 +1504,6 @@ def test_positive_publish_promote_with_custom_ostree_and_other(self, content_vie :expectedresults: Content-view with custom ostree and other contents promoted successfully - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.ostree_repo, self.yum_repo, self.docker_repo] @@ -1395,7 +1521,6 @@ def test_positive_publish_promote_with_custom_ostree_and_other(self, content_vie class TestContentViewRedHatOstreeContent: """Tests for publishing and promoting cv with RH ostree contents.""" - @pytest.mark.run_in_one_thread @pytest.fixture(scope='class', autouse=True) def 
initiate_testclass(self, request, module_entitlement_manifest_org, class_target_sat): """Set up organization, product and repositories for tests.""" @@ -1408,7 +1533,7 @@ def initiate_testclass(self, request, module_entitlement_manifest_org, class_tar reposet=REPOSET['rhaht'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() @pytest.mark.tier2 @@ -1420,8 +1545,6 @@ def test_positive_add_rh_ostree_content(self, content_view): :expectedresults: RH atomic ostree content assigned and present in content view - :CaseLevel: Integration - :CaseImportance: High """ assert len(content_view.repository) == 0 @@ -1439,8 +1562,6 @@ def test_positive_publish_RH_ostree(self, content_view): :expectedresults: Content-view with RH ostree contents published successfully - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.repo] @@ -1457,8 +1578,6 @@ def test_positive_promote_RH_ostree(self, content_view, module_lce): :expectedresults: Content-view with RH ostree contents promoted successfully - :CaseLevel: Integration - :CaseImportance: High """ content_view.repository = [self.repo] @@ -1481,8 +1600,6 @@ def test_positive_publish_promote_with_RH_ostree_and_other( :expectedresults: Content-view with RH ostree and other contents promoted successfully - :CaseLevel: Integration - :CaseImportance: High """ repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( @@ -1494,7 +1611,7 @@ def test_positive_publish_promote_with_RH_ostree_and_other( releasever=None, ) # Sync repository - rpm_repo = entities.Repository(id=repo_id) + rpm_repo = module_target_sat.api.Repository(id=repo_id) rpm_repo.sync() content_view.repository = [self.repo, rpm_repo] content_view = content_view.update(['repository']) diff --git a/tests/foreman/api/test_contentviewfilter.py b/tests/foreman/api/test_contentviewfilter.py index 9bfc61778e8..6360bf5da16 100644 --- a/tests/foreman/api/test_contentviewfilter.py +++ b/tests/foreman/api/test_contentviewfilter.py @@ -8,46 +8,40 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import http from random import randint -import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string +from fauxfactory import gen_integer, gen_string from nailgun import client -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.config import get_credentials -from robottelo.config import settings +from robottelo.config import get_credentials, settings from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_names_list, + parametrized, + valid_data_list, +) @pytest.fixture(scope='module') -def sync_repo(module_product): - repo = entities.Repository(product=module_product).create() +def sync_repo(module_product, module_target_sat): + repo = module_target_sat.api.Repository(product=module_product).create() repo.sync() return repo @pytest.fixture(scope='module') -def sync_repo_module_stream(module_product): - repo = entities.Repository( +def sync_repo_module_stream(module_product, module_target_sat): + repo = module_target_sat.api.Repository( content_type='yum', 
product=module_product, url=settings.repos.module_stream_1.url ).create() repo.sync() @@ -55,13 +49,15 @@ def sync_repo_module_stream(module_product): @pytest.fixture -def content_view(module_org, sync_repo): - return entities.ContentView(organization=module_org, repository=[sync_repo]).create() +def content_view(module_org, sync_repo, module_target_sat): + return module_target_sat.api.ContentView( + organization=module_org, repository=[sync_repo] + ).create() @pytest.fixture -def content_view_module_stream(module_org, sync_repo_module_stream): - return entities.ContentView( +def content_view_module_stream(module_org, sync_repo_module_stream, module_target_sat): + return module_target_sat.api.ContentView( organization=module_org, repository=[sync_repo_module_stream] ).create() @@ -70,7 +66,7 @@ class TestContentViewFilter: """Tests for content view filters.""" @pytest.mark.tier2 - def test_negative_get_with_no_args(self): + def test_negative_get_with_no_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. :id: da29fd90-cd96-49f9-b94e-71d4e3a35a57 @@ -78,19 +74,17 @@ def test_negative_get_with_no_args(self): :expectedresults: An HTTP 200 response is received if a GET request is issued with no arguments specified. - :CaseLevel: Integration - :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, ) assert response.status_code == http.client.OK @pytest.mark.tier2 - def test_negative_get_with_bad_args(self): + def test_negative_get_with_bad_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. :id: e6fea726-930b-4b74-b784-41528811994f @@ -98,12 +92,10 @@ def test_negative_get_with_bad_args(self): :expectedresults: An HTTP 200 response is received if a GET request is issued with bad arguments specified. 
- :CaseLevel: Integration - :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, data={'foo': 'bar'}, @@ -112,7 +104,7 @@ def test_negative_get_with_bad_args(self): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_erratum_with_name(self, name, content_view): + def test_positive_create_erratum_with_name(self, name, content_view, target_sat): """Create new erratum content filter using different inputs as a name :id: f78a133f-441f-4fcc-b292-b9eed228d755 @@ -122,15 +114,14 @@ def test_positive_create_erratum_with_name(self, name, content_view): :expectedresults: Content view filter created successfully and has correct name and type - :CaseLevel: Integration """ - cvf = entities.ErratumContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.ErratumContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'erratum' @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_pkg_group_with_name(self, name, content_view): + def test_positive_create_pkg_group_with_name(self, name, content_view, target_sat): """Create new package group content filter using different inputs as a name :id: f9bfb6bf-a879-4f1a-970d-8f4df533cd59 @@ -140,11 +131,9 @@ def test_positive_create_pkg_group_with_name(self, name, content_view): :expectedresults: Content view filter created successfully and has correct name and type - :CaseLevel: Integration - :CaseImportance: Medium """ - cvf = entities.PackageGroupContentViewFilter( + cvf = target_sat.api.PackageGroupContentViewFilter( content_view=content_view, name=name, ).create() @@ -153,7 +142,7 @@ def test_positive_create_pkg_group_with_name(self, name, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_rpm_with_name(self, name, content_view): + def test_positive_create_rpm_with_name(self, name, content_view, target_sat): """Create new RPM content filter using different inputs as a name :id: f1c88e72-7993-47ac-8fbc-c749d32bc768 @@ -163,17 +152,15 @@ def test_positive_create_rpm_with_name(self, name, content_view): :expectedresults: Content view filter created successfully and has correct name and type - :CaseLevel: Integration - :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'rpm' @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def test_positive_create_with_inclusion(self, inclusion, content_view): + def test_positive_create_with_inclusion(self, inclusion, content_view, target_sat): """Create new content view filter with different inclusion values :id: 81130dc9-ae33-48bc-96a7-d54d3e99448e @@ -183,14 +170,15 @@ def test_positive_create_with_inclusion(self, inclusion, content_view): :expectedresults: Content view filter created successfully and has correct inclusion value - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view, inclusion=inclusion).create() + cvf = target_sat.api.RPMContentViewFilter( + content_view=content_view, inclusion=inclusion + ).create() assert cvf.inclusion == inclusion 
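Many of these tests feed their name and description inputs through `**parametrized(valid_data_list())`. Judging from the inline lambda used in TestContentViewUpdate earlier in this diff, which builds {'argvalues': ..., 'ids': ...}, the helper presumably expands a dict of labeled samples into pytest.mark.parametrize keyword arguments. A sketch of that assumed contract:

# Sketch of the `parametrized()` helper contract, inferred from the inline
# lambda seen in TestContentViewUpdate; the real helper lives in
# robottelo.utils.datafactory and may differ in detail.
import pytest


def parametrized_sketch(data: dict) -> dict:
    # Keys become human-readable test ids, values become the argvalues.
    return {'argvalues': list(data.values()), 'ids': list(data.keys())}


@pytest.mark.parametrize('name', **parametrized_sketch({'alpha': 'abc', 'numeric': '123'}))
def test_name_roundtrip(name):
    assert name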
@pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description, content_view): + def test_positive_create_with_description(self, description, content_view, target_sat): """Create new content filter using different inputs as a description :id: e057083f-e69d-46e7-b336-45faaf67fa52 @@ -200,18 +188,16 @@ def test_positive_create_with_description(self, description, content_view): :expectedresults: Content view filter created successfully and has correct description - :CaseLevel: Integration - :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, description=description, ).create() assert cvf.description == description @pytest.mark.tier2 - def test_positive_create_with_repo(self, content_view, sync_repo): + def test_positive_create_with_repo(self, content_view, sync_repo, target_sat): """Create new content filter with repository assigned :id: 7207d4cf-3ccf-4d63-a50a-1373b16062e2 @@ -219,9 +205,8 @@ def test_positive_create_with_repo(self, content_view, sync_repo): :expectedresults: Content view filter created successfully and has repository assigned - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -231,7 +216,7 @@ def test_positive_create_with_repo(self, content_view, sync_repo): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) def test_positive_create_with_original_packages( - self, original_packages, content_view, sync_repo + self, original_packages, content_view, sync_repo, target_sat ): """Create new content view filter with different 'original packages' option values @@ -243,11 +228,9 @@ def test_positive_create_with_original_packages( :expectedresults: Content view filter created successfully and has 'original packages' value - :CaseLevel: Integration - :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -256,7 +239,9 @@ def test_positive_create_with_original_packages( assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_create_with_docker_repos(self, module_product, sync_repo, content_view): + def test_positive_create_with_docker_repos( + self, module_product, sync_repo, content_view, module_target_sat + ): """Create new docker repository and add to content view that has yum repo already assigned to it. Create new content view filter and assign it to the content view. 
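The docker/yum case that follows, like the rest of this class, reduces to one create-and-assert shape: attach repositories to the content view, create a filter scoped to some of them, optionally attach a rule, and verify the server-side linkage. A condensed sketch under this module's own fixture names (`target_sat`, `content_view`, `sync_repo`):

# Condensed sketch of the filter flow these tests exercise; the fixture names
# are the ones defined in this module and the calls mirror ones used above.
from fauxfactory import gen_string


def filter_flow_sketch(target_sat, content_view, sync_repo):
    # Inclusion filter scoped to one of the content view's repositories.
    cvf = target_sat.api.RPMContentViewFilter(
        content_view=content_view,
        inclusion=True,
        repository=[sync_repo],
        name=gen_string('alphanumeric'),
    ).create()
    # A rule narrows what the filter matches; here, a specific version.
    rule = target_sat.api.ContentViewFilterRule(
        content_view_filter=cvf, name=gen_string('alphanumeric'), version='1.0'
    ).create()
    # The server links rule -> filter -> content view.
    assert rule.content_view_filter.id == cvf.id
    assert cvf.content_view.id == content_view.id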
@@ -266,9 +251,8 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont :expectedresults: Content view filter created successfully and has both repositories assigned (yum and docker) - :CaseLevel: Integration """ - docker_repository = entities.Repository( + docker_repository = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -277,7 +261,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont content_view.repository = [sync_repo, docker_repository] content_view.update(['repository']) - cvf = entities.RPMContentViewFilter( + cvf = module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo, docker_repository], @@ -291,7 +275,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) def test_positive_create_with_module_streams( - self, module_product, sync_repo, sync_repo_module_stream, content_view + self, module_product, sync_repo, sync_repo_module_stream, content_view, target_sat ): """Verify Include and Exclude Filters creation for modulemd (module streams) @@ -300,12 +284,11 @@ def test_positive_create_with_module_streams( :expectedresults: Content view filter created successfully for both Include and Exclude Type - :CaseLevel: Integration """ content_view.repository += [sync_repo_module_stream] content_view.update(['repository']) for inclusion in (True, False): - cvf = entities.ModuleStreamContentViewFilter( + cvf = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[sync_repo, sync_repo_module_stream], @@ -317,7 +300,7 @@ def test_positive_create_with_module_streams( @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_name(self, name, content_view): + def test_negative_create_with_invalid_name(self, name, content_view, target_sat): """Try to create content view filter using invalid names only :id: 8cf4227b-75c4-4d6f-b94f-88e4eb586435 @@ -326,32 +309,28 @@ def test_negative_create_with_invalid_name(self, name, content_view): :expectedresults: Content view filter was not created - :CaseLevel: Integration - :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=content_view, name=name).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() @pytest.mark.tier2 - def test_negative_create_with_same_name(self, content_view): + def test_negative_create_with_same_name(self, content_view, target_sat): """Try to create content view filter using same name twice :id: 73a64ca7-07a3-49ee-8921-0474a16a23ff :expectedresults: Second content view filter was not created - :CaseLevel: Integration - :CaseImportance: Low """ kwargs = {'content_view': content_view, 'name': gen_string('alpha')} - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() with pytest.raises(HTTPError): - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() @pytest.mark.tier2 - def test_negative_create_without_cv(self): + def test_negative_create_without_cv(self, target_sat): """Try to create content view filter without providing content view @@ -359,15 +338,13 @@ def test_negative_create_without_cv(self): :expectedresults: Content 
view filter is not created - :CaseLevel: Integration - :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=None).create() + target_sat.api.RPMContentViewFilter(content_view=None).create() @pytest.mark.tier2 - def test_negative_create_with_invalid_repo_id(self, content_view): + def test_negative_create_with_invalid_repo_id(self, content_view, target_sat): """Try to create content view filter using incorrect repository id @@ -375,36 +352,32 @@ def test_negative_create_with_invalid_repo_id(self, content_view): :expectedresults: Content view filter is not created - :CaseLevel: Integration - :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=content_view, repository=[gen_integer(10000, 99999)], ).create() @pytest.mark.tier2 - def test_positive_delete_by_id(self, content_view): + def test_positive_delete_by_id(self, content_view, target_sat): """Delete content view filter :id: 07caeb9d-419d-43f8-996b-456b0cc0f70d :expectedresults: Content view filter was deleted - :CaseLevel: Integration - :CaseImportance: Critical """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.delete() with pytest.raises(HTTPError): cvf.read() @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_update_name(self, name, content_view): + def test_positive_update_name(self, name, content_view, target_sat): """Update content view filter with new name :id: f310c161-00d2-4281-9721-6e45cbc5e4ec @@ -414,15 +387,14 @@ def test_positive_update_name(self, name, content_view): :expectedresults: Content view filter updated successfully and name was changed - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name assert cvf.update(['name']).name == name @pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_update_description(self, description, content_view): + def test_positive_update_description(self, description, content_view, target_sat): """Update content view filter with new description :id: f2c5db28-0163-4cf3-929a-16ba1cb98c34 @@ -432,18 +404,16 @@ def test_positive_update_description(self, description, content_view): :expectedresults: Content view filter updated successfully and description was changed - :CaseLevel: Integration - :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.description = description cvf = cvf.update(['description']) assert cvf.description == description @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def test_positive_update_inclusion(self, inclusion, content_view): + def test_positive_update_inclusion(self, inclusion, content_view, target_sat): """Update content view filter with new inclusion value :id: 0aedd2d6-d020-4a90-adcd-01694b47c0b0 @@ -453,15 +423,14 @@ def test_positive_update_inclusion(self, inclusion, content_view): :expectedresults: Content view filter updated successfully and inclusion value was changed - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = 
target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.inclusion = inclusion cvf = cvf.update(['inclusion']) assert cvf.inclusion == inclusion @pytest.mark.tier2 - def test_positive_update_repo(self, module_product, sync_repo, content_view): + def test_positive_update_repo(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with new repository :id: 329ef155-c2d0-4aa2-bac3-79087ae49bdf @@ -469,14 +438,13 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): :expectedresults: Content view filter updated successfully and has new repository assigned - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() content_view.repository = [new_repo] content_view.update(['repository']) @@ -486,7 +454,7 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): assert cvf.repository[0].id == new_repo.id @pytest.mark.tier2 - def test_positive_update_repos(self, module_product, sync_repo, content_view): + def test_positive_update_repos(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with multiple repositories :id: 478fbb1c-fa1d-4fcd-93d6-3a7f47092ed3 @@ -494,16 +462,16 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): :expectedresults: Content view filter updated successfully and has new repositories assigned - :CaseLevel: Integration - :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - repos = [entities.Repository(product=module_product).create() for _ in range(randint(3, 5))] + repos = [ + target_sat.api.Repository(product=module_product).create() for _ in range(randint(3, 5)) + ] for repo in repos: repo.sync() content_view.repository = repos @@ -514,7 +482,9 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) - def test_positive_update_original_packages(self, original_packages, sync_repo, content_view): + def test_positive_update_original_packages( + self, original_packages, sync_repo, content_view, target_sat + ): """Update content view filter with new 'original packages' option value :id: 0c41e57a-afa3-479e-83ba-01f09f0fd2b6 @@ -524,9 +494,8 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c :expectedresults: Content view filter updated successfully and 'original packages' value was changed - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -536,7 +505,9 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_update_repo_with_docker(self, module_product, sync_repo, content_view): + def test_positive_update_repo_with_docker( + self, module_product, sync_repo, content_view, target_sat + ): """Update existing content view filter which has yum repository assigned with new docker repository @@ -545,14 +516,13 @@ def 
test_positive_update_repo_with_docker(self, module_product, sync_repo, conte :expectedresults: Content view filter was updated successfully and has both repositories assigned (yum and docker) - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - docker_repository = entities.Repository( + docker_repository = target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -568,7 +538,7 @@ def test_positive_update_repo_with_docker(self, module_product, sync_repo, conte @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_update_name(self, name, content_view): + def test_negative_update_name(self, name, content_view, target_sat): """Try to update content view filter using invalid names only :id: 9799648a-3900-4186-8271-6b2dedb547ab @@ -577,36 +547,32 @@ def test_negative_update_name(self, name, content_view): :expectedresults: Content view filter was not updated - :CaseLevel: Integration - :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_same_name(self, content_view): + def test_negative_update_same_name(self, content_view, target_sat): """Try to update content view filter's name to already used one :id: b68569f1-9f7b-4a95-9e2a-a5da348abff7 :expectedresults: Content view filter was not updated - :CaseLevel: Integration - :CaseImportance: Low """ name = gen_string('alpha', 8) - entities.RPMContentViewFilter(content_view=content_view, name=name).create() - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_cv_by_id(self, content_view): + def test_negative_update_cv_by_id(self, content_view, target_sat): """Try to update content view filter using incorrect content view ID @@ -614,15 +580,14 @@ def test_negative_update_cv_by_id(self, content_view): :expectedresults: Content view filter was not updated - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.content_view.id = gen_integer(10000, 99999) with pytest.raises(HTTPError): cvf.update(['content_view']) @pytest.mark.tier2 - def test_negative_update_repo_by_id(self, sync_repo, content_view): + def test_negative_update_repo_by_id(self, sync_repo, content_view, target_sat): """Try to update content view filter using incorrect repository ID @@ -630,9 +595,8 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): :expectedresults: Content view filter was not updated - :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, repository=[sync_repo], ).create() @@ -641,7 +605,7 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): cvf.update(['repository']) @pytest.mark.tier2 - def test_negative_update_repo(self, module_product, 
sync_repo, content_view): + def test_negative_update_repo(self, module_product, sync_repo, content_view, target_sat): """Try to update content view filter with new repository which doesn't belong to filter's content view @@ -649,22 +613,19 @@ def test_negative_update_repo(self, module_product, sync_repo, content_view): :expectedresults: Content view filter was not updated - :CaseLevel: Integration - :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() cvf.repository = [new_repo] with pytest.raises(HTTPError): cvf.update(['repository']) - @pytest.mark.stream @pytest.mark.tier2 @pytest.mark.parametrize( 'filter_type', ['erratum', 'package_group', 'rpm', 'modulemd', 'docker'] @@ -687,8 +648,6 @@ def test_positive_check_filter_applied_flag( :expectedresults: 1. Filters applied flag is set correctly per usage. - :CaseLevel: Integration - :CaseImportance: Medium """ @@ -715,7 +674,7 @@ class TestContentViewFilterSearch: """Tests that search through content view filters.""" @pytest.mark.tier1 - def test_positive_search_erratum(self, content_view): + def test_positive_search_erratum(self, content_view, target_sat): """Search for an erratum content view filter's rules. :id: 6a86060f-6b4f-4688-8ea9-c198e0aeb3f6 @@ -726,11 +685,11 @@ def test_positive_search_erratum(self, content_view): :BZ: 1242534 """ - cv_filter = entities.ErratumContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.ErratumContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_package_group(self, content_view): + def test_positive_search_package_group(self, content_view, target_sat): """Search for an package group content view filter's rules. :id: 832c50cc-c2c8-48c9-9a23-80956baf5f3c @@ -739,11 +698,11 @@ def test_positive_search_package_group(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.PackageGroupContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.PackageGroupContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_rpm(self, content_view): + def test_positive_search_rpm(self, content_view, target_sat): """Search for an rpm content view filter's rules. 
:id: 1c9058f1-35c4-46f2-9b21-155ef988564a @@ -752,8 +711,8 @@ def test_positive_search_rpm(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.RPMContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() class TestContentViewFilterRule: @@ -763,7 +722,9 @@ class TestContentViewFilterRule: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_promote_module_stream_filter(self, module_org, content_view_module_stream): + def test_positive_promote_module_stream_filter( + self, module_org, content_view_module_stream, target_sat + ): """Verify Module Stream, Errata Count after Promote, Publish for Content View with Module Stream Exclude Filter @@ -772,18 +733,17 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo :expectedresults: Content View should get published and promoted successfully with correct Module Stream count. - :CaseLevel: Integration """ # Exclude module stream filter content_view = content_view_module_stream - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('duck')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -797,7 +757,7 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo assert content_view_version_info.errata_counts['total'] == 3 # Promote Content View - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) content_view = content_view.read() content_view_version_info = content_view.version[0].read() @@ -810,7 +770,9 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_include_exclude_module_stream_filter(self, content_view_module_stream): + def test_positive_include_exclude_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Include and Exclude Errata filter(modular errata) automatically force the copy of the module streams associated to it. 
@@ -826,17 +788,16 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module :expectedresults: Module Stream count changes automatically after including or excluding modular errata - :CaseLevel: Integration """ content_view = content_view_module_stream - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view = content_view.read() @@ -848,14 +809,14 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module # delete the previous content_view_filter cv_filter.delete() - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=False, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view_version_info = content_view.read().version[1].read() @@ -868,7 +829,7 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_multi_level_filters(self, content_view_module_stream): + def test_positive_multi_level_filters(self, content_view_module_stream, target_sat): """Verify promotion of Content View and Verify count after applying multi_filters (errata and module stream) @@ -876,28 +837,27 @@ def test_positive_multi_level_filters(self, content_view_module_stream): :expectedresults: Verify module stream and errata count should correct - :CaseLevel: Integration """ content_view = content_view_module_stream # apply include errata filter - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() # apply exclude module filter - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('walrus')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -911,7 +871,9 @@ def test_positive_multi_level_filters(self, content_view_module_stream): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - 
def test_positive_dependency_solving_module_stream_filter(self, content_view_module_stream): + def test_positive_dependency_solving_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Module Stream Content View Filter's with Dependency Solve 'Yes'. If dependency solving enabled then module streams with deps will not get fetched over even if the exclude filter has been applied. @@ -924,19 +886,18 @@ def test_positive_dependency_solving_module_stream_filter(self, content_view_mod :expectedresults: Verify dependant/non dependant module streams are getting fetched. - :CaseLevel: Integration """ content_view = content_view_module_stream content_view.solve_dependencies = True content_view = content_view.update(['solve_dependencies']) - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}" and version="{}'.format('duck', '20180704244205')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() diff --git a/tests/foreman/api/test_contentviewversion.py b/tests/foreman/api/test_contentviewversion.py index 50cabbadcb2..5f3414456e3 100644 --- a/tests/foreman/api/test_contentviewversion.py +++ b/tests/foreman/api/test_contentviewversion.py @@ -4,36 +4,34 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import DataFile -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + DEFAULT_CV, + ENVIRONMENT, + DataFile, +) @pytest.fixture(scope='module') -def module_lce_cv(module_org): +def module_lce_cv(module_org, module_target_sat): """Create some entities for all tests.""" - lce1 = entities.LifecycleEnvironment(organization=module_org).create() - lce2 = entities.LifecycleEnvironment(organization=module_org, prior=lce1).create() - default_cv = entities.ContentView(organization=module_org, name=DEFAULT_CV).search() + lce1 = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce2 = module_target_sat.api.LifecycleEnvironment(organization=module_org, prior=lce1).create() + default_cv = module_target_sat.api.ContentView( + organization=module_org, name=DEFAULT_CV + ).search() default_cvv = default_cv[0].version[0] return lce1, lce2, default_cvv @@ -49,8 +47,6 @@ def test_positive_create(module_cv): :expectedresults: Content View Version is created. - :CaseLevel: Integration - :CaseImportance: Critical """ # Fetch content view for latest information @@ -66,19 +62,17 @@ def test_positive_create(module_cv): @pytest.mark.tier2 -def test_negative_create(module_org): +def test_negative_create(module_org, module_target_sat): """Attempt to create content view version using the 'Default Content View'. 
:id: 0afd49c6-f3a4-403e-9929-849f51ffa922 :expectedresults: Content View Version is not created - :CaseLevel: Integration - :CaseImportance: Critical """ # The default content view cannot be published - cv = entities.ContentView(organization=module_org.id, name=DEFAULT_CV).search() + cv = module_target_sat.api.ContentView(organization=module_org.id, name=DEFAULT_CV).search() # There should be only 1 record returned assert len(cv) == 1 with pytest.raises(HTTPError): @@ -89,19 +83,17 @@ def test_negative_create(module_org): @pytest.mark.tier2 -def test_positive_promote_valid_environment(module_lce_cv, module_org): - """Promote a content view version to 'next in sequence lifecycle environment. +def test_positive_promote_valid_environment(module_lce_cv, module_org, module_target_sat): + """Promote a content view version to next in sequence lifecycle environment. :id: f205ca06-8ab5-4546-83bd-deac4363d487 :expectedresults: Promotion succeeds. - :CaseLevel: Integration - :CaseImportance: Critical """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and promote it. cv.publish() # Refresh the entity @@ -121,18 +113,16 @@ def test_positive_promote_valid_environment(module_lce_cv, module_org): @pytest.mark.tier2 -def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv): +def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv, module_target_sat): """Promote a content view version to a lifecycle environment that is 'out of sequence'. :id: e88405de-843d-4279-9d81-cedaab7c23cf :expectedresults: The promotion succeeds. - - :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -156,8 +146,6 @@ def test_negative_promote_valid_environment(module_lce_cv): :expectedresults: The promotion fails. - :CaseLevel: Integration - :CaseImportance: Low """ lce1, _, default_cvv = module_lce_cv @@ -166,18 +154,16 @@ def test_negative_promote_valid_environment(module_lce_cv): @pytest.mark.tier2 -def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org): +def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org, module_target_sat): """Attempt to promote a content view version to a Lifecycle environment that is 'out of sequence'. :id: 621d1bb6-92c6-4209-8369-6ea14a4c8a01 :expectedresults: The promotion fails. - - :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -195,7 +181,7 @@ def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org) @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete(module_org, module_product): +def test_positive_delete(module_org, module_product, module_target_sat): """Create content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -206,20 +192,18 @@ def test_positive_delete(module_org, module_product): :expectedresults: Content version deleted successfully - :CaseLevel: Integration - :CaseImportance: Critical """ key_content = DataFile.ZOO_CUSTOM_GPG_KEY.read_text() - gpgkey = entities.GPGKey(content=key_content, organization=module_org).create() + gpgkey = module_target_sat.api.GPGKey(content=key_content, organization=module_org).create() # Creates new repository with GPGKey - repo = entities.Repository( + repo = module_target_sat.api.Repository( gpg_key=gpgkey, product=module_product, url=settings.repos.yum_1.url ).create() # sync repository repo.sync() # Create content view - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Associate repository to new content view content_view.repository = [repo] content_view = content_view.update(['repository']) @@ -240,7 +224,7 @@ def test_positive_delete(module_org, module_product): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_delete_non_default(module_org): +def test_positive_delete_non_default(module_org, module_target_sat): """Create content view and publish and promote it to new environment. After that try to disassociate content view from 'Library' and one more non-default environment through 'delete_from_environment' @@ -250,17 +234,15 @@ def test_positive_delete_non_default(module_org): :expectedresults: Content view version deleted successfully - :CaseLevel: Integration - :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() assert len(content_view.version) == 1 assert len(content_view.version[0].read().environment) == 1 - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) cvv = content_view.version[0].read() assert len(cvv.environment) == 2 @@ -275,7 +257,7 @@ def test_positive_delete_non_default(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_composite_version(module_org): +def test_positive_delete_composite_version(module_org, module_target_sat): """Create composite content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -286,19 +268,19 @@ def test_positive_delete_composite_version(module_org): :expectedresults: Content version deleted successfully - :CaseLevel: Integration - :BZ: 1276479 """ # Create product with repository and publish it - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() repo.sync() # Create and publish content views - content_view = entities.ContentView(organization=module_org, repository=[repo]).create() + content_view = module_target_sat.api.ContentView( + organization=module_org, repository=[repo] + ).create() content_view.publish() # Create and publish composite content view - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( organization=module_org, composite=True, component=[content_view.read().version[0]] ).create() composite_cv.publish() @@ -316,7 +298,7 @@ def test_positive_delete_composite_version(module_org): @pytest.mark.tier2 -def test_negative_delete(module_org): +def test_negative_delete(module_org, module_target_sat): """Create content view and publish it. Try to delete content view version while content view is still associated with lifecycle environment @@ -325,11 +307,9 @@ def test_negative_delete(module_org): :expectedresults: Content view version is not deleted - :CaseLevel: Integration - :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() @@ -342,12 +322,12 @@ def test_negative_delete(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_renamed_cv_version_from_default_env(module_org): +def test_positive_remove_renamed_cv_version_from_default_env(module_org, module_target_sat): """Remove version of renamed content view from Library environment :id: 7d5961d0-6a9a-4610-979e-cbc4ddbc50ca - :Steps: + :steps: 1. Create a content view 2. 
Add a yum repo to the content view @@ -357,16 +337,16 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): :expectedresults: content view version is removed from Library environment - - :CaseLevel: Integration """ new_name = gen_string('alpha') # create yum product and repo - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo] content_view = content_view.update(['repository']) # publish the content view @@ -375,7 +355,9 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() # ensure that the content view version is promoted to the Library # lifecycle environment assert lce_library.name == ENVIRONMENT @@ -391,12 +373,12 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): @pytest.mark.tier2 -def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove QE promoted content view version from Library environment :id: c7795762-93bd-419c-ac49-d10dc26b842b - :Steps: + :steps: 1. Create a content view 2. 
Add docker repo(s) to it @@ -407,13 +389,13 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): :expectedresults: Content view version exist only in DEV, QE and not in Library - - :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - product = entities.Product(organization=module_org).create() - docker_repo = entities.Repository( + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -421,7 +403,7 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the docker repo - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -430,7 +412,9 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV and QE lifecycle environments for lce in [lce_dev, lce_qe]: @@ -447,12 +431,12 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove PROD promoted content view version from Library environment :id: 24911876-7c2a-4a12-a3aa-98051dfda29d - :Steps: + :steps: 1. Create a content view 2. 
Add yum repositories and docker repositories to CV @@ -463,16 +447,20 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): :expectedresults: Content view version exist only in DEV, QE, PROD and not in Library - - :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -480,7 +468,7 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the yum and docker repos - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -489,7 +477,9 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE PROD lifecycle environments for lce in [lce_dev, lce_qe, lce_prod]: @@ -508,12 +498,12 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_env(module_org): +def test_positive_remove_cv_version_from_env(module_org, module_target_sat): """Remove promoted content view version from environment :id: 17cf18bf-09d5-4641-b0e0-c50e628fa6c8 - :Steps: + :steps: 1. Create a content view 2. 
Add a yum repo and a docker repo to the content view @@ -527,18 +517,24 @@ def test_positive_remove_cv_version_from_env(module_org): :expectedresults: Content view version exist in Library, DEV, QE, STAGE, PROD - - :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # docker repo - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -546,7 +542,7 @@ def test_positive_remove_cv_version_from_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum and docker repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -555,7 +551,9 @@ def test_positive_remove_cv_version_from_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -580,12 +578,12 @@ def test_positive_remove_cv_version_from_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_multi_env(module_org): +def test_positive_remove_cv_version_from_multi_env(module_org, module_target_sat): """Remove promoted content view version from multiple environments :id: 18b86a68-8e6a-43ea-b95e-188fba125a26 - :Steps: + :steps: 1. Create a content view 2. 
Add a yum repo and a docker repo to the content view @@ -596,18 +594,24 @@ def test_positive_remove_cv_version_from_multi_env(module_org): :expectedresults: Content view version exists only in Library, DEV - :CaseLevel: Integration - :CaseImportance: Low """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -615,7 +619,7 @@ def test_positive_remove_cv_version_from_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -624,7 +628,9 @@ def test_positive_remove_cv_version_from_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -646,13 +652,13 @@ def test_positive_remove_cv_version_from_multi_env(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_cv_promoted_to_multi_env(module_org): +def test_positive_delete_cv_promoted_to_multi_env(module_org, module_target_sat): """Delete published content view with version promoted to multiple environments :id: c164bd97-e710-4a5a-9c9f-657e6bed804b - :Steps: + :steps: 1. Create a content view 2. Add a yum repo and a docker repo to the content view @@ -662,20 +668,26 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): 5. 
Delete the content view, this should delete the content with all it's published/promoted versions from all environments - :expectedresults: The content view doesn't exists - - :CaseLevel: Integration + :expectedresults: The content view doesn't exist :CaseImportance: Critical """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -683,7 +695,7 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -692,7 +704,9 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -719,7 +733,7 @@ def test_positive_remove_cv_version_from_env_with_host_registered(): :id: a5b9ba8b-80e6-4435-bc0a-041b3fda227c - :Steps: + :steps: 1. Create a content view cv1 2. Add a yum repo to the content view @@ -744,8 +758,6 @@ def test_positive_remove_cv_version_from_env_with_host_registered(): 5. At content-host some package from cv1 is installable :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -759,7 +771,7 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(): :id: 10699af9-617e-4930-9c80-2827a0ba52eb - :Steps: + :steps: 1. Create two content views, cv1 and cv2 2. Add a yum repo to both content views @@ -786,8 +798,6 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(): 6. 
At content-host some package from cv2 is installable :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -799,7 +809,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario(): :id: 1e8a8e64-eec8-49e0-b121-919c53f416d2 - :Steps: + :steps: 1. Create a content view 2. module_lce_cv satellite to use a capsule and to sync all lifecycle @@ -823,6 +833,4 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario(): Library and DEV and exists only in QE and PROD :CaseAutomation: NotAutomated - - :CaseLevel: System """ diff --git a/tests/foreman/api/test_convert2rhel.py b/tests/foreman/api/test_convert2rhel.py index 544f01b3e86..37bbfa30058 100644 --- a/tests/foreman/api/test_convert2rhel.py +++ b/tests/foreman/api/test_convert2rhel.py @@ -4,23 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Integration - :CaseComponent: Registration -:Team: Rocket +:CaseImportance: Critical -:TestType: Functional +:Team: Rocket -:Upstream: No """ import pytest import requests from robottelo.config import settings -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import REPOS +from robottelo.constants import DEFAULT_ARCHITECTURE, DEFAULT_SUBSCRIPTION_NAME, REPOS def create_repo(sat, org, repo_url, ssl_cert=None): @@ -49,49 +44,50 @@ def create_activation_key(sat, org, lce, cv, subscription_id): environment=lce, ).create() act_key.add_subscriptions(data={'subscription_id': subscription_id}) + content = sat.cli.ActivationKey.product_content({'id': act_key.id, 'organization-id': org.id}) + act_key.content_override( + data={'content_overrides': [{'content_label': content[0]['label'], 'value': '1'}]} + ) + ak_subscriptions = act_key.product_content()['results'] + ak_subscriptions[0]['enabled'] = True return act_key def update_cv(sat, cv, lce, repos): """Update and publish Content view with repos""" - cv = sat.api.ContentView(id=cv.id, repository=repos).update(["repository"]) + cv = sat.api.ContentView(id=cv.id, repository=repos).update(['repository']) cv.publish() cv = cv.read() cv.version[-1].promote(data={'environment_ids': lce.id, 'force': False}) return cv -def register_host(sat, act_key, module_org, module_loc, host, ubi=None): - """Register host to satellite""" - # generate registration command - command = sat.api.RegistrationCommand( - organization=module_org, - activation_keys=[act_key.name], - location=module_loc, - insecure=True, - repo=ubi, - ).create() - assert host.execute(command).status == 0 - - @pytest.fixture(scope='module') -def ssl_cert(module_target_sat, module_org): +def ssl_cert(module_target_sat, module_entitlement_manifest_org): """Create credetial with SSL cert for Oracle Linux""" res = requests.get(settings.repos.convert2rhel.ssl_cert_oracle) res.raise_for_status() return module_target_sat.api.ContentCredential( - content=res.text, organization=module_org, content_type='cert' + content=res.text, organization=module_entitlement_manifest_org, content_type='cert' ).create() @pytest.fixture -def activation_key_rhel(target_sat, module_org, module_lce, module_promoted_cv, version): +def activation_key_rhel( + module_target_sat, module_entitlement_manifest_org, module_lce, module_promoted_cv, version +): """Create activation key that will be used after conversion for registration""" - subs = target_sat.api.Subscription(organization=module_org).search( - query={'search': f'{DEFAULT_SUBSCRIPTION_NAME}'} - ) + subs = module_target_sat.api.Subscription( + 
organization=module_entitlement_manifest_org.id + ).search(query={'search': f'{DEFAULT_SUBSCRIPTION_NAME}'}) assert subs - return create_activation_key(target_sat, module_org, module_lce, module_promoted_cv, subs[0].id) + return create_activation_key( + module_target_sat, + module_entitlement_manifest_org, + module_lce, + module_promoted_cv, + subs[0].id, + ) @pytest.fixture(scope='module') @@ -125,6 +121,8 @@ def enable_rhel_subscriptions(module_target_sat, module_entitlement_manifest_org module_target_sat.wait_for_tasks( search_query=(f'id = {task["id"]}'), poll_timeout=2500, + search_rate=20, + max_tries=10, ) task_status = module_target_sat.api.ForemanTask(id=task['id']).poll() assert task_status['result'] == 'success' @@ -133,9 +131,9 @@ def enable_rhel_subscriptions(module_target_sat, module_entitlement_manifest_org @pytest.fixture def centos( - target_sat, + module_target_sat, centos_host, - module_org, + module_entitlement_manifest_org, smart_proxy_location, module_promoted_cv, module_lce, @@ -143,18 +141,32 @@ def centos( enable_rhel_subscriptions, ): """Deploy and register Centos host""" - # updating centos packages on CentOS 8 is necessary for conversion major = version.split('.')[0] - if major == '8': - centos_host.execute("yum update -y centos-*") + assert centos_host.execute('yum -y update').status == 0 repo_url = settings.repos.convert2rhel.convert_to_rhel_repo.format(major) - repo = create_repo(target_sat, module_org, repo_url) - cv = update_cv(target_sat, module_promoted_cv, module_lce, enable_rhel_subscriptions + [repo]) - c2r_sub = target_sat.api.Subscription(organization=module_org, name=repo.product.name).search()[ - 0 - ] - act_key = create_activation_key(target_sat, module_org, module_lce, cv, c2r_sub.id) - register_host(target_sat, act_key, module_org, smart_proxy_location, centos_host) + repo = create_repo(module_target_sat, module_entitlement_manifest_org, repo_url) + cv = update_cv( + module_target_sat, module_promoted_cv, module_lce, enable_rhel_subscriptions + [repo] + ) + c2r_sub = module_target_sat.api.Subscription( + organization=module_entitlement_manifest_org.id, name=repo.product.name + ).search()[0] + act_key = create_activation_key( + module_target_sat, module_entitlement_manifest_org, module_lce, cv, c2r_sub.id + ) + + # Register CentOS host with Satellite + command = module_target_sat.api.RegistrationCommand( + organization=module_entitlement_manifest_org, + activation_keys=[act_key.name], + location=smart_proxy_location, + insecure=True, + ).create() + assert centos_host.execute(command).status == 0 + + if centos_host.execute('needs-restarting -r').status == 1: + centos_host.power_control(state='reboot') + yield centos_host # close ssh session before teardown, because of reboot in conversion it may cause problems centos_host.close() @@ -162,9 +174,9 @@ def centos( @pytest.fixture def oracle( - target_sat, + module_target_sat, oracle_host, - module_org, + module_entitlement_manifest_org, smart_proxy_location, module_promoted_cv, module_lce, @@ -173,28 +185,57 @@ def oracle( enable_rhel_subscriptions, ): """Deploy and register Oracle host""" + major = version.split('.')[0] + assert oracle_host.execute('yum -y update').status == 0 # disable rhn-client-tools because it obsoletes the subscription manager package oracle_host.execute('echo "exclude=rhn-client-tools" >> /etc/yum.conf') - # install and set correct kernel, based on convert2rhel docs + + # Install and set correct RHEL compatible kernel and using non-UEK kernel, based on C2R docs result = 
oracle_host.execute( 'yum install -y kernel && ' 'grubby --set-default /boot/vmlinuz-' '`rpm -q --qf "%{BUILDTIME}\t%{EVR}.%{ARCH}\n" kernel | sort -nr | head -1 | cut -f2`' ) assert result.status == 0 - oracle_host.power_control(state='reboot') - major = version.split('.')[0] + + if major == '8': + # needs-restarting missing in OEL8 + assert oracle_host.execute('dnf install -y yum-utils').status == 0 + # Fix inhibitor CHECK_FIREWALLD_AVAILABILITY::FIREWALLD_MODULES_CLEANUP_ON_EXIT_CONFIG - + # Firewalld is set to cleanup modules after exit + result = oracle_host.execute( + 'sed -i -- "s/CleanupModulesOnExit=yes/CleanupModulesOnExit=no/g" ' + '/etc/firewalld/firewalld.conf && firewall-cmd --reload' + ) + assert result.status == 0 + + if oracle_host.execute('needs-restarting -r').status == 1: + oracle_host.power_control(state='reboot') + repo_url = settings.repos.convert2rhel.convert_to_rhel_repo.format(major) - repo = create_repo(target_sat, module_org, repo_url, ssl_cert) - cv = update_cv(target_sat, module_promoted_cv, module_lce, enable_rhel_subscriptions + [repo]) - c2r_sub = target_sat.api.Subscription(organization=module_org, name=repo.product.name).search()[ - 0 - ] - act_key = create_activation_key(target_sat, module_org, module_lce, cv, c2r_sub.id) + repo = create_repo(module_target_sat, module_entitlement_manifest_org, repo_url, ssl_cert) + cv = update_cv( + module_target_sat, module_promoted_cv, module_lce, enable_rhel_subscriptions + [repo] + ) + c2r_sub = module_target_sat.api.Subscription( + organization=module_entitlement_manifest_org, name=repo.product.name + ).search()[0] + act_key = create_activation_key( + module_target_sat, module_entitlement_manifest_org, module_lce, cv, c2r_sub.id + ) + # UBI repo required for subscription-manager packages on Oracle ubi_url = settings.repos.convert2rhel.ubi7 if major == '7' else settings.repos.convert2rhel.ubi8 - ubi = create_repo(target_sat, module_org, ubi_url) - ubi_repo = ubi.full_path.replace('https', 'http') - register_host(target_sat, act_key, module_org, smart_proxy_location, oracle_host, ubi_repo) + + # Register Oracle host with Satellite + command = module_target_sat.api.RegistrationCommand( + organization=module_entitlement_manifest_org, + activation_keys=[act_key.name], + location=smart_proxy_location, + insecure=True, + repo=ubi_url, + ).create() + assert oracle_host.execute(command).status == 0 + yield oracle_host # close ssh session before teardown, because of reboot in conversion it may cause problems oracle_host.close() @@ -203,21 +244,17 @@ def oracle( @pytest.fixture(scope='module') def version(request): """Version of converted OS""" - return settings.content_host.get(request.param).vm.release + return settings.content_host.get(request.param).vm.deploy_rhel_version @pytest.mark.e2e -@pytest.mark.parametrize( - "version", - ['oracle7', 'oracle8'], - indirect=True, -) -def test_convert2rhel_oracle(target_sat, oracle, activation_key_rhel, version): +@pytest.mark.parametrize('version', ['oracle7', 'oracle8'], indirect=True) +def test_convert2rhel_oracle(module_target_sat, oracle, activation_key_rhel, version): """Convert Oracle linux to RHEL :id: 7fd393f0-551a-4de0-acdd-7f026b485f79 - :Steps: + :steps: 0. Have host registered to Satellite 1. Check for operating system 2. 
Convert host to RHEL @@ -226,54 +263,62 @@ def test_convert2rhel_oracle(target_sat, oracle, activation_key_rhel, version): and subscription status :parametrized: yes - - :CaseImportance: Medium """ - host_content = target_sat.api.Host(id=oracle.hostname).read_json() + major = version.split('.')[0] + assert oracle.execute('yum -y update').status == 0 + if major == '8': + # Fix inhibitor TAINTED_KMODS::TAINTED_KMODS_DETECTED - Tainted kernel modules detected + blacklist_cfg = '/etc/modprobe.d/blacklist.conf' + assert oracle.execute('modprobe -r nvme_tcp').status == 0 + assert oracle.execute(f'echo "blacklist nvme_tcp" >> {blacklist_cfg}').status == 0 + assert oracle.execute(f'echo "install nvme_tcp /bin/false" >> {blacklist_cfg}').status == 0 + + host_content = module_target_sat.api.Host(id=oracle.hostname).read_json() assert host_content['operatingsystem_name'] == f"OracleLinux {version}" # execute job 'Convert 2 RHEL' on host template_id = ( - target_sat.api.JobTemplate().search(query={'search': 'name="Convert to RHEL"'})[0].id + module_target_sat.api.JobTemplate().search(query={'search': 'name="Convert to RHEL"'})[0].id ) - job = target_sat.api.JobInvocation().run( + job = module_target_sat.api.JobInvocation().run( synchronous=False, data={ 'job_template_id': template_id, 'inputs': { 'Activation Key': activation_key_rhel.id, 'Restart': 'yes', + 'Data telemetry': 'yes', }, 'targeting_type': 'static_query', 'search_query': f'name = {oracle.hostname}', }, ) # wait for job to complete - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + module_target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=2500 ) - result = target_sat.api.JobInvocation(id=job['id']).read() + result = module_target_sat.api.JobInvocation(id=job['id']).read() assert result.succeeded == 1 # check facts: correct os and valid subscription status - host_content = target_sat.api.Host(id=oracle.hostname).read_json() + host_content = module_target_sat.api.Host(id=oracle.hostname).read_json() # workaround for BZ 2080347 assert ( - host_content['operatingsystem_name'].startswith(f"RHEL Server {version}") - or host_content['operatingsystem_name'].startswith(f"RedHat {version}") - or host_content['operatingsystem_name'].startswith(f"RHEL {version}") + host_content['operatingsystem_name'].startswith(f'RHEL Server {version}') + or host_content['operatingsystem_name'].startswith(f'RedHat {version}') + or host_content['operatingsystem_name'].startswith(f'RHEL {version}') ) assert host_content['subscription_status'] == 0 @pytest.mark.e2e -@pytest.mark.parametrize("version", ['centos7', 'centos8'], indirect=True) -def test_convert2rhel_centos(target_sat, centos, activation_key_rhel, version): - """Convert Centos linux to RHEL +@pytest.mark.parametrize('version', ['centos7', 'centos8'], indirect=True) +def test_convert2rhel_centos(module_target_sat, centos, activation_key_rhel, version): + """Convert CentOS linux to RHEL :id: 6f698440-7d85-4deb-8dd9-363ea9003b92 - :Steps: + :steps: 0. Have host registered to Satellite 1. Check for operating system 2. 
Convert host to RHEL @@ -282,42 +327,42 @@ def test_convert2rhel_centos(target_sat, centos, activation_key_rhel, version): and subscription status :parametrized: yes - - :CaseImportance: Medium """ - host_content = target_sat.api.Host(id=centos.hostname).read_json() + host_content = module_target_sat.api.Host(id=centos.hostname).read_json() major = version.split('.')[0] - assert host_content['operatingsystem_name'] == f"CentOS {major}" - + assert host_content['operatingsystem_name'] == f'CentOS {major}' # execute job 'Convert 2 RHEL' on host template_id = ( - target_sat.api.JobTemplate().search(query={'search': 'name="Convert to RHEL"'})[0].id + module_target_sat.api.JobTemplate().search(query={'search': 'name="Convert to RHEL"'})[0].id ) - job = target_sat.api.JobInvocation().run( + job = module_target_sat.api.JobInvocation().run( synchronous=False, data={ 'job_template_id': template_id, 'inputs': { 'Activation Key': activation_key_rhel.id, 'Restart': 'yes', + 'Data telemetry': 'yes', }, 'targeting_type': 'static_query', 'search_query': f'name = {centos.hostname}', }, ) # wait for job to complete - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + module_target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', + poll_timeout=2500, + search_rate=20, ) - result = target_sat.api.JobInvocation(id=job['id']).read() + result = module_target_sat.api.JobInvocation(id=job['id']).read() assert result.succeeded == 1 # check facts: correct os and valid subscription status - host_content = target_sat.api.Host(id=centos.hostname).read_json() + host_content = module_target_sat.api.Host(id=centos.hostname).read_json() # workaround for BZ 2080347 assert ( - host_content['operatingsystem_name'].startswith(f"RHEL Server {version}") - or host_content['operatingsystem_name'].startswith(f"RedHat {version}") - or host_content['operatingsystem_name'].startswith(f"RHEL {version}") + host_content['operatingsystem_name'].startswith(f'RHEL Server {version}') + or host_content['operatingsystem_name'].startswith(f'RedHat {version}') + or host_content['operatingsystem_name'].startswith(f'RHEL {version}') ) assert host_content['subscription_status'] == 0 diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index ee2f1aa92ff..59783b9a8b6 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -8,22 +8,13 @@ :CaseAutomation: Automated -:CaseLevel: System - -:TestType: Functional - -:Upstream: No """ import re -import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_ipaddr -from fauxfactory import gen_mac -from fauxfactory import gen_string +from fauxfactory import gen_choice, gen_ipaddr, gen_mac, gen_string from nailgun import entity_mixins -from wait_for import TimedOutError -from wait_for import wait_for +import pytest +from wait_for import TimedOutError, wait_for from robottelo.logging import logger from robottelo.utils.datafactory import valid_data_list @@ -76,9 +67,9 @@ def _assert_discovered_host(host, channel=None, user_config=None): ]: try: dhcp_pxe = _wait_for_log(channel, pattern[0], timeout=10) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') + raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') from err groups = re.search('DHCPACK on (\\d.+) to', dhcp_pxe.out) assert 
len(groups.groups()) == 1, 'Unable to parse bootloader ip address' pxe_ip = groups.groups()[0] @@ -91,9 +82,9 @@ def _assert_discovered_host(host, channel=None, user_config=None): ]: try: _wait_for_log(channel, pattern[0], timeout=20) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for VM (tftp) to fetch {pattern[1]}') + raise AssertionError(f'Timed out waiting for VM (tftp) to fetch {pattern[1]}') from err # assert that server receives DHCP discover from FDI for pattern in [ ( @@ -104,9 +95,9 @@ def _assert_discovered_host(host, channel=None, user_config=None): ]: try: dhcp_fdi = _wait_for_log(channel, pattern[0], timeout=30) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') + raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') from err groups = re.search('DHCPACK on (\\d.+) to', dhcp_fdi.out) assert len(groups.groups()) == 1, 'Unable to parse FDI ip address' fdi_ip = groups.groups()[0] @@ -118,17 +109,17 @@ def _assert_discovered_host(host, channel=None, user_config=None): f'"/api/v2/discovered_hosts/facts" for {fdi_ip}', timeout=60, ) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError('Timed out waiting for /facts POST request') + raise AssertionError('Timed out waiting for /facts POST request') from err groups = re.search('\\[I\\|app\\|([a-z0-9]+)\\]', facts_fdi.out) assert len(groups.groups()) == 1, 'Unable to parse POST request UUID' req_id = groups.groups()[0] try: _wait_for_log(channel, f'\\[I\\|app\\|{req_id}\\] Completed 201 Created') - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError('Timed out waiting for "/facts" 201 response') + raise AssertionError('Timed out waiting for "/facts" 201 response') from err default_config = entity_mixins.DEFAULT_SERVER_CONFIG try: wait_for( @@ -142,8 +133,10 @@ def _assert_discovered_host(host, channel=None, user_config=None): delay=2, logger=logger, ) - except TimedOutError: - raise AssertionError('Timed out waiting for discovered_host to appear on satellite') + except TimedOutError as err: + raise AssertionError( + 'Timed out waiting for discovered_host to appear on satellite' + ) from err discovered_host = host.api.DiscoveredHost(user_config or default_config).search( query={'search': f'name={host.guest_name}'} ) @@ -157,11 +150,11 @@ def assert_discovered_host_provisioned(channel, ksrepo): try: log = _wait_for_log(channel, pattern, timeout=300, delay=10) assert pattern in log - except TimedOutError: - raise AssertionError(f'Timed out waiting for {pattern} from VM') + except TimedOutError as err: + raise AssertionError(f'Timed out waiting for {pattern} from VM') from err -@pytest.fixture() +@pytest.fixture def discovered_host_cleanup(target_sat): hosts = target_sat.api.DiscoveredHost().search() for host in hosts: @@ -176,7 +169,7 @@ class TestDiscoveredHost: @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) - @pytest.mark.rhel_ver_match('[^6]') + @pytest.mark.rhel_ver_list([8, 9]) @pytest.mark.tier3 def test_positive_provision_pxe_host( self, @@ -194,8 +187,7 @@ def test_positive_provision_pxe_host( :Setup: Provisioning and discovery should be configured - :Steps: - + :steps: 1. Boot up the host to discover 2. 
Provision the host @@ -208,8 +200,8 @@ def test_positive_provision_pxe_host( mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=1500, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup @@ -222,8 +214,7 @@ def test_positive_provision_pxe_host( host = sat.api.Host().search(query={"search": f'name={host.name}'})[0] assert host assert_discovered_host_provisioned(shell, module_provisioning_rhel_content.ksrepo) - host.delete() - assert not sat.api.Host().search(query={"search": f'name={host.name}'}) + sat.provisioning_cleanup(host.name) provisioning_host.blank = True @pytest.mark.upgrade @@ -231,7 +222,7 @@ def test_positive_provision_pxe_host( @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) - @pytest.mark.rhel_ver_match('[^6]') + @pytest.mark.rhel_ver_list([8, 9]) @pytest.mark.tier3 def test_positive_provision_pxe_less_host( self, @@ -247,12 +238,10 @@ def test_positive_provision_pxe_less_host( :Setup: Provisioning should be configured and a host should be discovered - :Steps: PUT /api/v2/discovered_hosts/:id + :steps: PUT /api/v2/discovered_hosts/:id :expectedresults: Host should be provisioned successfully - :CaseAutomation: NotAutomated - :CaseImportance: Critical """ sat = module_discovery_sat.sat @@ -260,8 +249,8 @@ def test_positive_provision_pxe_less_host( mac = pxeless_discovery_host._broker_args['provisioning_nic_mac_addr'] wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=1500, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup @@ -274,13 +263,13 @@ def test_positive_provision_pxe_less_host( host = sat.api.Host().search(query={"search": f'name={host.name}'})[0] assert host assert_discovered_host_provisioned(shell, module_provisioning_rhel_content.ksrepo) - host.delete() - assert not sat.api.Host().search(query={"search": f'name={host.name}'}) + sat.provisioning_cleanup(host.name) pxeless_discovery_host.blank = True - @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_auto_provision_pxe_host(self): + def test_positive_auto_provision_pxe_host( + self, module_discovery_hostgroup, module_target_sat, discovery_org, discovery_location + ): """Auto provision a pxe-based host by executing discovery rules :id: c93fd7c9-41ef-4eb5-8042-f72e87e67e10 @@ -290,18 +279,28 @@ def test_positive_auto_provision_pxe_host(self): :Setup: Provisioning should be configured and a host should be discovered - :Steps: POST /api/v2/discovered_hosts/:id/auto_provision + :steps: POST /api/v2/discovered_hosts/:id/auto_provision :expectedresults: Selected Host should be auto-provisioned successfully - :CaseAutomation: Automated - :CaseImportance: Critical """ + discovered_host = module_target_sat.api_factory.create_discovered_host() + + rule = module_target_sat.api.DiscoveryRule( + max_count=1, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + ).create() + result = module_target_sat.api.DiscoveredHost(id=discovered_host['id']).auto_provision() + assert f'provisioned with rule {rule.name}' in 
result['message'] - @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_auto_provision_all(self): + def test_positive_auto_provision_all( + self, module_discovery_hostgroup, module_target_sat, discovery_org, discovery_location + ): """Auto provision all host by executing discovery rules :id: 954d3688-62d9-47f7-9106-a4fff8825ffa @@ -309,7 +308,7 @@ def test_positive_auto_provision_all(self): :Setup: Provisioning should be configured and more than one host should be discovered - :Steps: POST /api/v2/discovered_hosts/auto_provision_all + :steps: POST /api/v2/discovered_hosts/auto_provision_all :expectedresults: All discovered hosts should be auto-provisioned successfully @@ -318,10 +317,23 @@ def test_positive_auto_provision_all(self): :CaseImportance: High """ + module_target_sat.api.DiscoveryRule( + max_count=25, + hostgroup=module_discovery_hostgroup, + search_=f'location = "{discovery_location.name}"', + location=[discovery_location], + organization=[discovery_org], + ).create() + + for _ in range(2): + module_target_sat.api_factory.create_discovered_host() + + result = module_target_sat.api.DiscoveredHost().auto_provision_all() + assert '2 discovered hosts were provisioned' in result['message'] @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_refresh_facts_pxe_host(self): + def test_positive_refresh_facts_pxe_host(self, module_target_sat): """Refresh the facts of pxe based discovered hosts by adding a new NIC :id: 413fb608-cd5c-441d-af86-fd2d40346d96 @@ -331,19 +343,27 @@ def test_positive_refresh_facts_pxe_host(self): be discovered 2. Add a NIC on discovered host - :Steps: PUT /api/v2/discovered_hosts/:id/refresh_facts + :steps: PUT /api/v2/discovered_hosts/:id/refresh_facts :expectedresults: Added Fact should be displayed on refreshing the facts - :CaseAutomation: NotAutomated - :CaseImportance: High """ - @pytest.mark.stubbed + @pytest.mark.on_premises_provisioning + @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) + @pytest.mark.parametrize('pxe_loader', ['uefi'], indirect=True) + @pytest.mark.rhel_ver_match('9') @pytest.mark.tier3 - def test_positive_reboot_pxe_host(self): + def test_positive_reboot_pxe_host( + self, + module_provisioning_rhel_content, + module_discovery_sat, + provisioning_host, + provisioning_hostgroup, + pxe_loader, + ): """Rebooting a pxe based discovered host :id: 69c807f8-5646-4aa6-8b3c-5ecab69560fc @@ -352,41 +372,79 @@ def test_positive_reboot_pxe_host(self): :Setup: Provisioning should be configured and a host should be discovered via PXE boot. 
- :Steps: PUT /api/v2/discovered_hosts/:id/reboot + :steps: PUT /api/v2/discovered_hosts/:id/reboot :expectedresults: Selected host should be rebooted successfully - :CaseAutomation: Automated - :CaseImportance: Medium """ + sat = module_discovery_sat.sat + provisioning_host.power_control(ensure=False) + mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=1500, + delay=20, + ) - @pytest.mark.stubbed + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + result = sat.api.DiscoveredHost(id=discovered_host.id).reboot() + assert 'Unable to perform reboot' not in result + + @pytest.mark.on_premises_provisioning + @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) + @pytest.mark.rhel_ver_match('9') @pytest.mark.tier3 - def test_positive_reboot_all_pxe_hosts(self): + def test_positive_reboot_all_pxe_hosts( + self, + module_provisioning_rhel_content, + module_discovery_sat, + provision_multiple_hosts, + provisioning_hostgroup, + pxe_loader, + ): """Rebooting all pxe-based discovered hosts :id: 69c807f8-5646-4aa6-8b3c-5ecdb69560ed :parametrized: yes - :Setup: Provisioning should be configured and a hosts should be discovered via PXE boot. - - :Steps: PUT /api/v2/discovered_hosts/reboot_all + :setup: Provisioning should be configured and hosts should be discovered via PXE boot. - :expectedresults: All disdcovered host should be rebooted successfully + :steps: PUT /api/v2/discovered_hosts/reboot_all - :CaseAutomation: Automated + :expectedresults: All discovered hosts should be rebooted successfully :CaseImportance: Medium + + :BZ: 2264195 """ + sat = module_discovery_sat.sat + for host in provision_multiple_hosts: + host.power_control(ensure=False) + mac = host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], # noqa: B023 + timeout=1500, + delay=20, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + # Until BZ 2264195 is resolved, reboot_all is expected to fail + result = sat.api.DiscoveredHost().reboot_all() + assert 'Discovered hosts are rebooting now' in result['success_msg'] class TestFakeDiscoveryTests: """Tests that use fake discovered host. - :CaseAutomation: Automated - :CaseImportance: High """ @@ -430,15 +488,13 @@ def test_positive_upload_facts(self, target_sat): :BZ: 1349364, 1392919 - :Steps: + :steps: 1. POST /api/v2/discovered_hosts/facts 2. 
Read the created discovered host :expectedresults: Host should be created successfully - :CaseLevel: Integration - :BZ: 1731112 """ name = gen_choice(list(valid_data_list().values())) @@ -456,7 +512,7 @@ def test_positive_delete_pxe_host(self, target_sat): :Setup: Provisioning should be configured and a host should be discovered - :Steps: DELETE /api/v2/discovered_hosts/:id + :steps: DELETE /api/v2/discovered_hosts/:id :expectedresults: Discovered Host should be deleted successfully """ diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index 2c3680e2985..3d1c1f4bf48 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -8,38 +8,30 @@ :Team: Rocket -:TestType: Functional - -:CaseLevel: Acceptance - :CaseImportance: High -:Upstream: No """ +from fauxfactory import gen_choice, gen_integer, gen_string import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_string -from nailgun import entities from requests.exceptions import HTTPError from robottelo.utils.datafactory import valid_data_list -@pytest.fixture(scope="module") -def module_hostgroup(module_org): - module_hostgroup = entities.HostGroup(organization=[module_org]).create() +@pytest.fixture(scope='module') +def module_hostgroup(module_org, module_target_sat): + module_hostgroup = module_target_sat.api.HostGroup(organization=[module_org]).create() yield module_hostgroup module_hostgroup.delete() -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def module_location(module_location): yield module_location module_location.delete() -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def module_org(module_org): yield module_org module_org.delete() @@ -47,7 +39,7 @@ def module_org(module_org): @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup): +def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, module_target_sat): """Create a new discovery rule with several attributes, update them and delete the rule itself. 
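The module-scoped fixtures above follow pytest's yield-teardown idiom: everything before the yield is setup, everything after it runs once as teardown after the module's last test. A minimal self-contained sketch of the pattern, with an illustrative dict standing in for a created API entity (names here are not the repo's fixtures):

import pytest

@pytest.fixture(scope='module')
def tracked_resource():
    resource = {'name': 'example', 'deleted': False}  # stands in for a created API entity
    yield resource  # every test in the module receives this same object
    resource['deleted'] = True  # teardown: runs once, after the module's last test

def test_resource_is_live(tracked_resource):
    assert not tracked_resource['deleted']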
@@ -69,7 +61,7 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup) name = gen_choice(list(valid_data_list().values())) search = gen_choice(searches) hostname = 'myhost-<%= rand(99999) %>' - discovery_rule = entities.DiscoveryRule( + discovery_rule = module_target_sat.api.DiscoveryRule( name=name, search_=search, hostname=hostname, @@ -106,7 +98,7 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup) @pytest.mark.tier1 -def test_negative_create_with_invalid_host_limit_and_priority(): +def test_negative_create_with_invalid_host_limit_and_priority(module_target_sat): """Create a discovery rule with invalid host limit and priority :id: e3c7acb1-ac56-496b-ac04-2a83f66ec290 @@ -114,14 +106,15 @@ def test_negative_create_with_invalid_host_limit_and_priority(): :expectedresults: Validation error should be raised """ with pytest.raises(HTTPError): - entities.DiscoveryRule(max_count=gen_string('alpha')).create() + module_target_sat.api.DiscoveryRule(max_count=gen_string('alpha')).create() with pytest.raises(HTTPError): - entities.DiscoveryRule(priority=gen_string('alpha')).create() + module_target_sat.api.DiscoveryRule(priority=gen_string('alpha')).create() -@pytest.mark.stubbed @pytest.mark.tier3 -def test_positive_provision_with_rule_priority(): +def test_positive_update_and_provision_with_rule_priority( + module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org +): """Create multiple discovery rules with different priority and check rule with highest priority executed first @@ -132,44 +125,65 @@ def test_positive_provision_with_rule_priority(): :expectedresults: Host with lower count have higher priority and that rule should be executed first - :CaseAutomation: NotAutomated - :CaseImportance: High """ + discovered_host = module_target_sat.api_factory.create_discovered_host() + + prio_rule = module_target_sat.api.DiscoveryRule( + max_count=5, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=1, + ).create() + rule = module_target_sat.api.DiscoveryRule( + max_count=5, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=10, + ).create() -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_multi_provision_with_rule_limit(): - """Create a discovery rule (CPU_COUNT = 2) with host limit 1 and - provision more than 2 hosts with same rule - - :id: 553c8ebf-d1c1-4ac2-7948-d3664a5b450b - - :Setup: Hosts with two CPUs should already be discovered - - :expectedresults: Rule should only be applied to 2 discovered hosts - and the rule should already be skipped for the 3rd one. - - :CaseAutomation: NotAutomated + result = module_target_sat.api.DiscoveredHost(id=discovered_host['id']).auto_provision() + assert f'provisioned with rule {prio_rule.name}' in result['message'] - :CaseImportance: High - """ + # Delete discovery rule + for _ in rule, prio_rule: + _.delete() + with pytest.raises(HTTPError): + _.read() -@pytest.mark.stubbed @pytest.mark.tier3 -def test_positive_provision_with_updated_discovery_rule(): - """Update an existing rule and provision a host with it. 
+def test_positive_multi_provision_with_rule_limit( + request, module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org +): + """Create a discovery rule with a certain host limit and try to provision more hosts than the limit allows - :id: 3fb20f0f-02e9-4158-9744-f583308c4e89 - - :Setup: Host should already be discovered + :id: 553c8ebf-d1c1-4ac2-7948-d3664a5b450b - :expectedresults: User should be able to update the rule and it should - be applied on discovered host + :Setup: Hosts should already be discovered - :CaseAutomation: NotAutomated + :expectedresults: Rule should only be applied to the number of hosts set as the limit in the rule :CaseImportance: High """ + + discovered_host1 = module_target_sat.api_factory.create_discovered_host() + request.addfinalizer(module_target_sat.api.Host(id=discovered_host1['id']).delete) + discovered_host2 = module_target_sat.api_factory.create_discovered_host() + request.addfinalizer(module_target_sat.api.DiscoveredHost(id=discovered_host2['id']).delete) + rule = module_target_sat.api.DiscoveryRule( + max_count=1, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host1["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=1000, + ).create() + request.addfinalizer(rule.delete) + result = module_target_sat.api.DiscoveredHost().auto_provision_all() + assert '1 discovered hosts were provisioned' in result['message'] diff --git a/tests/foreman/api/test_docker.py b/tests/foreman/api/test_docker.py index 5d5fe969a72..eab9d213db5 100644 --- a/tests/foreman/api/test_docker.py +++ b/tests/foreman/api/test_docker.py @@ -4,36 +4,28 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from random import choice -from random import randint -from random import shuffle +from random import choice, randint, shuffle +from fauxfactory import gen_string, gen_url import pytest -from fauxfactory import gen_string -from fauxfactory import gen_url -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import invalid_docker_upstream_names -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_docker_repository_names -from robottelo.utils.datafactory import valid_docker_upstream_names +from robottelo.constants import CONTAINER_REGISTRY_HUB, CONTAINER_UPSTREAM_NAME +from robottelo.utils.datafactory import ( + generate_strings_list, + invalid_docker_upstream_names, + parametrized, + valid_docker_repository_names, + valid_docker_upstream_names, +) DOCKER_PROVIDER = 'Docker' -def _create_repository(product, name=None, upstream_name=None): +def _create_repository(module_target_sat, product, name=None, upstream_name=None): """Create a Docker-based repository. :param product: A ``Product`` object. 
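The multi-provision test above cleans up with request.addfinalizer instead of a yield fixture; pytest runs finalizers in reverse registration order, so the rule registered last is deleted before the discovered hosts. A tiny Satellite-free sketch of that ordering, assuming pytest's default in-file test order:

teardown_log = []

def test_registers_finalizers(request):
    request.addfinalizer(lambda: teardown_log.append('host'))  # registered first, runs last
    request.addfinalizer(lambda: teardown_log.append('rule'))  # registered last, runs first

def test_finalizers_ran_lifo():
    # both finalizers fired during the previous test's teardown, LIFO
    assert teardown_log == ['rule', 'host']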
@@ -47,7 +39,7 @@ def _create_repository(product, name=None, upstream_name=None): name = choice(generate_strings_list(15, ['numeric', 'html'])) if upstream_name is None: upstream_name = CONTAINER_UPSTREAM_NAME - return entities.Repository( + return module_target_sat.api.Repository( content_type='docker', docker_upstream_name=upstream_name, name=name, @@ -57,21 +49,21 @@ def _create_repository(product, name=None, upstream_name=None): @pytest.fixture -def repo(module_product): +def repo(module_product, module_target_sat): """Create a single repository.""" - return _create_repository(module_product) + return _create_repository(module_target_sat, module_product) @pytest.fixture -def repos(module_product): +def repos(module_product, module_target_sat): """Create and return a list of repositories.""" - return [_create_repository(module_product) for _ in range(randint(2, 5))] + return [_create_repository(module_target_sat, module_product) for _ in range(randint(2, 5))] @pytest.fixture -def content_view(module_org): +def content_view(module_org, module_target_sat): """Create a content view.""" - return entities.ContentView(composite=False, organization=module_org).create() + return module_target_sat.api.ContentView(composite=False, organization=module_org).create() @pytest.fixture @@ -109,7 +101,7 @@ class TestDockerRepository: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_docker_repository_names())) - def test_positive_create_with_name(self, module_product, name): + def test_positive_create_with_name(self, module_product, name, module_target_sat): """Create one Docker-type repository :id: 3360aab2-74f3-4f6e-a083-46498ceacad2 @@ -121,14 +113,16 @@ def test_positive_create_with_name(self, module_product, name): :CaseImportance: Critical """ - repo = _create_repository(module_product, name) + repo = _create_repository(module_target_sat, module_product, name) assert repo.name == name assert repo.docker_upstream_name == CONTAINER_UPSTREAM_NAME assert repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(valid_docker_upstream_names())) - def test_positive_create_with_upstream_name(self, module_product, upstream_name): + def test_positive_create_with_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with a valid docker upstream name @@ -141,13 +135,15 @@ def test_positive_create_with_upstream_name(self, module_product, upstream_name) :CaseImportance: Critical """ - repo = _create_repository(module_product, upstream_name=upstream_name) + repo = _create_repository(module_target_sat, module_product, upstream_name=upstream_name) assert repo.docker_upstream_name == upstream_name assert repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(invalid_docker_upstream_names())) - def test_negative_create_with_invalid_upstream_name(self, module_product, upstream_name): + def test_negative_create_with_invalid_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with a invalid docker upstream name. 
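The parametrized tests above feed data through robottelo's parametrized helper, which packs values into the ids/argvalues keyword form accepted by pytest.mark.parametrize. A rough stand-in for the helper (the real one in robottelo.utils.datafactory may differ in detail):

import pytest

def parametrized(values):
    """Rough stand-in: map test data to pytest's ids/argvalues keywords."""
    if isinstance(values, dict):
        return {'ids': list(values.keys()), 'argvalues': list(values.values())}
    return {'ids': [str(v)[:30] for v in values], 'argvalues': list(values)}

@pytest.mark.parametrize('name', **parametrized(['alpha-repo', 'repo-123']))
def test_one_case_per_name(name):
    assert name  # pytest generates one test case per repository name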
@@ -161,25 +157,24 @@ def test_negative_create_with_invalid_upstream_name(self, module_product, upstre :CaseImportance: Critical """ with pytest.raises(HTTPError): - _create_repository(module_product, upstream_name=upstream_name) + _create_repository(module_target_sat, module_product, upstream_name=upstream_name) @pytest.mark.tier2 - def test_positive_create_repos_using_same_product(self, module_product): + def test_positive_create_repos_using_same_product(self, module_product, module_target_sat): """Create multiple Docker-type repositories :id: 4a6929fc-5111-43ff-940c-07a754828630 :expectedresults: Multiple docker repositories are created with a - Docker usptream repository and they all belong to the same product. + Docker upstream repository and they all belong to the same product. - :CaseLevel: Integration """ for _ in range(randint(2, 5)): - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) assert repo.id in [repo_.id for repo_ in module_product.read().repository] @pytest.mark.tier2 - def test_positive_create_repos_using_multiple_products(self, module_org): + def test_positive_create_repos_using_multiple_products(self, module_org, module_target_sat): """Create multiple Docker-type repositories on multiple products :id: 5a65d20b-d3b5-4bd7-9c8f-19c8af190558 @@ -188,34 +183,17 @@ def test_positive_create_repos_using_multiple_products(self, module_org): Docker upstream repository and they all belong to their respective products. - :CaseLevel: Integration """ for _ in range(randint(2, 5)): - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 3)): - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) product = product.read() assert repo.id in [repo_.id for repo_ in product.repository] - @pytest.mark.tier1 - def test_positive_sync(self, module_product): - """Create and sync a Docker-type repository - - :id: 80fbcd84-1c6f-444f-a44e-7d2738a0cba2 - - :expectedresults: A repository is created with a Docker repository and - it is synchronized. - - :CaseImportance: Critical - """ - repo = _create_repository(module_product) - repo.sync(timeout=600) - repo = repo.read() - assert repo.content_counts['docker_manifest'] >= 1 - @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_docker_repository_names())) - def test_positive_update_name(self, module_product, repo, new_name): + def test_positive_update_name(self, repo, new_name): """Create a Docker-type repository and update its name. :id: 7967e6b5-c206-4ad0-bcf5-64a7ce85233b @@ -251,7 +229,7 @@ def test_positive_update_upstream_name(self, repo): assert repo.docker_upstream_name == new_upstream_name @pytest.mark.tier2 - def test_positive_update_url(self, module_product, repo): + def test_positive_update_url(self, repo): """Create a Docker-type repository and update its URL. :id: 6a588e65-bf1d-4ca9-82ce-591f9070215f @@ -287,7 +265,7 @@ def test_positive_delete(self, repo): repo.read() @pytest.mark.tier2 - def test_positive_delete_random_repo(self, module_org): + def test_positive_delete_random_repo(self, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. 
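The content-view tests in the TestDockerContentView class that follows all repeat one publish/promote cycle; condensed into a helper it looks roughly like this (a sketch against nailgun-style sat.api objects as used in this diff, not a helper the repo actually defines):

def publish_and_promote(sat, org, repo, lce):
    """Attach a repo to a fresh content view, publish it, promote version 1 to an LCE."""
    content_view = sat.api.ContentView(composite=False, organization=org).create()
    content_view.repository = [repo]
    content_view = content_view.update(['repository'])  # persist the repo association
    content_view.publish()  # creates version 1 in the Library environment
    cvv = content_view.read().version[0].read()
    cvv.promote(data={'environment_ids': lce.id, 'force': False})
    return content_view.read()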
@@ -298,10 +276,11 @@ def test_positive_delete_random_repo(self, module_org): """ repos = [] products = [ - entities.Product(organization=module_org).create() for _ in range(randint(2, 5)) + module_target_sat.api.Product(organization=module_org).create() + for _ in range(randint(2, 5)) ] for product in products: - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) assert repo.content_type == 'docker' repos.append(repo) @@ -324,8 +303,6 @@ class TestDockerContentView: :CaseComponent: ContentViews :team: Phoenix-content - - :CaseLevel: Integration """ @pytest.mark.tier2 @@ -343,7 +320,7 @@ def test_positive_add_docker_repo(self, repo, content_view): assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repos(self, module_org, module_product, content_view): + def test_positive_add_docker_repos(self, module_target_sat, module_product, content_view): """Add multiple Docker-type repositories to a non-composite content view. @@ -353,7 +330,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie and the product is added to a non-composite content view. """ repos = [ - _create_repository(module_product, name=gen_string('alpha')) + _create_repository(module_target_sat, module_product, name=gen_string('alpha')) for _ in range(randint(2, 5)) ] repo_ids = {r.id for r in repos} @@ -371,7 +348,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie assert r.docker_upstream_name == CONTAINER_UPSTREAM_NAME @pytest.mark.tier2 - def test_positive_add_synced_docker_repo(self, module_org, module_product): + def test_positive_add_synced_docker_repo(self, module_org, module_product, module_target_sat): """Create and sync a Docker-type repository :id: 3c7d6f17-266e-43d3-99f8-13bf0251eca6 @@ -379,19 +356,21 @@ def test_positive_add_synced_docker_repo(self, module_org, module_product): :expectedresults: A repository is created with a Docker repository and it is synchronized. """ - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['docker_manifest'] > 0 # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repo_to_ccv(self, module_org): + def test_positive_add_docker_repo_to_ccv(self, module_org, module_target_sat): """Add one Docker-type repository to a composite content view :id: fe278275-2bb2-4d68-8624-f0cfd63ecb57 @@ -400,10 +379,14 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): the product is added to a content view which is then added to a composite content view. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -414,7 +397,9 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): assert len(content_view.version) == 1 # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = content_view.version comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -422,7 +407,7 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): ] @pytest.mark.tier2 - def test_positive_add_docker_repos_to_ccv(self, module_org): + def test_positive_add_docker_repos_to_ccv(self, module_org, module_target_sat): """Add multiple Docker-type repositories to a composite content view. @@ -433,11 +418,13 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): views which are then added to a composite content view. """ cv_versions = [] - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 5)): # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() - repo = _create_repository(product) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() + repo = _create_repository(module_target_sat, product) content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -448,14 +435,16 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): cv_versions.append(content_view.version[0]) # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() for cv_version in cv_versions: comp_content_view.component.append(cv_version) comp_content_view = comp_content_view.update(['component']) assert cv_version.id in [component.id for component in comp_content_view.component] @pytest.mark.tier2 - def test_positive_publish_with_docker_repo(self, module_org): + def test_positive_publish_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it once. :id: 86a73e96-ead6-41fb-8095-154a0b83e344 @@ -464,9 +453,13 @@ def test_positive_publish_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published only once. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -483,7 +476,7 @@ def test_positive_publish_with_docker_repo(self, module_org): assert float(content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_with_docker_repo_composite(self, module_org): + def test_positive_publish_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to composite content view and publish it once. @@ -496,8 +489,12 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): :BZ: 1217635 """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -514,7 +511,9 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(content_view.next_version) > 1.0 # Create composite content view… - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -528,7 +527,7 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(comp_content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo(self, module_org): + def test_positive_publish_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it multiple times. @@ -538,8 +537,12 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published multiple times. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -553,7 +556,9 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): assert len(content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): + def test_positive_publish_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it multiple times. @@ -564,8 +569,12 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): added to a composite content view which is then published multiple times. """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -575,7 +584,9 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): content_view = content_view.read() assert content_view.last_published is not None - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert [content_view.version[0].id] == [comp.id for comp in comp_content_view.component] @@ -589,7 +600,7 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): assert len(comp_content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_promote_with_docker_repo(self, module_org): + def test_positive_promote_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to the next available lifecycle-environment. @@ -598,10 +609,14 @@ def test_positive_promote_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -615,7 +630,7 @@ def test_positive_promote_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 2 @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo(self, module_org): + def test_positive_promote_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to multiple available lifecycle-environments. @@ -624,9 +639,13 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. """ - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -636,12 +655,12 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(cvv.read().environment) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_docker_repo_composite(self, module_org): + def test_positive_promote_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the next available lifecycle-environment. @@ -651,9 +670,13 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -661,7 +684,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -675,7 +700,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): + def test_positive_promote_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the multiple available lifecycle-environments @@ -685,8 +712,12 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -694,7 +725,9 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -704,13 +737,13 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): assert len(comp_cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() comp_cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(comp_cvv.read().environment) == i + 1 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_name_pattern_change(self, module_org): + def test_positive_name_pattern_change(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change registry name pattern for that environment. Verify that repository name on product changed according to new pattern. @@ -727,19 +760,23 @@ def test_positive_name_pattern_change(self, module_org): ) repo = _create_repository( - entities.Product(organization=module_org).create(), upstream_name=docker_upstream_name + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), + upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -748,7 +785,7 @@ def test_positive_name_pattern_change(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_positive_product_name_change_after_promotion(self, module_org): + def test_positive_product_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change product name. 
Verify that repository name on product changed according to new pattern. @@ -763,21 +800,23 @@ def test_positive_product_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = "<%= organization.label %>/<%= product.name %>" - prod = entities.Product(organization=module_org, name=old_prod_name).create() - repo = _create_repository(prod, upstream_name=docker_upstream_name) + prod = module_target_sat.api.Product(organization=module_org, name=old_prod_name).create() + repo = _create_repository(module_target_sat, prod, upstream_name=docker_upstream_name) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) prod.name = new_prod_name prod.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -787,7 +826,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -795,7 +834,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_positive_repo_name_change_after_promotion(self, module_org): + def test_positive_repo_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change repository name. Verify that Docker repository name on product changed according to new pattern. 
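The registry-name-pattern assertions above and below compare container_repository_name against a client-side rendering of the ERB pattern; a sketch of that expectation, assuming (as these tests' expected_pattern values suggest) that the rendered name comes back lowercased:

def expected_container_name(org_label, *segments):
    """Mimic '<%= organization.label %>/<%= ... %>' rendering for the assertions."""
    # assumption: Satellite lowercases rendered container repository names
    return '/'.join([org_label, *segments]).lower()

assert expected_container_name('MyOrg', 'Hello-World') == 'myorg/hello-world'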
@@ -811,23 +850,26 @@ def test_positive_repo_name_change_after_promotion(self, module_org): new_pattern = "<%= organization.label %>/<%= repository.name %>" repo = _create_repository( - entities.Product(organization=module_org).create(), + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), name=old_repo_name, upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) repo.name = new_repo_name repo.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -837,7 +879,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -845,7 +887,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_lce): + def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_target_sat): """Set registry name pattern to one that does not guarantee uniqueness. Try to promote content view with multiple Docker repositories to lifecycle environment. Verify that content has not been promoted. 
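The two negative tests that follow hinge on one server-side rule: a registry name pattern must render to a distinct container name for every repository in the content view, otherwise the promote (or the pattern update) is rejected with an HTTPError. A tiny client-side illustration of why '<%= organization.label %>' alone collides for two repositories:

def pattern_yields_unique_names(rendered_names):
    """True when every repository renders to a different container name."""
    return len(set(rendered_names)) == len(rendered_names)

# '<%= organization.label %>/<%= repository.name %>' stays unique per repository...
assert pattern_yields_unique_names(['org1/hello-world', 'org1/alpine'])
# ...but '<%= organization.label %>' alone collapses both repositories to one name
assert not pattern_yields_unique_names(['org1', 'org1'])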
@@ -857,16 +899,18 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() @@ -875,7 +919,7 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu cvv.promote(data={'environment_ids': lce.id, 'force': False}) @pytest.mark.tier2 - def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): + def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, module_target_sat): """Promote content view with multiple Docker repositories to lifecycle environment. Set registry name pattern to one that does not guarantee uniqueness. Verify that pattern has not been @@ -888,21 +932,23 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) + lce.registry_name_pattern = new_pattern with pytest.raises(HTTPError): - lce.registry_name_pattern = new_pattern lce.update(['registry_name_pattern']) @@ -912,13 +958,11 @@ class TestDockerActivationKey: :CaseComponent: ActivationKeys :team: Phoenix-subscriptions - - :CaseLevel: Integration """ @pytest.mark.tier2 def test_positive_add_docker_repo_cv( - self, module_lce, module_org, repo, content_view_publish_promote + self, module_lce, module_org, repo, content_view_publish_promote, module_target_sat ): """Add Docker-type repository to a non-composite content view and publish it. 
Then create an activation key and associate it with the @@ -930,7 +974,7 @@ def test_positive_add_docker_repo_cv( key """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -938,7 +982,7 @@ def test_positive_add_docker_repo_cv( @pytest.mark.tier2 def test_positive_remove_docker_repo_cv( - self, module_org, module_lce, content_view_publish_promote + self, module_org, module_lce, content_view_publish_promote, module_target_sat ): """Create an activation key and associate it with the Docker content view. Then remove this content view from the activation key. @@ -948,10 +992,9 @@ def test_positive_remove_docker_repo_cv( :expectedresults: Docker-based content view can be added and then removed from the activation key. - :CaseLevel: Integration """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -959,7 +1002,9 @@ def test_positive_remove_docker_repo_cv( assert ak.update(['content_view']).content_view is None @pytest.mark.tier2 - def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, module_org): + def test_positive_add_docker_repo_ccv( + self, content_view_version, module_lce, module_org, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. Create an activation key and associate it with the @@ -971,7 +1016,9 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo key """ cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -980,13 +1027,15 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id @pytest.mark.tier2 - def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_view_version): + def test_positive_remove_docker_repo_ccv( + self, module_lce, module_org, content_view_version, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. Create an activation key and associate it with the @@ -999,7 +1048,9 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v then removed from the activation key. 
""" cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -1008,7 +1059,7 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id diff --git a/tests/foreman/api/test_environment.py b/tests/foreman/api/test_environment.py index 1225659cb18..02f7a3928e3 100644 --- a/tests/foreman/api/test_environment.py +++ b/tests/foreman/api/test_environment.py @@ -8,26 +8,23 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from requests.exceptions import HTTPError -from robottelo.utils.datafactory import invalid_environments_list -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_environments_list +from robottelo.utils.datafactory import ( + invalid_environments_list, + invalid_names_list, + parametrized, + valid_environments_list, +) @pytest.mark.e2e @@ -203,8 +200,6 @@ def test_positive_update_loc(module_puppet_environment): field. :BZ: 1262029 - - :CaseLevel: Integration """ names = {'location', 'location_ids', 'locations'} attributes = set(module_puppet_environment.update_json([]).keys()) @@ -221,8 +216,6 @@ def test_positive_update_org(module_puppet_environment): ``organization`` field. 
:BZ: 1262029 - - :CaseLevel: Integration """ names = {'organization', 'organization_ids', 'organizations'} attributes = set(module_puppet_environment.update_json([]).keys()) diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index 775420e6c8d..ab4e5dd0c1c 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -4,30 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: System - :CaseComponent: ErrataManagement :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ # For ease of use hc refers to host-collection throughout this document -from time import sleep +from time import sleep, time import pytest -from nailgun import entities -from robottelo import constants -from robottelo.cli.factory import setup_org_for_a_custom_repo -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.cli.host import Host from robottelo.config import settings -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + FAKE_1_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE_NAME, + FAKE_4_CUSTOM_PACKAGE, + FAKE_4_CUSTOM_PACKAGE_NAME, + FAKE_5_CUSTOM_PACKAGE, + FAKE_9_YUM_OUTDATED_PACKAGES, + FAKE_9_YUM_SECURITY_ERRATUM, + FAKE_9_YUM_UPDATED_PACKAGES, + PRDS, + REAL_RHEL8_1_ERRATA_ID, + REAL_RHEL8_1_PACKAGE_FILENAME, + REPOS, + REPOSET, +) pytestmark = [ pytest.mark.run_in_one_thread, @@ -38,24 +44,53 @@ CUSTOM_REPO_URL = settings.repos.yum_9.url CUSTOM_REPO_ERRATA_ID = settings.repos.yum_6.errata[2] +ERRATA = [ + { + 'id': settings.repos.yum_6.errata[2], # security advisory + 'old_package': FAKE_1_CUSTOM_PACKAGE, + 'new_package': FAKE_2_CUSTOM_PACKAGE, + 'package_name': FAKE_2_CUSTOM_PACKAGE_NAME, + }, + { + 'id': settings.repos.yum_6.errata[0], # bugfix advisory + 'old_package': FAKE_4_CUSTOM_PACKAGE, + 'new_package': FAKE_5_CUSTOM_PACKAGE, + 'package_name': FAKE_4_CUSTOM_PACKAGE_NAME, + }, +] +REPO_WITH_ERRATA = { + 'url': settings.repos.yum_9.url, + 'errata': ERRATA, + 'errata_ids': settings.repos.yum_9.errata, +} @pytest.fixture(scope='module') -def activation_key(module_org, module_lce): - activation_key = entities.ActivationKey( - environment=module_lce, organization=module_org +def activation_key(module_sca_manifest_org, module_cv, module_lce, module_target_sat): + """A new Activation Key associated with published version + of module_cv, promoted to module_lce.""" + _cv = cv_publish_promote( + module_target_sat, + module_sca_manifest_org, + module_cv, + module_lce, + )['content-view'] + return module_target_sat.api.ActivationKey( + organization=module_sca_manifest_org, + environment=module_lce, + content_view=_cv, ).create() - return activation_key @pytest.fixture(scope='module') -def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, activation_key): - return setup_org_for_a_rh_repo( +def rh_repo(module_sca_manifest_org, module_lce, module_cv, activation_key, module_target_sat): + "rhel8 rh repos with errata and outdated/updated packages" + return module_target_sat.cli_factory.setup_org_for_a_rh_repo( { - 'product': constants.PRDS['rhel'], - 'repository-set': constants.REPOSET['rhst7'], - 'repository': constants.REPOS['rhst7']['name'], - 'organization-id': module_entitlement_manifest_org.id, + 'product': PRDS['rhel'], + 'repository-set': REPOSET['rhst8'], + 'repository': REPOS['rhst8']['name'], + 'organization-id': module_sca_manifest_org.id, 'content-view-id': module_cv.id, 'lifecycle-environment-id': 
module_lce.id,
            'activationkey-id': activation_key.id,
@@ -64,11 +99,12 @@ def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, activation_k
     )


 @pytest.fixture(scope='module')
-def custom_repo(module_org, module_lce, module_cv, activation_key):
-    return setup_org_for_a_custom_repo(
+def custom_repo(module_sca_manifest_org, module_lce, module_cv, activation_key, module_target_sat):
+    "zoo repos with errata and outdated/updated packages"
+    return module_target_sat.cli_factory.setup_org_for_a_custom_repo(
         {
-            'url': settings.repos.yum_9.url,
-            'organization-id': module_org.id,
+            'url': CUSTOM_REPO_URL,
+            'organization-id': module_sca_manifest_org.id,
             'content-view-id': module_cv.id,
             'lifecycle-environment-id': module_lce.id,
             'activationkey-id': activation_key.id,
@@ -76,54 +112,7 @@ def custom_repo(module_org, module_lce, module_cv, activation_key):
     )


-def _install_package(
-    module_org, clients, host_ids, package_name, via_ssh=True, rpm_package_name=None
-):
-    """Install package via SSH CLI if via_ssh is True, otherwise
-    install via http api: PUT /api/v2/hosts/bulk/install_content
-    """
-    if via_ssh:
-        for client in clients:
-            result = client.run(f'yum install -y {package_name}')
-            assert result.status == 0
-            result = client.run(f'rpm -q {package_name}')
-            assert result.status == 0
-    else:
-        entities.Host().install_content(
-            data={
-                'organization_id': module_org.id,
-                'included': {'ids': host_ids},
-                'content_type': 'package',
-                'content': [package_name],
-            }
-        )
-        _validate_package_installed(clients, rpm_package_name)
-
-
-def _validate_package_installed(hosts, package_name, expected_installed=True, timeout=240):
-    """Check whether package was installed on the list of hosts."""
-    for host in hosts:
-        for _ in range(timeout // 15):
-            result = host.run(f'rpm -q {package_name}')
-            if (
-                result.status == 0
-                and expected_installed
-                or result.status != 0
-                and not expected_installed
-            ):
-                break
-            sleep(15)
-        else:
-            pytest.fail(
-                'Package {} was not {} host {}'.format(
-                    package_name,
-                    'installed on' if expected_installed else 'removed from',
-                    host.hostname,
-                )
-            )
-
-
-def _validate_errata_counts(module_org, host, errata_type, expected_value, timeout=120):
+def _validate_errata_counts(host, errata_type, expected_value, timeout=120):
     """Check whether host contains expected errata counts."""
     for _ in range(timeout // 5):
         host = host.read()
@@ -142,167 +131,687 @@ def _validate_errata_counts(module_org, host, errata_type, expected_value, timeo
     )


-def _fetch_available_errata(module_org, host, expected_amount, timeout=120):
+def _fetch_available_errata(host, expected_amount=None, timeout=120):
     """Fetch available errata for host."""
     errata = host.errata()
     for _ in range(timeout // 5):
-        if len(errata['results']) == expected_amount:
+        if expected_amount is None or len(errata['results']) == expected_amount:
             return errata['results']
         sleep(5)
         errata = host.errata()
     else:
         pytest.fail(
             'Host {} contains {} available errata, but expected to '
-            'contain {} of them'.format(host.name, len(errata['results']), expected_amount)
+            'contain {} of them'.format(
+                host.name,
+                len(errata['results']),
+                expected_amount if expected_amount is not None else 'No expected_amount provided',
+            )
         )


-@pytest.mark.upgrade
-@pytest.mark.tier3
-@pytest.mark.rhel_ver_list([7, 8, 9])
-@pytest.mark.no_containers
-def test_positive_install_in_hc(module_org, activation_key, custom_repo, target_sat, content_hosts):
-    """Install errata in a host-collection
-
-    :id: 6f0242df-6511-4c0f-95fc-3fa32c63a064

-    :Setup: Errata synced on satellite server.

-    :Steps: PUT /api/v2/hosts/bulk/update_content

+def _fetch_available_errata_instances(sat, host, expected_amount=None, timeout=120):
+    """Fetch the list of instances of available errata for host."""
+    _errata_dict = _fetch_available_errata(host.nailgun_host, expected_amount, timeout)
+    _errata_ids = [errata['id'] for errata in _errata_dict]
+    instances = [sat.api.Errata(id=_id).read() for _id in _errata_ids]
+    assert (
+        len(instances) == len(_errata_dict) == host.applicable_errata_count
+    ), 'Length of errata instances list or api result differs from expected applicable count.'
+    return instances

-    :expectedresults: errata is installed in the host-collection.

-    :CaseLevel: System

+def errata_id_set(erratum_list):
+    """Return a set of unique errata ids, given a list of errata instances or dictionaries.
+    :raise: `AssertionError`: if an errata_id could not be found from a list entry.
+    :return: set{string}
+    """
+    result = set()
+    try:
+        # erratum_list is a list of errata instances
+        result = {e.errata_id for e in erratum_list}
+    except Exception:
+        try:
+            # erratum_list is a list of errata dictionary references
+            result = {e['errata_id'] for e in erratum_list}
+        except Exception as err:
+            # some errata_id cannot be extracted from an entry in erratum_list
+            raise AssertionError(
+                'Must take a dictionary ref or list of erratum instances, each entry needs attribute or key "errata_id".'
+                f' An entry in the given erratum_list had no discernible "errata_id". Errata(s): {erratum_list}.'
+            ) from err
+    return result


+def package_applicability_changed_as_expected(
+    sat,
+    host,
+    package_filename,
+    prior_applicable_errata_list,
+    prior_applicable_errata_count,
+    prior_applicable_package_count,
+    return_applicables=False,
+):
+    """Check that, after installing some package, the status of any impacted errata,
+    the host's errata applicability count, and the applicable package count changed as expected.
+
+    That is, one of the following occurred:
+    - A non-applicable package was modified, or the same prior version was installed;
+        the amount of applicable errata and applicable packages remains the same.
+        Return False, as no applicability changes occurred.
+
+    - An outdated applicable package was installed. Errata applicability increased
+        by the number of found applicable errata containing that package,
+        if the errata were not already applicable prior to install.
+        The number of applicable packages increased by one.
+
+    - An updated applicable package was installed. Errata applicability decreased
+        by the amount of found errata containing that package, if the errata are
+        no longer applicable, but they were prior to install, if any.
+        The number of applicable packages decreased by one.
+
+    :param string package_filename:
+        the full filename of the package version installed.
+    :param list prior_applicable_errata_list:
+        list of all erratum instances from search that were applicable before modifying the package.
+    :param int prior_applicable_errata_count:
+        number of total applicable errata prior to modifying the package.
+    :param int prior_applicable_package_count:
+        number of total applicable packages prior to modifying the package.
+    :param boolean return_applicables: (default False) if set to True, and the method's 'result' is not False:
+        return a dict containing the result, and relevant package and errata information.
+
+    :raise: `AssertionError` if:
+        Expected changes are not found.
+        Changes are made to unexpected errata or packages.
+        A non-readable prior list of erratum was passed.
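+
+    Example usage (an illustrative sketch, not exercised by the suite; `sat` is
+    the satellite session fixture and `host` a registered content host, as used
+    in the tests below):
+
+        prior_errata = _fetch_available_errata_instances(sat, host)
+        pre_errata_count = host.applicable_errata_count
+        pre_package_count = host.applicable_package_count
+        host.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
+        changed = package_applicability_changed_as_expected(
+            sat, host, FAKE_1_CUSTOM_PACKAGE, prior_errata, pre_errata_count, pre_package_count
+        )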
+    :return: result(boolean), or relevant applicables(dict)
+        False if found that no applicable package was modified.
+        True if the method finished executing and expected changes were found.
+
+    :return_applicables: if True: return dict of relevant applicable and changed entities:
+        result boolean: True, method finished executing
+        errata_count int: current applicable errata count
+        package_count int: current applicable package count
+        current_package string: current version filename of package
+        prior_package string: previous version filename of package
+        change_in_errata int: positive, negative, or zero
+        changed_errata list[string]: of modified errata_ids
+    """
+    assert (
+        len(prior_applicable_errata_list) == prior_applicable_errata_count
+    ), 'Length of "prior_applicable_errata_list" passed, must equal "prior_applicable_errata_count" passed.'
+    if len(prior_applicable_errata_list) != 0:
+        try:
+            prior_applicable_errata_list[0].read()
+        except Exception as err:
+            raise AssertionError(
+                'Exception on read of index zero in passed parameter "prior_applicable_errata_list".'
+                ' Must pass a list of readable erratum instances, or an empty list.'
+            ) from err
+    # schedule errata applicability recalculate for most current status
+    task = None
+    epoch_timestamp = int(time() - 1)
+    result = host.execute('subscription-manager repos')
+    assert (
+        result.status == 0
+    ), f'Command "subscription-manager repos" failed to execute on host: {host.hostname},\n{result}'
+
+    try:
+        task = sat.api_factory.wait_for_errata_applicability_task(
+            host_id=host.nailgun_host.id,
+            from_when=epoch_timestamp,
+        )
+    except AssertionError:
+        # No task for forced applicability regenerate,
+        # applicability was already up to date
+        assert task is None
+    package_basename = str(package_filename.split("-", 1)[0])  # 'package-4.0-1.rpm' > 'package'
+    prior_unique_errata_ids = errata_id_set(prior_applicable_errata_list)
+    current_applicable_errata = _fetch_available_errata_instances(sat, host)
+    app_unique_errata_ids = errata_id_set(current_applicable_errata)
+    app_errata_with_package_diff = []
+    app_errata_diff_ids = set()
+
+    if prior_applicable_errata_count == host.applicable_errata_count:
+        # Applicable errata count had no change.
+        # We expect applicable errata id(s) from search also did not change.
+        assert (
+            prior_unique_errata_ids == app_unique_errata_ids
+        ), 'Expected list of applicable erratum to remain the same.'
+        if prior_applicable_package_count == host.applicable_package_count:
+            # no applicable packages were modified
+            return False
+
+    if prior_applicable_errata_count != host.applicable_errata_count:
+        # Modifying the package changed errata applicability.
+        # We expect one or more errata id(s) from search to be added or removed.
+        difference = abs(prior_applicable_errata_count - host.applicable_errata_count)
+        # Check that the list of errata id(s) from search matches the current applicability count
+        assert (
+            len(app_unique_errata_ids) == host.applicable_errata_count
+        ), 'Length of applicable errata found by search does not match the applicability count.'
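+        # The two branches below attribute the count change to the package just
+        # modified: they collect only errata whose applicability flipped and whose
+        # package list references the package basename.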
+        # modifying the package increased errata applicability count (outdated version installed)
+        if prior_applicable_errata_count < host.applicable_errata_count:
+            # save the new errata(s) found, ones added since the package modify
+            app_errata_with_package_diff = [
+                errata
+                for errata in current_applicable_errata
+                if (
+                    any(package_basename in p for p in errata.packages)
+                    and errata.errata_id not in prior_unique_errata_ids
+                )
+            ]
+        # modifying the package decreased errata applicability count (updated version installed)
+        elif prior_applicable_errata_count > host.applicable_errata_count:
+            # save the old errata(s) found, ones removed since the package modify
+            app_errata_with_package_diff = [
+                errata
+                for errata in current_applicable_errata
+                if (
+                    not any(package_basename in p for p in errata.packages)
+                    and errata.errata_id in prior_unique_errata_ids
+                )
+            ]
+        app_errata_diff_ids = errata_id_set(app_errata_with_package_diff)
+        assert len(app_errata_diff_ids) > 0, (
+            f'Applicable errata count changed by {difference}, after modifying {package_filename},'
+            ' but could not find any affected errata(s) with a packages list'
+            f' that contains a matching package_basename: {package_basename}.'
+        )
+    # Check that applicable_package_count changed;
+    # if not, an applicable package was not modified.
+    if prior_applicable_package_count == host.applicable_package_count:
+        # if applicable packages remain the same, errata should also be the same
+        assert prior_applicable_errata_count == host.applicable_errata_count
+        assert prior_unique_errata_ids == app_unique_errata_ids
+        # no applicable errata were impacted by the package install
+        return False
+    # is the current errata list different from the one prior to the package install?
+    if app_unique_errata_ids != prior_unique_errata_ids:
+        difference = len(app_unique_errata_ids) - len(prior_unique_errata_ids)
+        # check that the diff in applicable counts equals the diff in length of errata search results.
+        assert prior_applicable_errata_count + difference == host.applicable_errata_count
+
+    """Check applicable_package count changed by one.
+    We expect applicable_errata_count to increase/decrease
+    only by the number of 'new' or 'removed' applicable errata, if any.
+    """
+    if app_errata_with_package_diff:
+        if host.applicable_errata_count > prior_applicable_errata_count:
+            """Current applicable errata count is higher than before install;
+            an outdated package is expected to have been installed.
+            Check applicable package count increased by one.
+            Check applicable errata count increased by the number
+            of newly applicable errata.
+            """
+            assert prior_applicable_package_count + 1 == host.applicable_package_count
+            expected_increase = 0
+            if app_unique_errata_ids != prior_unique_errata_ids:
+                difference = len(app_unique_errata_ids) - prior_applicable_errata_count
+                assert prior_applicable_errata_count + difference == host.applicable_errata_count
+                expected_increase = len(app_errata_diff_ids)
+            assert prior_applicable_errata_count + expected_increase == host.applicable_errata_count
+
+        elif host.applicable_errata_count < prior_applicable_errata_count:
+            """Current applicable errata count is lower than before install;
+            an updated package is expected to have been installed.
+            Check applicable package count decreased by one.
+            Check applicable errata count decreased by the number of
+            prior applicable errata that are no longer found.
+ """ + if host.applicable_errata_count < prior_applicable_errata_count: + assert host.applicable_package_count == prior_applicable_package_count - 1 + expected_decrease = 0 + if app_unique_errata_ids != prior_unique_errata_ids: + difference = len(app_unique_errata_ids) - len(prior_applicable_errata_count) + assert prior_applicable_errata_count + difference == host.applicable_errata_count + expected_decrease = len(app_errata_diff_ids) + assert prior_applicable_errata_count - expected_decrease == host.applicable_errata_count + else: + # We found by search an errata that was added or removed compared to prior install, + # But we also found that applicable_errata_count had not changed. + raise AssertionError( + f'Found one or more different errata: {app_errata_diff_ids},' + ' from those present prior to install, but applicable count did not change,' + f' {host.applicable_errata_count} were found, but expected {host.applicable_errata_count + len(app_errata_diff_ids)}.' + ) + else: + # already checked that applicable package count changed, + # but found applicable erratum list should not change, + # check the errata count and list remained the same. + assert ( + host.applicable_errata_count == prior_applicable_errata_count + ), 'Expected current applicable errata count, to equal prior applicable errata count.' + assert ( + len(current_applicable_errata) == prior_applicable_errata_count + ), 'Expected current applicable errata list length, to equal to prior applicable count.' + assert prior_unique_errata_ids == app_unique_errata_ids, ( + f'Expected set of prior applicable errata_ids: {prior_unique_errata_ids},' + f' to be equivalent to set of current applicable errata_ids: {app_unique_errata_ids}.' + ) + if return_applicables is True: + change_in_errata = len(app_unique_errata_ids) - prior_applicable_errata_count + output = host.execute(f'rpm -q {package_basename}').stdout + current_package = output[:-1] + assert package_basename in current_package + if current_package == package_filename: + # we have already checked if applicable package count changed, + # in case the same version as prior was installed and present. + prior_package = None # package must not have been present before this modification + else: + prior_package = package_filename + return { + 'result': True, + 'errata_count': host.applicable_errata_count, + 'package_count': host.applicable_package_count, + 'current_package': current_package, + 'prior_package': prior_package, + 'change_in_errata': change_in_errata, + 'changed_errata': list(app_errata_diff_ids), + } + return True - :expectedresults: errata is installed in the host-collection. - :CaseLevel: System +def cv_publish_promote(sat, org, cv, lce=None, needs_publish=True): + """Publish & promote Content View Version with all content visible in org. - :BZ: 1983043 + :param lce: if None, default to 'Library', + pass a single instance of lce, or list of instances. 
+    :param bool needs_publish: if False, skip the publish of a new version
+    :return dictionary:
+        'content-view': instance of the updated cv
+        'content-view-version': instance of the newest cv version
+    """
+    # Default to 'Library' lce, if None passed
+    # Take a single instance of lce, or list of instances
+    lce_ids = 'Library'
+    if lce is not None:
+        lce_ids = [lce.id] if not isinstance(lce, list) else [_lce.id for _lce in lce]
+
+    if needs_publish is True:
+        _publish_and_wait(sat, org, cv)
+    # Content-view must have at least one published version
+    cv = sat.api.ContentView(id=cv.id).read()
+    assert cv.version, f'No version(s) are published to the Content-View: {cv.id}'
+    # Find highest version id, will be the latest
+    cvv_id = max(cvv.id for cvv in cv.version)
+    # Promote to lifecycle-environment(s)
+    if lce_ids == 'Library':
+        library_lce = cv.environment[0].read()
+        sat.api.ContentViewVersion(id=cvv_id).promote(
+            data={'environment_ids': library_lce.id, 'force': 'True'}
+        )
+    else:
+        sat.api.ContentViewVersion(id=cvv_id).promote(data={'environment_ids': lce_ids})
+    _result = {
+        'content-view': sat.api.ContentView(id=cv.id).read(),
+        'content-view-version': sat.api.ContentViewVersion(id=cvv_id).read(),
+    }
+    assert all(
+        entry for entry in _result.values()
+    ), f'One or more necessary components are missing: {_result}'
+    return _result


+def _publish_and_wait(sat, org, cv):
+    """Publish a new version of a content-view to the organization, and wait for task(s) completion."""
+    task_id = sat.api.ContentView(id=cv.id).publish({'id': cv.id, 'organization': org})['id']
+    assert task_id, f'No task was invoked to publish the Content-View: {cv.id}.'
+    # Should take < 1 minute, check in 5s intervals
+    assert sat.wait_for_tasks(
+        search_query=(f'label = Actions::Katello::ContentView::Publish and id = {task_id}'),
+        search_rate=5,
+        max_tries=12,
+    ), (
+        f'Failed to publish the Content-View: {cv.id}, in time.'
+        f' Task: {task_id} failed, or timed out (60s).'
    )
-    for client in content_hosts:
-        result = client.run(f'rpm -q {constants.FAKE_2_CUSTOM_PACKAGE}')
-        assert result.status == 0


+@pytest.mark.upgrade
 @pytest.mark.tier3
-@pytest.mark.rhel_ver_list([7, 8, 9])
+@pytest.mark.rhel_ver_match('[^6]')
 @pytest.mark.no_containers
+@pytest.mark.e2e
+def test_positive_install_in_hc(
+    module_sca_manifest_org,
+    activation_key,
+    module_cv,
+    module_lce,
+    custom_repo,
+    target_sat,
+    content_hosts,
 ):
-    """Install errata in a host
+    """Install errata in a host-collection

-    :id: 1e6fc159-b0d6-436f-b945-2a5731c46df5
+    :id: 6f0242df-6511-4c0f-95fc-3fa32c63a064

-    :Setup: Errata synced on satellite server.
+    :Setup:
+        1. Some unregistered hosts.
+        2. Errata synced on satellite server.

-    :Steps: POST /api/v2/job_invocations/{hash}
+    :Steps:
+        1. Setup custom repo for each client, publish & promote content-view.
+        2. Register clients as content hosts, install one outdated custom package on each client.
+        3. Create Host Collection from clients, install errata to clients by Host Collection.
+        4. PUT /api/v2/hosts/bulk/update_content

-    :expectedresults: errata is installed in the host.
+    :expectedresults:
+        1. package install invokes errata applicability recalculate
+        2. errata is installed in the host-collection
+        3. errata installation invokes applicability recalculate
+        4. updated custom package is found on the contained hosts

-    :parametrized: yes
+    :CaseImportance: Medium

-    :CaseLevel: System

     :BZ: 1983043
     """
-    rhel_contenthost.install_katello_ca(target_sat)
-    rhel_contenthost.register_contenthost(module_org.label, activation_key.name)
-    assert rhel_contenthost.subscribed
-    host_id = rhel_contenthost.nailgun_host.id
-    _install_package(
-        module_org,
-        clients=[rhel_contenthost],
-        host_ids=[host_id],
-        package_name=constants.FAKE_1_CUSTOM_PACKAGE,
+    # The custom_repo fixture has already published a version of module_cv
+    repo_id = custom_repo['repository-id']
+    # just promote to lce, do not publish
+    cv_publish_promote(
+        target_sat, module_sca_manifest_org, module_cv, module_lce, needs_publish=False
     )
-    rhel_contenthost.add_rex_key(satellite=target_sat)
+    # Each client: create custom repo, register as content host to cv, install outdated package
+    for client in content_hosts:
+        _repo = target_sat.api.Repository(id=repo_id).read()
+        client.create_custom_repos(**{f'{_repo.name}': _repo.url})
+        result = client.register(
+            org=module_sca_manifest_org,
+            activation_keys=activation_key.name,
+            target=target_sat,
+            loc=None,
+        )
+        assert (
+            result.status == 0
+        ), f'Failed to register the host - {client.hostname}: {result.stderr}'
+        client.add_rex_key(satellite=target_sat)
+        assert client.subscribed
+        client.run(r'subscription-manager repos --enable \*')
+        # Remove custom package by name
+        client.run(f'yum remove -y {FAKE_2_CUSTOM_PACKAGE_NAME}')
+        # No applicable errata or packages to start
+        assert (pre_errata_count := client.applicable_errata_count) == 0
+        assert (pre_package_count := client.applicable_package_count) == 0
+        prior_app_errata = _fetch_available_errata_instances(target_sat, client, expected_amount=0)
+        # 1s margin of safety for rounding
+        epoch_timestamp = int(time() - 1)
+        # install outdated version
+        assert client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0
+        target_sat.api_factory.wait_for_errata_applicability_task(
+            host_id=client.nailgun_host.id,
+            from_when=epoch_timestamp,
+        )
+        assert client.run(f'rpm -q {FAKE_1_CUSTOM_PACKAGE}').status == 0
+        # One errata now applicable on client
+        assert client.applicable_errata_count == 1
+        # One package now has an applicable errata
+        assert client.applicable_package_count == 1
+        # Fetch the new errata instance(s), expecting only one
+        _fetch_available_errata_instances(target_sat, client, expected_amount=1)
+
+        """Did installing the outdated package update applicability as expected?
+        * Call method package_applicability_changed_as_expected *
+        returns: False if no applicability change occurred or was expected (package not applicable).
+            True if applicability changes were expected and occurred (package is applicable).
+        raises: `AssertionError` if any expected changes did not occur, or unexpected changes were found.
+
+        Expected: that each outdated package install updated one or more errata to applicable,
+        if those now-applicable errata(s) were not already applicable to some package prior.
+        """
+        passed_checks = package_applicability_changed_as_expected(
+            target_sat,
+            client,
+            FAKE_1_CUSTOM_PACKAGE,
+            prior_app_errata,
+            pre_errata_count,
+            pre_package_count,
+        )
+        assert (
+            passed_checks is True
+        ), f'The package: {FAKE_1_CUSTOM_PACKAGE}, was not applicable to any erratum present on host: {client.hostname}.'
+    # Setup host collection using client ids
+    host_collection = target_sat.api.HostCollection(organization=module_sca_manifest_org).create()
+    host_ids = [client.nailgun_host.id for client in content_hosts]
+    host_collection.host_ids = host_ids
+    host_collection = host_collection.update(['host_ids'])
+    # Install erratum to host collection
     task_id = target_sat.api.JobInvocation().run(
         data={
             'feature': 'katello_errata_install',
             'inputs': {'errata': str(CUSTOM_REPO_ERRATA_ID)},
             'targeting_type': 'static_query',
-            'search_query': f'name = {rhel_contenthost.hostname}',
-            'organization_id': module_org.id,
+            'search_query': f'host_collection_id = {host_collection.id}',
+            'organization_id': module_sca_manifest_org.id,
         },
     )['id']
-    target_sat.wait_for_tasks(
+    assert target_sat.wait_for_tasks(
         search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'),
         search_rate=15,
         max_tries=10,
+    ), (
+        f'Could not install erratum: {CUSTOM_REPO_ERRATA_ID}, to Host-Collection.'
+        f' Task: {task_id} failed, or timed out.'
     )
-    _validate_package_installed([rhel_contenthost], constants.FAKE_2_CUSTOM_PACKAGE)
+    for client in content_hosts:
+        # No applicable errata after install on all clients
+        assert client.applicable_errata_count == 0, (
+            f'A client in Host-Collection: {client.hostname}, had '
+            f'{client.applicable_errata_count} applicable errata, expected 0.'
+        )
+        # Updated package is present on all clients
+        result = client.run(f'rpm -q {FAKE_2_CUSTOM_PACKAGE}')
+        assert result.status == 0, (
+            f'The client in Host-Collection: {client.hostname},'
+            f' could not find the updated package: {FAKE_2_CUSTOM_PACKAGE}'
+        )
+        # No applicable packages on client
+        assert client.applicable_package_count == 0, (
+            f'A client in Host-Collection: {client.hostname}, had {client.applicable_package_count} '
+            f'applicable package(s) after installing erratum: {CUSTOM_REPO_ERRATA_ID}, but expected 0.'
+        )


 @pytest.mark.tier3
-@pytest.mark.rhel_ver_list([7, 8, 9])
+@pytest.mark.rhel_ver_match('[^6]')
 @pytest.mark.no_containers
 @pytest.mark.e2e
 def test_positive_install_multiple_in_host(
-    module_org, activation_key, custom_repo, rhel_contenthost, target_sat
+    target_sat, rhel_contenthost, module_org, activation_key, module_lce
 ):
     """For a host with multiple applicable errata install one and ensure
-    the rest of errata is still available
+    the rest of the errata are still available; repeat for a list of errata.
+    After each package or errata install, check that applicability updates
+    as expected.

     :id: 67b7e95b-9809-455a-a74e-f1815cc537fc

+    :setup:
+        1. An unregistered host.
+        2. Errata synced on satellite server.
+
+    :steps:
+        1. Setup content for a content host (repos, cv, etc)
+        2. Register vm as a content host
+        3. Remove any impacted custom packages present
+            - no applicable errata to start
+        4. Install outdated versions of the custom packages
+            - some expected applicable errata
+        5. Install any applicable security errata
+            - errata applicability drops after each install
+            - applicable packages drop by the amount updated
+            - impacted package(s) updated and found
+
+    :expectedresults:
+        1. Package installation succeeded, if the package makes a
+            new errata applicable; available errata counter
+            increased by one.
+        2. Errata apply task succeeded, available errata
+            counter decreased by one; it is possible to schedule
+            another errata installation.
+        3. Applicable package counter decreased by number
+            of updated packages. Updated package(s) found.
+        4. Errata recalculate applicability task is invoked
+            automatically, after install command of applicable package,
+            and errata apply task. Task(s) found and finish successfully.
+
     :customerscenario: true

     :BZ: 1469800, 1528275, 1983043, 1905560

-    :expectedresults: errata installation task succeeded, available errata
-        counter decreased by one; it's possible to schedule another errata
-        installation
-
     :CaseImportance: Medium

     :parametrized: yes

-    :CaseLevel: System
     """
-    rhel_contenthost.install_katello_ca(target_sat)
-    rhel_contenthost.register_contenthost(module_org.label, activation_key.name)
+    # Associate custom repos with org, lce, ak:
+    custom_repo_id = target_sat.cli_factory.setup_org_for_a_custom_repo(
+        {
+            'url': settings.repos.yum_9.url,
+            'organization-id': module_org.id,
+            'lifecycle-environment-id': module_lce.id,
+            'activationkey-id': activation_key.id,
+        }
+    )['repository-id']
+    rhel_contenthost.register(
+        activation_keys=activation_key.name,
+        target=target_sat,
+        org=module_org,
+        loc=None,
+    )
     assert rhel_contenthost.subscribed
-    host = rhel_contenthost.nailgun_host
-    for package in constants.FAKE_9_YUM_OUTDATED_PACKAGES:
-        _install_package(
-            module_org, clients=[rhel_contenthost], host_ids=[host.id], package_name=package
+    # 1s margin of safety for rounding
+    epoch_timestamp = int(time() - 1)
+    # Remove any packages errata could apply to, verify none are present on host
+    for package in FAKE_9_YUM_OUTDATED_PACKAGES:
+        pkg_name = str(package.split("-", 1)[0])  # 'bear-4.0-1.noarch' > 'bear'
+        rhel_contenthost.run(f'yum remove -y {pkg_name}')
+        assert rhel_contenthost.run(f'rpm -q {pkg_name}').status == 1
+
+    # Wait for any recalculate task(s), possibly invoked by yum remove,
+    # catch the AssertionError raised if no task was generated
+    try:
+        target_sat.api_factory.wait_for_errata_applicability_task(
+            host_id=rhel_contenthost.nailgun_host.id,
+            from_when=epoch_timestamp,
+        )
+    except AssertionError:
+        # Yum remove did not trigger any errata recalculate task,
+        # assert any YUM_9 packages were/are not present, then continue
+        present_packages = {
+            package.filename
+            for package in target_sat.api.Package(repository=custom_repo_id).search()
+        }
+        assert not set(FAKE_9_YUM_OUTDATED_PACKAGES).intersection(present_packages)
+        assert not set(FAKE_9_YUM_UPDATED_PACKAGES).intersection(present_packages)
+
+    # No applicable errata to start
+    assert rhel_contenthost.applicable_errata_count == 0
+    present_applicable_packages = []
+    # Installing all YUM_9 outdated custom packages
+    for i in range(len(FAKE_9_YUM_OUTDATED_PACKAGES)):
+        # record params prior to install, for post-install checks
+        package_filename = FAKE_9_YUM_OUTDATED_PACKAGES[i]
+        pre_errata_count = rhel_contenthost.applicable_errata_count
+        pre_package_count = rhel_contenthost.applicable_package_count
+        prior_app_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost)
+        # 1s margin of safety for rounding
+        epoch_timestamp = int(time() - 1)
+        assert rhel_contenthost.run(f'yum install -y {package_filename}').status == 0
+        # Wait for async errata recalculate task(s), invoked by yum install,
+        # searching back 1s prior to install.
+        target_sat.api_factory.wait_for_errata_applicability_task(
+            host_id=rhel_contenthost.nailgun_host.id,
+            from_when=epoch_timestamp,
+        )
+        # outdated package found on host
+        assert rhel_contenthost.run(f'rpm -q {package_filename}').status == 0
+        """
+        Modifying the applicable package did all of the following:
+        1. changed the package applicability count by one and only one.
+        2. changed the errata applicability count by the number of affected errata whose
+            applicability status changed after the package was modified.
+        3. changed the lists of applicable packages and applicable errata accordingly.
+        - otherwise the method below raises an `AssertionError`;
+        """
+        passed_checks = package_applicability_changed_as_expected(
+            target_sat,
+            rhel_contenthost,
+            package_filename,
+            prior_app_errata,
+            pre_errata_count,
+            pre_package_count,
+        )
+        # If passed_checks is False, this package was not applicable; continue to next.
+        if passed_checks is True:
+            present_applicable_packages.append(package_filename)
+
+    # Some applicable errata(s) now expected for the outdated packages
+    assert rhel_contenthost.applicable_errata_count > 0
+    # Expected applicable package(s) now for the applicable errata
+    assert rhel_contenthost.applicable_package_count == len(present_applicable_packages)
+    post_app_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost)
+    """Installing all YUM_9 security errata sequentially, if applicable.
+    After each install, the applicable-errata-count should drop by one,
+    and one or more of the erratum's listed packages should be updated.
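+    For example (hypothetical counts, for illustration only): with 5 applicable
+    errata and 7 applicable packages on the host, applying one security erratum
+    that updates 2 of its listed packages should leave 4 applicable errata and
+    5 applicable packages.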
+ """ + installed_errata = [] + updated_packages = [] + expected_errata_to_install = [ + errata.errata_id + for errata in post_app_errata + if errata.errata_id in FAKE_9_YUM_SECURITY_ERRATUM + ] + all_applicable_packages = set( + package for errata in post_app_errata for package in errata.packages + ) + security_packages_to_install = set() + for errata_id in FAKE_9_YUM_SECURITY_ERRATUM: + errata_instance = ( + target_sat.api.Errata().search(query={'search': f'errata_id="{errata_id}"'})[0].read() + ) + present_packages_impacted_by_errata = [ + package + for package in errata_instance.packages + if package in FAKE_9_YUM_UPDATED_PACKAGES + ] + security_packages_to_install.update(present_packages_impacted_by_errata) + # Are expected security errata packages found in all applicable packages ? + assert security_packages_to_install.issubset(all_applicable_packages) + # Try to install each ERRATUM in FAKE_9_YUM_SECURITY_ERRATUM list, + # Each time, check lists of applicable erratum and packages, and counts + for ERRATUM in FAKE_9_YUM_SECURITY_ERRATUM: + pre_errata_count = rhel_contenthost.applicable_errata_count + ERRATUM_instance = ( + target_sat.api.Errata().search(query={'search': f'errata_id="{ERRATUM}"'})[0].read() + ) + # Check each time before each install + applicable_errata = _fetch_available_errata_instances(target_sat, rhel_contenthost) + # If this ERRATUM is not applicable, continue to next + if (len(applicable_errata) == 0) or ( + ERRATUM not in [_errata.errata_id for _errata in applicable_errata] + ): + continue + assert pre_errata_count >= 1 + errata_packages = [] + pre_package_count = rhel_contenthost.applicable_package_count + # From search result, find this ERRATUM by erratum_id, + # save the relevant list of package(s) + for _errata in applicable_errata: + if _errata.errata_id == ERRATUM: + errata_packages = _errata.packages + assert len(errata_packages) >= 1 + epoch_timestamp = int(time() - 1) + # Install this ERRATUM to host, wait for REX task task_id = target_sat.api.JobInvocation().run( data={ 'feature': 'katello_errata_install', - 'inputs': {'errata': str(errata)}, + 'inputs': {'errata': str(ERRATUM)}, 'targeting_type': 'static_query', 'search_query': f'name = {rhel_contenthost.hostname}', 'organization_id': module_org.id, @@ -313,25 +822,102 @@ def test_positive_install_multiple_in_host( search_rate=20, max_tries=15, ) - applicable_errata_count -= 1 - assert rhel_contenthost.applicable_errata_count == applicable_errata_count + # Wait for async errata recalculate task(s), invoked by REX task + target_sat.api_factory.wait_for_errata_applicability_task( + host_id=rhel_contenthost.nailgun_host.id, + from_when=epoch_timestamp, + ) + # Host Applicable Errata count decreased by one + assert ( + rhel_contenthost.applicable_errata_count == pre_errata_count - 1 + ), f'Host applicable errata did not decrease by one, after installation of {ERRATUM}' + # Applying this ERRATUM updated one or more of the erratum's listed packages + found_updated_packages = [] + for package in errata_packages: + result = rhel_contenthost.run(f'rpm -q {package}') + if result.status == 0: + assert ( + package in FAKE_9_YUM_UPDATED_PACKAGES + ), f'An unexpected package: "{package}", was updated by this errata: {ERRATUM}.' 
+                if package in ERRATUM_instance.packages:
+                    found_updated_packages.append(package)
+
+        assert len(found_updated_packages) > 0, (
+            f'None of the expected errata.packages: {errata_packages}, were found on host: "{rhel_contenthost.hostname}",'
+            f' after installing the applicable errata: {ERRATUM}.'
+        )
+        # Host Applicable Packages count dropped by the number of packages updated
+        assert rhel_contenthost.applicable_package_count == pre_package_count - len(
+            found_updated_packages
+        ), (
+            f'Host: "{rhel_contenthost.hostname}" applicable package count did not decrease by {len(found_updated_packages)},'
+            f' after errata: {ERRATUM} installed updated packages: {found_updated_packages}'
+        )
+        installed_errata.append(ERRATUM)
+        updated_packages.extend(found_updated_packages)
+
+    # In case no ERRATUM in the list is applicable:
+    # lack of any package or errata install will raise `AssertionError`.
+    assert (
+        len(installed_errata) > 0
+    ), f'No applicable errata were found or installed from list: {FAKE_9_YUM_SECURITY_ERRATUM}.'
+    assert (
+        len(updated_packages) > 0
+    ), f'No applicable packages were found or installed from list: {FAKE_9_YUM_UPDATED_PACKAGES}.'
+    # Each expected erratum and package installed only once
+    pkg_set = set(updated_packages)
+    errata_set = set(installed_errata)
+    assert len(pkg_set) == len(
+        updated_packages
+    ), f'Expected no repeat packages in install list: {updated_packages}.'
+    assert len(errata_set) == len(
+        installed_errata
+    ), f'Expected no repeat errata in install list: {installed_errata}.'
+    # Only the expected YUM_9 packages were installed
+    assert set(updated_packages).issubset(set(FAKE_9_YUM_UPDATED_PACKAGES))
+    # Only the expected YUM_9 errata were installed
+    assert set(installed_errata).issubset(set(FAKE_9_YUM_SECURITY_ERRATUM))
+    # Check the number of installed errata id(s) matches expected
+    assert len(installed_errata) == len(expected_errata_to_install), (
+        f'Expected to install {len(expected_errata_to_install)} errata from list: {FAKE_9_YUM_SECURITY_ERRATUM},'
+        f' but installed: {len(installed_errata)}.'
+    )
+    # Check the sets of installed errata id(s) strings match expected
+    assert set(installed_errata) == set(
+        expected_errata_to_install
+    ), 'Expected errata id(s) and installed errata id(s) are not the same.'
+    # Check the number of updated package version filename(s) matches expected
+    assert len(updated_packages) == len(security_packages_to_install), (
+        f'Expected to install {len(security_packages_to_install)} packages from list: {FAKE_9_YUM_UPDATED_PACKAGES},'
+        f' but installed {len(updated_packages)}.'
+    )
+    # Check the sets of installed package filename(s) strings match expected
+    assert set(updated_packages) == set(
+        security_packages_to_install
+    ), 'Expected package version filename(s) and installed package version filename(s) are not the same.'


 @pytest.mark.tier3
 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
-def test_positive_list(module_org, custom_repo, target_sat):
-    """View all errata specific to repository
+def test_positive_list_sorted_filtered(custom_repo, target_sat):
+    """View, sort, and filter all errata specific to repository.

     :id: 1efceabf-9821-4804-bacf-2213ac0c7550

     :Setup: Errata synced on satellite server.

-    :Steps: Create two repositories each synced and containing errata
+    :Steps:
+
+        1. Create two repositories each synced and containing errata
+        2. GET /katello/api/errata

-    :expectedresults: Check that the errata belonging to one repo is not
-        showing in the other.
+ :expectedresults: + + 1. Check that the errata belonging to one repo is not + showing in the other. + 2. Check that the errata can be sorted by updated date, + issued date, and filtered by CVE. - :CaseLevel: System """ repo1 = target_sat.api.Repository(id=custom_repo['repository-id']).read() repo2 = target_sat.api.Repository( @@ -344,6 +930,7 @@ def test_positive_list(module_org, custom_repo, target_sat): repo2_errata_ids = [ errata['errata_id'] for errata in repo2.errata(data={'per_page': '1000'})['results'] ] + # Check errata are viewable, errata for one repo is not showing in the other assert len(repo1_errata_ids) == len(settings.repos.yum_9.errata) assert len(repo2_errata_ids) == len(settings.repos.yum_3.errata) assert CUSTOM_REPO_ERRATA_ID in repo1_errata_ids @@ -351,21 +938,7 @@ def test_positive_list(module_org, custom_repo, target_sat): assert settings.repos.yum_3.errata[5] in repo2_errata_ids assert settings.repos.yum_3.errata[5] not in repo1_errata_ids - -@pytest.mark.tier3 -def test_positive_list_updated(module_org, custom_repo, target_sat): - """View all errata in an Org sorted by Updated - - :id: 560d6584-70bd-4d1b-993a-cc7665a9e600 - - :Setup: Errata synced on satellite server. - - :Steps: GET /katello/api/errata - - :expectedresults: Errata is filtered by Org and sorted by Updated date. - - :CaseLevel: System - """ + # View all errata in Org sorted by Updated repo = target_sat.api.Repository(id=custom_repo['repository-id']).read() assert repo.sync()['result'] == 'success' erratum_list = target_sat.api.Errata(repository=repo).search( @@ -374,33 +947,17 @@ def test_positive_list_updated(module_org, custom_repo, target_sat): updated = [errata.updated for errata in erratum_list] assert updated == sorted(updated) - -@pytest.mark.tier3 -def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, target_sat): - """Sort by issued date and filter errata by CVE - - :id: a921d4c2-8d3d-4462-ba6c-fbd4b898a3f2 - - :Setup: Errata synced on satellite server. - - :Steps: GET /katello/api/errata - - :expectedresults: Errata is sorted by issued date and filtered by CVE. - - :CaseLevel: System - """ # Errata is sorted by issued date. - erratum_list = entities.Errata(repository=custom_repo['repository-id']).search( + erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'issued ASC', 'per_page': '1000'} ) issued = [errata.issued for errata in erratum_list] assert issued == sorted(issued) - # Errata is filtered by CVE erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'cve DESC', 'per_page': '1000'} ) - # Most of Errata don't have any CVEs. Removing empty CVEs from results + # Most Errata won't have any CVEs. Removing empty CVEs from results erratum_cves = [errata.cves for errata in erratum_list if errata.cves] # Verifying each errata have its CVEs sorted in DESC order for errata_cves in erratum_cves: @@ -409,64 +966,67 @@ def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, t @pytest.fixture(scope='module') -def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): - """Setup content fot rhel6 content host - Using `Red Hat Enterprise Virtualization Agents for RHEL 6 Server (RPMs)` - from manifest, SATTOOLS_REPO for host-tools and yum_9 repo as custom repo. 
-
-    :return: Activation Key, Organization, subscription list
-    """
-    org = module_entitlement_manifest_org
-    rh_repo_id_rhva = module_target_sat.api_factory.enable_rhrepo_and_fetchid(
-        basearch='x86_64',
-        org_id=org.id,
-        product=constants.PRDS['rhel'],
-        repo=constants.REPOS['rhva6']['name'],
-        reposet=constants.REPOSET['rhva6'],
-        releasever=constants.DEFAULT_RELEASE_VERSION,
-    )
-    rh_repo = entities.Repository(id=rh_repo_id_rhva).read()
-    rh_repo.sync()
+def setup_content_rhel8(
+    module_sca_manifest_org,
+    rh_repo_module_manifest,
+    activation_key,
+    module_lce,
+    module_cv,
+    module_target_sat,
+    return_result=True,
+):
+    """Setup content for rhel8 content host
+    Using RH SAT-TOOLS RHEL8 for sat-tools, and FAKE_YUM_9 as custom-repo.
+    Published to content-view and promoted to lifecycle-environment.

-    host_tools_product = entities.Product(organization=org).create()
-    host_tools_repo = entities.Repository(
-        product=host_tools_product,
-    ).create()
-    host_tools_repo.url = settings.repos.SATCLIENT_REPO.RHEL6
-    host_tools_repo = host_tools_repo.update(['url'])
-    host_tools_repo.sync()
+    Raises `AssertionError` if one or more of the setup components read are empty.

-    custom_product = entities.Product(organization=org).create()
-    custom_repo = entities.Repository(
-        product=custom_product,
-    ).create()
-    custom_repo.url = CUSTOM_REPO_URL
-    custom_repo = custom_repo.update(['url'])
+    :return: if return_result is True, a dictionary (_result) with the satellite
+        instances of activation-key, organization, content-view,
+        lifecycle-environment, rh_repo, custom_repo; otherwise None.
+    """
+    org = module_sca_manifest_org
+    # Setup Custom and RH repos
+    custom_repo_id = module_target_sat.cli_factory.setup_org_for_a_custom_repo(
+        {
+            'url': CUSTOM_REPO_URL,
+            'organization-id': org.id,
+            'lifecycle-environment-id': module_lce.id,
+            'activationkey-id': activation_key.id,
+            'content-view-id': module_cv.id,
+        }
+    )['repository-id']
+    custom_repo = module_target_sat.api.Repository(id=custom_repo_id).read()
     custom_repo.sync()
-
-    lce = entities.LifecycleEnvironment(organization=org).create()
-
-    cv = entities.ContentView(
-        organization=org,
-        repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id],
-    ).create()
-    cv.publish()
-    cvv = cv.read().version[0].read()
-    cvv.promote(data={'environment_ids': lce.id, 'force': False})
-
-    ak = entities.ActivationKey(content_view=cv, organization=org, environment=lce).create()
-
-    sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name]
-    for sub_name in sub_list:
-        subscription = entities.Subscription(organization=org).search(
-            query={'search': f'name="{sub_name}"'}
-        )[0]
-        ak.add_subscriptions(data={'subscription_id': subscription.id})
-    return ak, org, sub_list
+    # Sync and add RH repo
+    rh_repo = module_target_sat.api.Repository(id=rh_repo_module_manifest.id).read()
+    rh_repo.sync()
+    module_target_sat.cli.ContentView.add_repository(
+        {'id': module_cv.id, 'organization-id': org.id, 'repository-id': rh_repo.id}
+    )
+    _cv = cv_publish_promote(module_target_sat, org, module_cv, module_lce)
+    module_cv = _cv['content-view']
+    latest_cvv = _cv['content-view-version']
+
+    _result = {
+        'activation-key': activation_key.read(),
+        'organization': org.read(),
+        'content-view': module_cv.read(),
+        'content-view-version': latest_cvv.read(),
+        'lifecycle-environment': module_lce.read(),
+        'rh_repo': rh_repo.read(),
+        'custom_repo': custom_repo.read(),
+    }
+    assert all(
+        entry for entry in _result.values()
+    ), f'One or more
necessary components are not present: {_result}' + return _result if return_result else None -@pytest.mark.tier3 -def test_positive_get_count_for_host(setup_content_rhel6, rhel6_contenthost, target_sat): +@pytest.mark.tier2 +def test_positive_get_count_for_host( + setup_content_rhel8, activation_key, rhel8_contenthost, module_target_sat +): """Available errata count when retrieving Host :id: 2f35933f-8026-414e-8f75-7f4ec048faae @@ -474,88 +1034,131 @@ def test_positive_get_count_for_host(setup_content_rhel6, rhel6_contenthost, tar :Setup: 1. Errata synced on satellite server. - 2. Some Content hosts present. + 2. Some client host present. + 3. Some rh repo and custom repo, added to content-view. - :Steps: GET /api/v2/hosts + :Steps: + + 1. Register content host + 2. Install some outdated packages + 3. GET /api/v2/hosts - :expectedresults: The available errata count is retrieved. + :expectedresults: The applicable errata count is retrieved. - :CaseLevel: System :parametrized: yes :CaseImportance: Medium """ - ak_name = setup_content_rhel6[0].name - org_label = setup_content_rhel6[1].label - org_id = setup_content_rhel6[1].id - sub_list = setup_content_rhel6[2] - rhel6_contenthost.install_katello_ca(target_sat) - rhel6_contenthost.register_contenthost(org_label, ak_name) - assert rhel6_contenthost.subscribed - pool_id = rhel6_contenthost.subscription_manager_get_pool(sub_list=sub_list) - pool_list = [pool_id[0][0]] - rhel6_contenthost.subscription_manager_attach_pool(pool_list=pool_list) - rhel6_contenthost.install_katello_host_tools() - rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id']) - host = rhel6_contenthost.nailgun_host + org = setup_content_rhel8['organization'] + custom_repo = setup_content_rhel8['rh_repo'] + rhel8_contenthost.create_custom_repos(**{f'{custom_repo.name}': custom_repo.url}) + result = rhel8_contenthost.register( + org=org, + activation_keys=activation_key.name, + target=module_target_sat, + loc=None, + ) + assert ( + result.status == 0 + ), f'Failed to register the host - {rhel8_contenthost.hostname}: {result.stderr}' + assert rhel8_contenthost.subscribed + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + host = rhel8_contenthost.nailgun_host.read() + # No applicable errata to start + assert rhel8_contenthost.applicable_errata_count == 0 for errata in ('security', 'bugfix', 'enhancement'): - _validate_errata_counts(org_id, host, errata_type=errata, expected_value=0) - rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}') - _validate_errata_counts(org_id, host, errata_type='security', expected_value=1) - rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}') - _validate_errata_counts(org_id, host, errata_type='bugfix', expected_value=2) + _validate_errata_counts(host, errata_type=errata, expected_value=0) + # One bugfix errata after installing outdated Kangaroo + result = rhel8_contenthost.execute(f'yum install -y {FAKE_9_YUM_OUTDATED_PACKAGES[7]}') + assert result.status == 0, f'Failed to install package {FAKE_9_YUM_OUTDATED_PACKAGES[7]}' + _validate_errata_counts(host, errata_type='bugfix', expected_value=1) + # One enhancement errata after installing outdated Gorilla + result = rhel8_contenthost.execute(f'yum install -y {FAKE_9_YUM_OUTDATED_PACKAGES[3]}') + assert result.status == 0, f'Failed to install package {FAKE_9_YUM_OUTDATED_PACKAGES[3]}' + _validate_errata_counts(host, errata_type='enhancement', expected_value=1) + # Install and check two outdated packages, with applicable security 
erratum
+    # custom_repo outdated Walrus
+    result = rhel8_contenthost.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
+    assert result.status == 0, f'Failed to install package {FAKE_1_CUSTOM_PACKAGE}'
+    _validate_errata_counts(host, errata_type='security', expected_value=1)
+    # rh_repo outdated Puppet-agent
+    result = rhel8_contenthost.execute(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}')
+    assert result.status == 0, f'Failed to install package {REAL_RHEL8_1_PACKAGE_FILENAME}'
+    _validate_errata_counts(host, errata_type='security', expected_value=2)
+    # All available errata present
+    assert rhel8_contenthost.applicable_errata_count == 4


 @pytest.mark.upgrade
 @pytest.mark.tier3
-def test_positive_get_applicable_for_host(setup_content_rhel6, rhel6_contenthost, target_sat):
+def test_positive_get_applicable_for_host(
+    setup_content_rhel8, activation_key, rhel8_contenthost, target_sat
+):
     """Get applicable errata ids for a host

     :id: 51d44d51-eb3f-4ee4-a1df-869629d427ac

     :Setup:
+
         1. Errata synced on satellite server.
-        2. Some Content hosts present.
+        2. Some client hosts present.
+        3. Some rh repo and custom repo, added to content-view.

-    :Steps: GET /api/v2/hosts/:id/errata
+    :Steps:

-    :expectedresults: The available errata is retrieved.
+        1. Register vm as a content host
+        2. Install some outdated packages
+        3. GET /api/v2/hosts/:id/errata

-    :CaseLevel: System
+    :expectedresults: The available errata is retrieved.

     :parametrized: yes

     :CaseImportance: Medium
     """
-    ak_name = setup_content_rhel6[0].name
-    org_label = setup_content_rhel6[1].label
-    org_id = setup_content_rhel6[1].id
-    rhel6_contenthost.install_katello_ca(target_sat)
-    rhel6_contenthost.register_contenthost(org_label, ak_name)
-    assert rhel6_contenthost.subscribed
-    pool_id = rhel6_contenthost.subscription_manager_get_pool(sub_list=setup_content_rhel6[2])
-    pool_list = [pool_id[0][0]]
-    rhel6_contenthost.subscription_manager_attach_pool(pool_list=pool_list)
-    rhel6_contenthost.install_katello_host_tools()
-    rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id'])
-    host = rhel6_contenthost.nailgun_host
-    erratum = _fetch_available_errata(org_id, host, expected_amount=0)
+    org = setup_content_rhel8['organization']
+    custom_repo = setup_content_rhel8['rh_repo']
+
+    rhel8_contenthost.create_custom_repos(**{f'{custom_repo.name}': custom_repo.url})
+    result = rhel8_contenthost.register(
+        activation_keys=activation_key.name,
+        target=target_sat,
+        org=org,
+        loc=None,
+    )
+    assert (
+        result.status == 0
+    ), f'Failed to register the host - {rhel8_contenthost.hostname}: {result.stderr}'
+    assert rhel8_contenthost.subscribed
+    rhel8_contenthost.execute(r'subscription-manager repos --enable \*')
+    for errata in REPO_WITH_ERRATA['errata']:
+        # Remove custom package if present, old or new.
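+        # (removing both versions guarantees a clean baseline:
+        # the host must start with zero applicable errata)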
+ package_name = errata['package_name'] + result = rhel8_contenthost.execute(f'yum erase -y {package_name}') + if result.status != 0: + pytest.fail(f'Failed to remove {package_name}: {result.stdout} {result.stderr}') + + rhel8_contenthost.execute('subscription-manager repos') + assert rhel8_contenthost.applicable_errata_count == 0 + host = rhel8_contenthost.nailgun_host.read() + # Check no applicable errata to start + erratum = _fetch_available_errata(host, expected_amount=0) assert len(erratum) == 0 - rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}') - erratum = _fetch_available_errata(org_id, host, 1) + # Install outdated applicable custom package + rhel8_contenthost.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') + erratum = _fetch_available_errata(host, 1) assert len(erratum) == 1 assert CUSTOM_REPO_ERRATA_ID in [errata['errata_id'] for errata in erratum] - rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}') - erratum = _fetch_available_errata(org_id, host, 3) - assert len(erratum) == 3 - assert {constants.REAL_1_ERRATA_ID, constants.REAL_2_ERRATA_ID}.issubset( - {errata['errata_id'] for errata in erratum} - ) + # Install outdated applicable real package (from RH repo) + rhel8_contenthost.run(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}') + erratum = _fetch_available_errata(host, 2) + assert len(erratum) == 2 + assert REAL_RHEL8_1_ERRATA_ID in [errata['errata_id'] for errata in erratum] @pytest.mark.tier3 -def test_positive_get_diff_for_cv_envs(): +def test_positive_get_diff_for_cv_envs(target_sat): """Generate a difference in errata between a set of environments for a content view @@ -571,14 +1174,14 @@ def test_positive_get_diff_for_cv_envs(): :expectedresults: Difference in errata between a set of environments for a content view is retrieved. 
- :CaseLevel: System """ - org = entities.Organization().create() - env = entities.LifecycleEnvironment(organization=org).create() - content_view = entities.ContentView(organization=org).create() - activation_key = entities.ActivationKey(environment=env, organization=org).create() + org = target_sat.api.Organization().create() + env = target_sat.api.LifecycleEnvironment(organization=org).create() + content_view = target_sat.api.ContentView(organization=org).create() + activation_key = target_sat.api.ActivationKey(environment=env, organization=org).create() + # Published content-view-version with repos will be created for repo_url in [settings.repos.yum_9.url, CUSTOM_REPO_URL]: - setup_org_for_a_custom_repo( + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': repo_url, 'organization-id': org.id, @@ -587,33 +1190,35 @@ def test_positive_get_diff_for_cv_envs(): 'activationkey-id': activation_key.id, } ) - new_env = entities.LifecycleEnvironment(organization=org, prior=env).create() - cvvs = content_view.read().version[-2:] - cvvs[-1].promote(data={'environment_ids': new_env.id, 'force': False}) - result = entities.Errata().compare( - data={'content_view_version_ids': [cvv.id for cvv in cvvs], 'per_page': '9999'} + new_env = target_sat.api.LifecycleEnvironment(organization=org, prior=env).create() + # no need to publish a new version, just promote newest + cv_publish_promote( + sat=target_sat, org=org, cv=content_view, lce=[env, new_env], needs_publish=False + ) + content_view = target_sat.api.ContentView(id=content_view.id).read() + # Get last two versions by id to compare + cvv_ids = sorted(cvv.id for cvv in content_view.version)[-2:] + result = target_sat.api.Errata().compare( + data={'content_view_version_ids': [cvv_id for cvv_id in cvv_ids], 'per_page': '9999'} ) cvv2_only_errata = next( errata for errata in result['results'] if errata['errata_id'] == CUSTOM_REPO_ERRATA_ID ) - assert cvvs[-1].id in cvv2_only_errata['comparison'] + assert cvv_ids[-1] in cvv2_only_errata['comparison'] both_cvvs_errata = next( - errata - for errata in result['results'] - if errata['errata_id'] in constants.FAKE_9_YUM_SECURITY_ERRATUM + errata for errata in result['results'] if errata['errata_id'] in FAKE_9_YUM_SECURITY_ERRATUM ) - assert {cvv.id for cvv in cvvs} == set(both_cvvs_errata['comparison']) + assert {cvv_id for cvv_id in cvv_ids} == set(both_cvvs_errata['comparison']) @pytest.mark.tier3 def test_positive_incremental_update_required( - module_org, + module_sca_manifest_org, module_lce, activation_key, module_cv, - custom_repo, - rh_repo, - rhel7_contenthost, + rh_repo_module_manifest, + rhel8_contenthost, target_sat, ): """Given a set of hosts and errata, check for content view version @@ -642,111 +1247,128 @@ def test_positive_incremental_update_required( :parametrized: yes - :CaseLevel: System - :BZ: 2013093 """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost(module_org.label, activation_key.name) - assert rhel7_contenthost.subscribed - rhel7_contenthost.enable_repo(constants.REPOS['rhst7']['id']) - rhel7_contenthost.install_katello_agent() - host = rhel7_contenthost.nailgun_host - # install package to create demand for an Erratum - _install_package( - module_org, - [rhel7_contenthost], - [host.id], - constants.FAKE_1_CUSTOM_PACKAGE, - via_ssh=True, - rpm_package_name=constants.FAKE_1_CUSTOM_PACKAGE, + org = module_sca_manifest_org + rh_repo = target_sat.api.Repository( + id=rh_repo_module_manifest.id, + ).read() + rh_repo.sync() + # Add RH 
repo to content-view + target_sat.cli.ContentView.add_repository( + {'id': module_cv.id, 'organization-id': org.id, 'repository-id': rh_repo.id} ) + module_cv = target_sat.api.ContentView(id=module_cv.id).read() + _cv = cv_publish_promote(target_sat, org, module_cv, module_lce) + module_cv = _cv['content-view'] + + result = rhel8_contenthost.register( + org=org, + activation_keys=activation_key.name, + target=target_sat, + loc=None, + ) + assert result.status == 0, f'Failed to register the host: {rhel8_contenthost.hostname}' + assert rhel8_contenthost.subscribed + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + host = rhel8_contenthost.nailgun_host.read() + # install package to create demand for an Erratum + result = rhel8_contenthost.run(f'yum install -y {REAL_RHEL8_1_PACKAGE_FILENAME}') + assert result.status == 0, f'Failed to install package: {REAL_RHEL8_1_PACKAGE_FILENAME}' # Call nailgun to make the API POST to see if any incremental updates are required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ - 'organization_id': module_org.id, + 'organization_id': org.id, 'included': {'ids': [host.id]}, - 'errata_ids': [settings.repos.yum_6.errata[2]], + 'errata_ids': [REAL_RHEL8_1_ERRATA_ID], }, ) assert not response, 'Incremental update should not be required at this point' # Add filter of type include but do not include anything # this will hide all RPMs from selected erratum before publishing - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=module_cv, inclusion=True, name='Include Nothing' ).create() - module_cv.publish() - module_cv = module_cv.read() - CV1V = module_cv.version[-1].read() - # Must promote a CV version into a new Environment before we can add errata - CV1V.promote(data={'environment_ids': module_lce.id, 'force': False}) - module_cv = module_cv.read() + module_cv = target_sat.api.ContentView(id=module_cv.id).read() + module_cv = cv_publish_promote(target_sat, org, module_cv, module_lce)['content-view'] + rhel8_contenthost.execute('subscription-manager repos') # Call nailgun to make the API POST to ensure an incremental update is required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ - 'organization_id': module_org.id, + 'organization_id': org.id, 'included': {'ids': [host.id]}, - 'errata_ids': [settings.repos.yum_6.errata[2]], + 'errata_ids': [REAL_RHEL8_1_ERRATA_ID], }, ) - assert 'next_version' in response[0], 'Incremental update should be suggested' - 'at this point' + assert response, 'Nailgun response for host(s) with available incremental update was None' + assert 'next_version' in response[0], 'Incremental update should be suggested at this point' -def _run_remote_command_on_content_host(module_org, command, vm, return_result=False): +def _run_remote_command_on_content_host(command, vm, return_result=False): result = vm.run(command) assert result.status == 0 if return_result: return result.stdout + return None -def _set_prerequisites_for_swid_repos(module_org, vm): +def _set_prerequisites_for_swid_repos(vm): _run_remote_command_on_content_host( - module_org, f'curl --insecure --remote-name {settings.repos.swid_tools_repo}', vm + f'curl --insecure --remote-name {settings.repos.swid_tools_repo}', vm ) - _run_remote_command_on_content_host(module_org, "mv *swid*.repo /etc/yum.repos.d", vm) -
_run_remote_command_on_content_host(module_org, "yum install -y swid-tools", vm) - _run_remote_command_on_content_host(module_org, "dnf install -y dnf-plugin-swidtags", vm) + _run_remote_command_on_content_host('mv *swid*.repo /etc/yum.repos.d', vm) + _run_remote_command_on_content_host('yum install -y swid-tools', vm) + _run_remote_command_on_content_host('yum install -y dnf-plugin-swidtags', vm) -def _validate_swid_tags_installed(module_org, vm, module_name): +def _validate_swid_tags_installed(vm, module_name): result = _run_remote_command_on_content_host( - module_org, f"swidq -i -n {module_name} | grep 'Name'", vm, return_result=True + f"swidq -i -n {module_name} | grep 'Name'", vm, return_result=True ) assert module_name in result +@pytest.fixture +def errata_host_lce(module_sca_manifest_org, target_sat): + """Create and return a new lce in module SCA org.""" + return target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() + + @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.pit_client -@pytest.mark.parametrize( - 'module_repos_collection_with_manifest', - [{'YumRepository': {'url': settings.repos.swid_tag.url, 'distro': 'rhel8'}}], - indirect=True, -) @pytest.mark.no_containers +@pytest.mark.rhel_ver_match('8') def test_errata_installation_with_swidtags( - module_org, module_lce, module_repos_collection_with_manifest, rhel8_contenthost, target_sat + module_sca_manifest_org, + rhel_contenthost, + errata_host_lce, + target_sat, ): """Verify errata installation with swid_tags and swid tags get updated after module stream update. :id: 43a59b9a-eb9b-4174-8b8e-73d923b1e51e + :setup: + + 1. rhel8 contenthost checked out, using org with simple content access. + 2. create satellite repositories having rhel8 baseOS, prereqs, custom content w/ swid tags. + 3. associate repositories to org, lifecycle environment, and cv. Sync all content. + 4. publish & promote a content view version with all content to the environment. + 5. create activation key for registering host to cv. + :steps: - 1. create product and repository having swid tags - 2. create content view and published it with repository - 3. create activation key and register content host - 4. create rhel8, swid repos on content host - 5. install swid-tools, dnf-plugin-swidtags packages on content host - 6. install older module stream and generate errata, swid tag - 7. assert errata count, swid tags are generated - 8. install errata vis updating module stream - 9. assert errata count and swid tag after module update + 1. register host using cv's activation key, assert succeeded. + 2. install swid-tools, dnf-plugin-swidtags packages on content host. + 3. install older module stream and generate errata, swid tag. + 4. assert errata count, swid tags are generated. + 5. install errata via updating module stream. + 6. assert errata count and swid tag changed after module update.
- :expectedresults: swid tags should get updated after errata installation via - module stream update + :expectedresults: + swid tags should get updated after errata installation via module stream update :CaseAutomation: Automated @@ -754,214 +1376,119 @@ def test_errata_installation_with_swidtags( :CaseImportance: Critical - :CaseLevel: System """ module_name = 'kangaroo' version = '20180704111719' - # setup rhel8 and sat_tools_repos - rhel8_contenthost.create_custom_repos( - **{ - 'baseos': settings.repos.rhel8_os.baseos, - 'appstream': settings.repos.rhel8_os.appstream, - } - ) - module_repos_collection_with_manifest.setup_virtual_machine( - rhel8_contenthost, install_katello_agent=False - ) + org = module_sca_manifest_org + lce = errata_host_lce + cv = target_sat.api.ContentView( + organization=org, + environment=[lce], + ).create() - # install older module stream - rhel8_contenthost.add_rex_key(satellite=target_sat) - _set_prerequisites_for_swid_repos(module_org, vm=rhel8_contenthost) - _run_remote_command_on_content_host( - module_org, f'dnf -y module install {module_name}:0:{version}', rhel8_contenthost + repos = { + 'base_os': settings.repos.rhel8_os.baseos, # base rhel8 + 'sat_tools': settings.repos.rhel8_os.appstream, # swid prereqs + 'swid_tags': settings.repos.swid_tag.url, # module stream pkgs and errata + } + # associate repos with sat, org, lce, cv, and sync + for r in repos: + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': repos[r], + 'organization-id': org.id, + 'lifecycle-environment-id': lce.id, + 'content-view-id': cv.id, + }, + ) + # promote newest cv version with all repos/content + cv = cv_publish_promote( + sat=target_sat, + org=org, + cv=cv, + lce=lce, + )['content-view'] + # ak in env, tied to content-view + ak = target_sat.api.ActivationKey( + organization=org, + environment=lce, + content_view=cv, + ).create() + # register host with ak, succeeds + result = rhel_contenthost.register( + activation_keys=ak.name, + target=target_sat, + org=org, + loc=None, ) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) + assert result.status == 0, f'Failed to register the host {rhel_contenthost.hostname},\n{result}' + assert ( + rhel_contenthost.subscribed + ), f'Failed to subscribe the host {rhel_contenthost.hostname} to content.' + result = rhel_contenthost.execute(r'subscription-manager repos --enable \*') + assert result.status == 0, f'Failed to enable repositories with subscription-manager,\n{result}' + + # install outdated module stream package + _set_prerequisites_for_swid_repos(rhel_contenthost) + result = rhel_contenthost.execute(f'dnf -y module install {module_name}:0:{version}') + assert ( + result.status == 0 + ), f'Failed to install module stream package: {module_name}:0:{version}.\n{result.stdout}' + # recalculate errata after install of old module stream + rhel_contenthost.execute('subscription-manager repos') + + # validate swid tags Installed - before_errata_apply_result = _run_remote_command_on_content_host( - module_org, - f"swidq -i -n {module_name} | grep 'File' | grep -o 'rpm-.*.swidtag'", - rhel8_contenthost, - return_result=True, + result = rhel_contenthost.execute( + f'swidq -i -n {module_name} | grep "File" | grep -o "rpm-.*.swidtag"', + ) + assert ( + result.status == 0 + ), f'An error occurred trying to fetch swid tags for {module_name}.\n{result}' + before_errata_apply_result = result.stdout + assert before_errata_apply_result != '', f'Found no swid tags contained in {module_name}.'
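+ # Capture the starting count via walrus assignment; it is decremented and re-checked after the module update below.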
+ assert (app_errata_count := rhel_contenthost.applicable_errata_count) == 1, ( + f'Found {rhel_contenthost.applicable_errata_count} applicable errata,' + f' after installing {module_name}:0:{version}, expected 1.' ) - assert before_errata_apply_result != '' - applicable_errata_count = rhel8_contenthost.applicable_errata_count - assert applicable_errata_count == 1 # apply modular errata - _run_remote_command_on_content_host( - module_org, f'dnf -y module update {module_name}', rhel8_contenthost + result = rhel_contenthost.execute(f'dnf -y module update {module_name}') + assert ( + result.status == 0 + ), f'Failed to update module stream package: {module_name}.\n{result.stdout}' + assert rhel_contenthost.execute('dnf -y upload-profile').status == 0 + + # recalculate and check errata after modular update + rhel_contenthost.execute('subscription-manager repos') + app_errata_count -= 1 + assert rhel_contenthost.applicable_errata_count == app_errata_count, ( + f'Found {rhel_contenthost.applicable_errata_count} applicable errata, after modular update of {module_name},' + f' expected {app_errata_count}.' ) - _run_remote_command_on_content_host(module_org, 'dnf -y upload-profile', rhel8_contenthost) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) - applicable_errata_count -= 1 - assert rhel8_contenthost.applicable_errata_count == applicable_errata_count - after_errata_apply_result = _run_remote_command_on_content_host( - module_org, - f"swidq -i -n {module_name} | grep 'File'| grep -o 'rpm-.*.swidtag'", - rhel8_contenthost, - return_result=True, + # swidtags were updated based on package version + result = rhel_contenthost.execute( + f'swidq -i -n {module_name} | grep "File" | grep -o "rpm-.*.swidtag"', ) - - # swidtags get updated based on package version + assert ( + result.status == 0 + ), f'An error occurred trying to fetch swid tags for {module_name}.\n{result}' + after_errata_apply_result = result.stdout assert before_errata_apply_result != after_errata_apply_result -"""Section for tests using RHEL8 Content Host. - The applicability tests using Default Content View are related to the introduction of Pulp3.
- """ - - @pytest.fixture(scope='module') -def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): +def rh_repo_module_manifest(module_sca_manifest_org, module_target_sat): """Use module manifest org, creates tools repo, syncs and returns RH repo.""" # enable rhel repo and return its ID rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch=constants.DEFAULT_ARCHITECTURE, - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhel8'], - repo=constants.REPOS['rhst8']['name'], - reposet=constants.REPOSET['rhst8'], + basearch=DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=PRDS['rhel8'], + repo=REPOS['rhst8']['name'], + reposet=REPOSET['rhst8'], releasever='None', ) # Sync step because repo is not synced by default - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo - - -@pytest.fixture(scope='module') -def rhel8_custom_repo_cv(module_entitlement_manifest_org): - """Create repo and publish CV so that packages are in Library""" - return setup_org_for_a_custom_repo( - { - 'url': settings.repos.module_stream_1.url, - 'organization-id': module_entitlement_manifest_org.id, - } - ) - - -@pytest.fixture(scope='module') -def rhel8_module_ak( - module_entitlement_manifest_org, default_lce, rh_repo_module_manifest, rhel8_custom_repo_cv -): - rhel8_module_ak = entities.ActivationKey( - content_view=module_entitlement_manifest_org.default_content_view, - environment=entities.LifecycleEnvironment(id=module_entitlement_manifest_org.library.id), - organization=module_entitlement_manifest_org, - ).create() - # Ensure tools repo is enabled in the activation key - rhel8_module_ak.content_override( - data={ - 'content_overrides': [{'content_label': constants.REPOS['rhst8']['id'], 'value': '1'}] - } - ) - # Fetch available subscriptions - subs = entities.Subscription(organization=module_entitlement_manifest_org).search( - query={'search': f'{constants.DEFAULT_SUBSCRIPTION_NAME}'} - ) - assert subs - # Add default subscription to activation key - rhel8_module_ak.add_subscriptions(data={'subscription_id': subs[0].id}) - # Add custom subscription to activation key - product = entities.Product(organization=module_entitlement_manifest_org).search( - query={'search': "redhat=false"} - ) - custom_sub = entities.Subscription(organization=module_entitlement_manifest_org).search( - query={'search': f"name={product[0].name}"} - ) - rhel8_module_ak.add_subscriptions(data={'subscription_id': custom_sub[0].id}) - return rhel8_module_ak - - -@pytest.mark.tier2 -def test_apply_modular_errata_using_default_content_view( - module_entitlement_manifest_org, - default_lce, - rhel8_contenthost, - rhel8_module_ak, - rhel8_custom_repo_cv, - target_sat, -): - """ - Registering a RHEL8 system to the default content view with no modules enabled results in - no modular errata or packages showing as applicable or installable - - Enabling a module on a RHEL8 system assigned to the default content view and installing an - older package should result in the modular errata and package showing as applicable and - installable - - :id: 030981dd-19ba-4f8b-9c24-0aee90aaa4c4 - - Steps: - 1. Register host with AK, install tools - 2. Assert no errata indicated - 3. Install older version of stream - 4. Assert errata is applicable - 5. Update module stream - 6. 
Assert errata is no longer applicable - - :expectedresults: Errata enumeration works with module streams when using default Content View - - :CaseAutomation: Automated - - :parametrized: yes - - :CaseLevel: System - """ - module_name = 'duck' - stream = '0' - version = '20180704244205' - - rhel8_contenthost.install_katello_ca(target_sat) - rhel8_contenthost.register_contenthost( - module_entitlement_manifest_org.label, rhel8_module_ak.name - ) - assert rhel8_contenthost.subscribed - host = rhel8_contenthost.nailgun_host - host = host.read() - # Assert no errata on host, no packages applicable or installable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, expected_amount=0) - assert len(errata) == 0 - rhel8_contenthost.install_katello_host_tools() - # Install older version of module stream to generate the errata - result = rhel8_contenthost.execute( - f'yum -y module install {module_name}:{stream}:{version}', - ) - assert result.status == 0 - # Check that there is now two errata applicable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, 2) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) - assert len(errata) == 2 - # Assert that errata package is required - assert constants.FAKE_3_CUSTOM_PACKAGE in errata[0]['module_streams'][0]['packages'] - # Update module - result = rhel8_contenthost.execute( - f'yum -y module update {module_name}:{stream}:{version}', - ) - assert result.status == 0 - # Check that there is now no errata applicable - errata = _fetch_available_errata(module_entitlement_manifest_org, host, 0) - assert len(errata) == 0 - - @pytest.mark.tier2 - @pytest.mark.skip("Uses old large_errata repo from repos.fedorapeople") - def test_positive_sync_repos_with_large_errata(target_sat): - """Attempt to synchronize 2 repositories containing large (or lots of) - errata. - - :id: d6680b9f-4c88-40b4-8b96-3d170664cb28 - - :customerscenario: true - - :BZ: 1463811 - - :CaseLevel: Integration - - :expectedresults: both repositories were successfully synchronized - """ - org = target_sat.api.Organization().create() - for _ in range(2): - product = target_sat.api.Product(organization=org).create() - repo = target_sat.api.Repository(product=product, url=settings.repos.yum_7.url).create() - response = repo.sync() - assert response, f"Repository {repo} failed to sync." diff --git a/tests/foreman/api/test_filter.py b/tests/foreman/api/test_filter.py index 98cf6c26f51..b676a67c6da 100644 --- a/tests/foreman/api/test_filter.py +++ b/tests/foreman/api/test_filter.py @@ -8,34 +8,28 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ + import pytest -from nailgun import entities from requests.exceptions import HTTPError @pytest.fixture(scope='module') -def module_perms(): +def module_perms(module_target_sat): """Search for provisioning template permissions. Set ``cls.ct_perms``.""" - ct_perms = entities.Permission().search( + return module_target_sat.api.Permission().search( query={'search': 'resource_type="ProvisioningTemplate"'} ) - return ct_perms @pytest.mark.tier1 -def test_positive_create_with_permission(module_perms): +def test_positive_create_with_permission(module_perms, module_target_sat): """Create a filter and assign it some permissions. 
:id: b8631d0a-a71a-41aa-9f9a-d12d62adc496 @@ -45,14 +39,14 @@ def test_positive_create_with_permission(module_perms): :CaseImportance: Critical """ # Create a filter and assign all ProvisioningTemplate permissions to it - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_perms = [perm.id for perm in filter_.permission] perms = [perm.id for perm in module_perms] assert filter_perms == perms @pytest.mark.tier1 -def test_positive_delete(module_perms): +def test_positive_delete(module_perms, module_target_sat): """Create a filter and delete it afterwards. :id: f0c56fd8-c91d-48c3-ad21-f538313b17eb @@ -61,14 +55,14 @@ def test_positive_delete(module_perms): :CaseImportance: Critical """ - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_.delete() with pytest.raises(HTTPError): filter_.read() @pytest.mark.tier1 -def test_positive_delete_role(module_perms): +def test_positive_delete_role(module_perms, module_target_sat): """Create a filter and delete the role it points at. :id: b129642d-926d-486a-84d9-5952b44ac446 @@ -77,8 +71,8 @@ def test_positive_delete_role(module_perms): :CaseImportance: Critical """ - role = entities.Role().create() - filter_ = entities.Filter(permission=module_perms, role=role).create() + role = module_target_sat.api.Role().create() + filter_ = module_target_sat.api.Filter(permission=module_perms, role=role).create() # A filter depends on a role. Deleting a role implicitly deletes the # filter pointing at it. diff --git a/tests/foreman/api/test_foremantask.py b/tests/foreman/api/test_foremantask.py index 6ddcc3c0884..c736a0e7161 100644 --- a/tests/foreman/api/test_foremantask.py +++ b/tests/foreman/api/test_foremantask.py @@ -4,25 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: TasksPlugin :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities from requests.exceptions import HTTPError @pytest.mark.tier1 -def test_negative_fetch_non_existent_task(): +def test_negative_fetch_non_existent_task(target_sat): """Fetch a non-existent task. :id: a2a81ca2-63c4-47f5-9314-5852f5e2617f @@ -32,13 +26,13 @@ def test_negative_fetch_non_existent_task(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ForemanTask(id='abc123').read() + target_sat.api.ForemanTask(id='abc123').read() @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.e2e -def test_positive_get_summary(): +def test_positive_get_summary(target_sat): """Get a summary of foreman tasks. 
:id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce @@ -47,7 +41,7 @@ def test_positive_get_summary(): :CaseImportance: Critical """ - summary = entities.ForemanTask().summary() - assert type(summary) is list + summary = target_sat.api.ForemanTask().summary() + assert isinstance(summary, list) for item in summary: - assert type(item) is dict + assert isinstance(item, dict) diff --git a/tests/foreman/api/test_host.py b/tests/foreman/api/test_host.py index 5982918c8b0..94730d56925 100644 --- a/tests/foreman/api/test_host.py +++ b/tests/foreman/api/test_host.py @@ -8,44 +8,33 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import http -import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr -from fauxfactory import gen_mac -from fauxfactory import gen_string +from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_mac, gen_string from nailgun import client -from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.config import get_credentials -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT +from robottelo.constants import DEFAULT_CV, ENVIRONMENT from robottelo.utils import datafactory -def update_smart_proxy(location, smart_proxy): - if location.id not in [location.id for location in smart_proxy.location]: - smart_proxy.location.append(entities.Location(id=location.id)) - smart_proxy.update(['location']) +def update_smart_proxy(smart_proxy_location, smart_proxy): + if smart_proxy_location.id not in [location.id for location in smart_proxy.location]: + smart_proxy.location.append(smart_proxy_location) + smart_proxy.update(['location']) @pytest.mark.tier1 -def test_positive_get_search(): +def test_positive_get_search(target_sat): """GET ``api/v2/hosts`` and specify the ``search`` parameter. :id: d63f87e5-66e6-4886-8b44-4129259493a6 @@ -56,7 +45,7 @@ def test_positive_get_search(): """ query = gen_string('utf8', gen_integer(1, 100)) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'search': query}, verify=False, @@ -66,7 +55,7 @@ def test_positive_get_search(): @pytest.mark.tier1 -def test_positive_get_per_page(): +def test_positive_get_per_page(target_sat): """GET ``api/v2/hosts`` and specify the ``per_page`` parameter. 
:id: 9086f41c-b3b9-4af2-b6c4-46b80b4d1cfd @@ -78,7 +67,7 @@ def test_positive_get_per_page(): """ per_page = gen_integer(1, 1000) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'per_page': str(per_page)}, verify=False, @@ -88,7 +77,7 @@ def test_positive_get_per_page(): @pytest.mark.tier2 -def test_positive_search_by_org_id(): +def test_positive_search_by_org_id(target_sat): """Search for host by specifying host's organization id :id: 56353f7c-b77e-4b6c-9ec3-51b58f9a18d8 @@ -99,12 +88,10 @@ def test_positive_search_by_org_id(): organization id was done :BZ: 1447958 - - :CaseLevel: Integration """ - host = entities.Host().create() + host = target_sat.api.Host().create() # adding org id as GET parameter for correspondence with BZ - query = entities.Host() + query = target_sat.api.Host() query._meta['api_path'] += f'?organization_id={host.organization.id}' results = query.search() assert len(results) == 1 @@ -113,7 +100,7 @@ def test_positive_search_by_org_id(): @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_negative_create_with_owner_type(owner_type): +def test_negative_create_with_owner_type(owner_type, target_sat): """Create a host and specify only ``owner_type``. :id: cdf9d16f-1c47-498a-be48-901355385dde @@ -125,13 +112,15 @@ def test_negative_create_with_owner_type(owner_type): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.Host(owner_type=owner_type).create() + target_sat.api.Host(owner_type=owner_type).create() assert str(422) in str(error) @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_positive_update_owner_type(owner_type, module_org, module_location, module_user): +def test_positive_update_owner_type( + owner_type, module_org, module_location, module_user, module_target_sat +): """Update a host's ``owner_type``. 
:id: b72cd8ef-3a0b-4d2d-94f9-9b64908d699a @@ -147,9 +136,9 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod """ owners = { 'User': module_user, - 'Usergroup': entities.UserGroup().create(), + 'Usergroup': module_target_sat.api.UserGroup().create(), } - host = entities.Host(organization=module_org, location=module_location).create() + host = module_target_sat.api.Host(organization=module_org, location=module_location).create() host.owner_type = owner_type host.owner = owners[owner_type] host = host.update(['owner_type', 'owner']) @@ -157,9 +146,8 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod assert host.owner.read() == owners[owner_type] -@pytest.mark.build_sanity @pytest.mark.tier1 -def test_positive_create_and_update_with_name(): +def test_positive_create_and_update_with_name(target_sat): """Create and update a host with different names and minimal input parameters :id: a7c0e8ec-3816-4092-88b1-0324cb271752 @@ -169,7 +157,7 @@ def test_positive_create_and_update_with_name(): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name).create() + host = target_sat.api.Host(name=name).create() assert host.name == f'{name}.{host.domain.read().name}' new_name = gen_choice(datafactory.valid_hosts_list()) host.name = new_name @@ -178,7 +166,7 @@ def test_positive_create_and_update_with_name(): @pytest.mark.tier1 -def test_positive_create_and_update_with_ip(): +def test_positive_create_and_update_with_ip(target_sat): """Create and update host with IP address specified :id: 3f266906-c509-42ce-9b20-def448bf8d86 @@ -188,7 +176,7 @@ def test_positive_create_and_update_with_ip(): :CaseImportance: Critical """ ip_addr = gen_ipaddr() - host = entities.Host(ip=ip_addr).create() + host = target_sat.api.Host(ip=ip_addr).create() assert host.ip == ip_addr new_ip_addr = gen_ipaddr() host.ip = new_ip_addr @@ -197,7 +185,7 @@ def test_positive_create_and_update_with_ip(): @pytest.mark.tier1 -def test_positive_create_and_update_mac(): +def test_positive_create_and_update_mac(target_sat): """Create host with MAC address and update it :id: 72e3b020-7347-4500-8669-c6ddf6dfd0b6 @@ -208,7 +196,7 @@ def test_positive_create_and_update_mac(): """ mac = gen_mac(multicast=False) - host = entities.Host(mac=mac).create() + host = target_sat.api.Host(mac=mac).create() assert host.mac == mac new_mac = gen_mac(multicast=False) host.mac = new_mac @@ -218,19 +206,19 @@ def test_positive_create_and_update_mac(): @pytest.mark.tier2 def test_positive_create_and_update_with_hostgroup( - module_org, module_location, module_lce, module_published_cv + module_org, module_location, module_lce, module_published_cv, module_target_sat ): """Create and update host with hostgroup specified :id: 8f9601f9-afd8-4a88-8f28-a5cbc996e805 :expectedresults: A host is created and updated with expected hostgroup assigned - - :CaseLevel: Integration """ module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) - hostgroup = entities.HostGroup(location=[module_location], organization=[module_org]).create() - host = entities.Host( + hostgroup = module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() + host = module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -240,7 +228,7 @@ def test_positive_create_and_update_with_hostgroup( }, ).create() assert host.hostgroup.read().name == hostgroup.name - 
new_hostgroup = entities.HostGroup( + new_hostgroup = module_target_sat.api.HostGroup( location=[host.location], organization=[host.organization] ).create() host.hostgroup = new_hostgroup @@ -253,7 +241,9 @@ def test_positive_create_and_update_with_hostgroup( @pytest.mark.tier2 -def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_library, module_org): +def test_positive_create_inherit_lce_cv( + module_default_org_view, module_lce_library, module_org, module_target_sat +): """Create a host with hostgroup specified. Make sure host inherited hostgroup's lifecycle environment and content-view @@ -262,16 +252,14 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr :expectedresults: Host's lifecycle environment and content view match the ones specified in hostgroup - :CaseLevel: Integration - :BZ: 1391656 """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( content_view=module_default_org_view, lifecycle_environment=module_lce_library, organization=[module_org], ).create() - host = entities.Host(hostgroup=hostgroup, organization=module_org).create() + host = module_target_sat.api.Host(hostgroup=hostgroup, organization=module_org).create() assert ( host.content_facet_attributes['lifecycle_environment_id'] == hostgroup.lifecycle_environment.id @@ -280,7 +268,7 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr @pytest.mark.tier2 -def test_positive_create_with_inherited_params(module_org, module_location): +def test_positive_create_with_inherited_params(module_org, module_location, module_target_sat): """Create a new Host in organization and location with parameters :BZ: 1287223 @@ -294,18 +282,20 @@ def test_positive_create_with_inherited_params(module_org, module_location): :CaseImportance: High """ - org_param = entities.Parameter(organization=module_org).create() - loc_param = entities.Parameter(location=module_location).create() - host = entities.Host(location=module_location, organization=module_org).create() + org_param = module_target_sat.api.Parameter(organization=module_org).create() + loc_param = module_target_sat.api.Parameter(location=module_location).create() + host = module_target_sat.api.Host(location=module_location, organization=module_org).create() # get global parameters - glob_param_list = {(param.name, param.value) for param in entities.CommonParameter().search()} + glob_param_list = { + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() + } # if there are no global parameters, create one if len(glob_param_list) == 0: param_name = gen_string('alpha') param_global_value = gen_string('numeric') - entities.CommonParameter(name=param_name, value=param_global_value).create() + module_target_sat.api.CommonParameter(name=param_name, value=param_global_value).create() glob_param_list = { - (param.name, param.value) for param in entities.CommonParameter().search() + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() } assert len(host.all_parameters) == 2 + len(glob_param_list) innerited_params = {(org_param.name, org_param.value), (loc_param.name, loc_param.value)} @@ -404,29 +394,30 @@ def test_positive_end_to_end_with_puppet_class( @pytest.mark.tier2 -def test_positive_create_and_update_with_subnet(module_location, module_org, module_default_subnet): +def test_positive_create_and_update_with_subnet( + module_location, module_org, module_default_subnet, module_target_sat +): """Create and 
update a host with subnet specified :id: 9aa97aff-8439-4027-89ee-01c643fbf7d1 :expectedresults: A host is created and updated with expected subnet assigned - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( location=module_location, organization=module_org, subnet=module_default_subnet ).create() assert host.subnet.read().name == module_default_subnet.name - new_subnet = entities.Subnet(location=[module_location], organization=[module_org]).create() + new_subnet = module_target_sat.api.Subnet( + location=[module_location], organization=[module_org] + ).create() host.subnet = new_subnet host = host.update(['subnet']) assert host.subnet.read().name == new_subnet.name @pytest.mark.tier2 -@pytest.mark.on_premises_provisioning def test_positive_create_and_update_with_compresource( - module_org, module_location, module_cr_libvirt + module_org, module_location, module_cr_libvirt, module_target_sat ): """Create and update a host with compute resource specified @@ -434,14 +425,12 @@ def test_positive_create_and_update_with_compresource( :expectedresults: A host is created and updated with expected compute resource assigned - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( compute_resource=module_cr_libvirt, location=module_location, organization=module_org ).create() assert host.compute_resource.read().name == module_cr_libvirt.name - new_compresource = entities.LibvirtComputeResource( + new_compresource = module_target_sat.api.LibvirtComputeResource( location=[host.location], organization=[host.organization] ).create() host.compute_resource = new_compresource @@ -450,65 +439,65 @@ def test_positive_create_and_update_with_compresource( @pytest.mark.tier2 -def test_positive_create_and_update_with_model(module_model): +def test_positive_create_and_update_with_model(module_model, module_target_sat): """Create and update a host with model specified :id: 7a912a19-71e4-4843-87fd-bab98c156f4a :expectedresults: A host is created and updated with expected model assigned - - :CaseLevel: Integration """ - host = entities.Host(model=module_model).create() + host = module_target_sat.api.Host(model=module_model).create() assert host.model.read().name == module_model.name - new_model = entities.Model().create() + new_model = module_target_sat.api.Model().create() host.model = new_model host = host.update(['model']) assert host.model.read().name == new_model.name @pytest.mark.tier2 -def test_positive_create_and_update_with_user(module_org, module_location, module_user): +def test_positive_create_and_update_with_user( + module_org, module_location, module_user, module_target_sat +): """Create and update host with user specified :id: 72e20f8f-17dc-4e38-8ac1-d08df8758f56 :expectedresults: A host is created and updated with expected user assigned - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( owner=module_user, owner_type='User', organization=module_org, location=module_location ).create() assert host.owner.read() == module_user - new_user = entities.User(organization=[module_org], location=[module_location]).create() + new_user = module_target_sat.api.User( + organization=[module_org], location=[module_location] + ).create() host.owner = new_user host = host.update(['owner']) assert host.owner.read() == new_user @pytest.mark.tier2 -def test_positive_create_and_update_with_usergroup(module_org, module_location, function_role): +def test_positive_create_and_update_with_usergroup( + 
module_org, module_location, function_role, module_target_sat +): """Create and update host with user group specified :id: 706e860c-8c05-4ddc-be20-0ecd9f0da813 :expectedresults: A host is created and updated with expected user group assigned - - :CaseLevel: Integration """ - user = entities.User( + user = module_target_sat.api.User( location=[module_location], organization=[module_org], role=[function_role] ).create() - usergroup = entities.UserGroup(role=[function_role], user=[user]).create() - host = entities.Host( + usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() + host = module_target_sat.api.Host( location=module_location, organization=module_org, owner=usergroup, owner_type='Usergroup', ).create() assert host.owner.read().name == usergroup.name - new_usergroup = entities.UserGroup(role=[function_role], user=[user]).create() + new_usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() host.owner = new_usergroup host = host.update(['owner']) assert host.owner.read().name == new_usergroup.name @@ -516,7 +505,7 @@ def test_positive_create_and_update_with_usergroup(module_org, module_location, @pytest.mark.tier1 @pytest.mark.parametrize('build', [True, False]) -def test_positive_create_and_update_with_build_parameter(build): +def test_positive_create_and_update_with_build_parameter(build, target_sat): """Create and update a host with 'build' parameter specified. Build parameter determines whether to enable the host for provisioning @@ -529,7 +518,7 @@ def test_positive_create_and_update_with_build_parameter(build): :CaseImportance: Critical """ - host = entities.Host(build=build).create() + host = target_sat.api.Host(build=build).create() assert host.build == build host.build = not build host = host.update(['build']) @@ -538,7 +527,7 @@ def test_positive_create_and_update_with_build_parameter(build): @pytest.mark.tier1 @pytest.mark.parametrize('enabled', [True, False], ids=['enabled', 'disabled']) -def test_positive_create_and_update_with_enabled_parameter(enabled): +def test_positive_create_and_update_with_enabled_parameter(enabled, target_sat): """Create and update a host with 'enabled' parameter specified. Enabled parameter determines whether to include the host within Satellite 6 reporting @@ -552,7 +541,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): :CaseImportance: Critical """ - host = entities.Host(enabled=enabled).create() + host = target_sat.api.Host(enabled=enabled).create() assert host.enabled == enabled host.enabled = not enabled host = host.update(['enabled']) @@ -561,7 +550,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): @pytest.mark.tier1 @pytest.mark.parametrize('managed', [True, False], ids=['managed', 'unmanaged']) -def test_positive_create_and_update_with_managed_parameter(managed): +def test_positive_create_and_update_with_managed_parameter(managed, target_sat): """Create and update a host with managed parameter specified. 
Managed flag shows whether the host is managed or unmanaged and determines whether some extra parameters are required @@ -575,7 +564,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): :CaseImportance: Critical """ - host = entities.Host(managed=managed).create() + host = target_sat.api.Host(managed=managed).create() assert host.managed == managed host.managed = not managed host = host.update(['managed']) @@ -583,7 +572,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): @pytest.mark.tier1 -def test_positive_create_and_update_with_comment(): +def test_positive_create_and_update_with_comment(target_sat): """Create and update a host with a comment :id: 9b78663f-139c-4d0b-9115-180624b0d41b @@ -593,7 +582,7 @@ def test_positive_create_and_update_with_comment(): :CaseImportance: Critical """ comment = gen_choice(list(datafactory.valid_data_list().values())) - host = entities.Host(comment=comment).create() + host = target_sat.api.Host(comment=comment).create() assert host.comment == comment new_comment = gen_choice(list(datafactory.valid_data_list().values())) host.comment = new_comment @@ -602,19 +591,17 @@ def test_positive_create_and_update_with_comment(): @pytest.mark.tier2 -def test_positive_create_and_update_with_compute_profile(module_compute_profile): +def test_positive_create_and_update_with_compute_profile(module_compute_profile, module_target_sat): """Create and update a host with a compute profile specified :id: 94be25e8-035d-42c5-b1f3-3aa20030410d :expectedresults: A host is created and updated with expected compute profile assigned - - :CaseLevel: Integration """ - host = entities.Host(compute_profile=module_compute_profile).create() + host = module_target_sat.api.Host(compute_profile=module_compute_profile).create() assert host.compute_profile.read().name == module_compute_profile.name - new_cprofile = entities.ComputeProfile().create() + new_cprofile = module_target_sat.api.ComputeProfile().create() host.compute_profile = new_cprofile host = host.update(['compute_profile']) assert host.compute_profile.read().name == new_cprofile.name @@ -622,17 +609,15 @@ def test_positive_create_and_update_with_compute_profile(module_compute_profile) @pytest.mark.tier2 def test_positive_create_and_update_with_content_view( - module_org, module_location, module_default_org_view, module_lce_library + module_org, module_location, module_default_org_view, module_lce_library, module_target_sat ): """Create and update host with a content view specified :id: 10f69c7a-088e-474c-b869-1ad12deda2ad :expectedresults: A host is created and updated with expected content view - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -654,7 +639,7 @@ def test_positive_create_and_update_with_content_view( @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_with_host_parameters(module_org, module_location): +def test_positive_end_to_end_with_host_parameters(module_org, module_location, module_target_sat): """Create a host with a host parameters specified then remove and update with the newly specified parameters @@ -666,7 +651,7 @@ def test_positive_end_to_end_with_host_parameters(module_org, module_location): :CaseImportance: Critical """ parameters = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, 
host_parameters_attributes=parameters, @@ -690,9 +675,8 @@ def test_positive_end_to_end_with_host_parameters(module_org, module_location): @pytest.mark.tier2 @pytest.mark.e2e -@pytest.mark.on_premises_provisioning def test_positive_end_to_end_with_image( - module_org, module_location, module_cr_libvirt, module_libvirt_image + module_org, module_location, module_cr_libvirt, module_libvirt_image, module_target_sat ): """Create a host with an image specified then remove it and update the host with the same image afterwards @@ -701,10 +685,8 @@ def test_positive_end_to_end_with_image( :expectedresults: A host is created with expected image, image is removed and host is updated with expected image - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -722,10 +704,9 @@ def test_positive_end_to_end_with_image( @pytest.mark.tier1 -@pytest.mark.on_premises_provisioning @pytest.mark.parametrize('method', ['build', 'image']) def test_positive_create_with_provision_method( - method, module_org, module_location, module_cr_libvirt + method, module_org, module_location, module_cr_libvirt, module_target_sat ): """Create a host with provision method specified @@ -738,7 +719,7 @@ def test_positive_create_with_provision_method( :CaseImportance: Critical """ # Compute resource is required for 'image' method - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -748,7 +729,7 @@ def test_positive_create_with_provision_method( @pytest.mark.tier1 -def test_positive_delete(): +def test_positive_delete(target_sat): """Delete a host :id: ec725359-a75e-498c-9da8-f5abd2343dd3 @@ -757,28 +738,30 @@ def test_positive_delete(): :CaseImportance: Critical """ - host = entities.Host().create() + host = target_sat.api.Host().create() host.delete() with pytest.raises(HTTPError): host.read() @pytest.mark.tier2 -def test_positive_create_and_update_domain(module_org, module_location, module_domain): +def test_positive_create_and_update_domain( + module_org, module_location, module_domain, module_target_sat +): """Create and update a host with a domain :id: 8ca9f67c-4c11-40f9-b434-4f200bad000f :expectedresults: A host is created and updated with expected domain - - :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, domain=module_domain ).create() assert host.domain.read().name == module_domain.name - new_domain = entities.Domain(organization=[module_org], location=[module_location]).create() + new_domain = module_target_sat.api.Domain( + organization=[module_org], location=[module_location] + ).create() host.domain = new_domain host = host.update(['domain']) assert host.domain.read().name == new_domain.name @@ -793,8 +776,6 @@ def test_positive_create_and_update_env( :id: 87a08dbf-fd4c-4b6c-bf73-98ab70756fc6 :expectedresults: A host is created and updated with expected environment - - :CaseLevel: Integration """ host = session_puppet_enabled_sat.api.Host( organization=module_puppet_org, @@ -812,41 +793,37 @@ def test_positive_create_and_update_env( @pytest.mark.tier2 -def test_positive_create_and_update_arch(module_architecture): +def test_positive_create_and_update_arch(module_architecture, module_target_sat): """Create and update a host with an architecture :id: 5f190b14-e6db-46e1-8cd1-e94e048e6a77 :expectedresults: A 
host is created and updated with expected architecture - - :CaseLevel: Integration """ - host = entities.Host(architecture=module_architecture).create() + host = module_target_sat.api.Host(architecture=module_architecture).create() assert host.architecture.read().name == module_architecture.name - new_arch = entities.Architecture(operatingsystem=[host.operatingsystem]).create() + new_arch = module_target_sat.api.Architecture(operatingsystem=[host.operatingsystem]).create() host.architecture = new_arch host = host.update(['architecture']) assert host.architecture.read().name == new_arch.name @pytest.mark.tier2 -def test_positive_create_and_update_os(module_os): +def test_positive_create_and_update_os(module_os, module_target_sat): """Create and update a host with an operating system :id: 46edced1-8909-4066-b196-b8e22512341f :expectedresults: A host is created updated with expected operating system - - :CaseLevel: Integration """ - host = entities.Host(operatingsystem=module_os).create() + host = module_target_sat.api.Host(operatingsystem=module_os).create() assert host.operatingsystem.read().name == module_os.name - new_os = entities.OperatingSystem( + new_os = module_target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() - medium = entities.Media(id=host.medium.id).read() + medium = module_target_sat.api.Media(id=host.medium.id).read() medium.operatingsystem.append(new_os) medium.update(['operatingsystem']) host.operatingsystem = new_os @@ -855,20 +832,20 @@ def test_positive_create_and_update_os(module_os): @pytest.mark.tier2 -def test_positive_create_and_update_medium(module_org, module_location): +def test_positive_create_and_update_medium(module_org, module_location, module_target_sat): """Create and update a host with a medium :id: d81cb65c-48b3-4ce3-971e-51b9dd123697 :expectedresults: A host is created and updated with expected medium - - :CaseLevel: Integration """ - medium = entities.Media(organization=[module_org], location=[module_location]).create() - host = entities.Host(medium=medium).create() + medium = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() + host = module_target_sat.api.Host(medium=medium).create() assert host.medium.read().name == medium.name - new_medium = entities.Media( + new_medium = module_target_sat.api.Media( operatingsystem=[host.operatingsystem], location=[host.location], organization=[host.organization], @@ -917,17 +894,15 @@ def test_negative_update_mac(module_host): @pytest.mark.tier2 -def test_negative_update_arch(module_architecture): +def test_negative_update_arch(module_architecture, module_target_sat): """Attempt to update a host with an architecture, which does not belong to host's operating system :id: 07b9c0e7-f02b-4aff-99ae-5c203255aba1 :expectedresults: A host is not updated - - :CaseLevel: Integration """ - host = entities.Host().create() + host = module_target_sat.api.Host().create() host.architecture = module_architecture with pytest.raises(HTTPError): host = host.update(['architecture']) @@ -935,18 +910,16 @@ def test_negative_update_arch(module_architecture): @pytest.mark.tier2 -def test_negative_update_os(): +def test_negative_update_os(target_sat): """Attempt to update a host with an operating system, which is not associated with host's medium :id: 40e79f73-6356-4d61-9806-7ade2f4f8829 :expectedresults: A host is not updated - - :CaseLevel: Integration """ - host = entities.Host().create() - new_os = entities.OperatingSystem( + host = 
target_sat.api.Host().create() + new_os = target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() host.operatingsystem = new_os @@ -970,12 +943,10 @@ def test_positive_read_content_source_id( response :BZ: 1339613, 1488130 - - :CaseLevel: System """ - proxy = entities.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() + proxy = target_sat.api.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) - host = entities.Host( + host = target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -984,7 +955,7 @@ def test_positive_read_content_source_id( 'lifecycle_environment_id': module_lce.id, }, ).create() - content_facet_attributes = getattr(host, 'content_facet_attributes') + content_facet_attributes = host.content_facet_attributes assert content_facet_attributes is not None content_source_id = content_facet_attributes.get('content_source_id') assert content_source_id is not None @@ -1006,8 +977,6 @@ def test_positive_update_content_source_id( response :BZ: 1339613, 1488130 - - :CaseLevel: System """ proxy = target_sat.api.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0] module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) @@ -1051,8 +1020,6 @@ def test_positive_read_enc_information( :expectedresults: host ENC information read successfully :BZ: 1362372 - - :CaseLevel: Integration """ lce = ( session_puppet_enabled_sat.api.LifecycleEnvironment() @@ -1113,8 +1080,6 @@ def test_positive_add_future_subscription(): 2. Add the subscription to the content host :expectedresults: The future-dated subscription was added to the host - - :CaseLevel: Integration """ @@ -1134,8 +1099,6 @@ def test_positive_add_future_subscription_with_ak(): 3. Register a new content host with the activation key :expectedresults: The host was registered and future subscription added - - :CaseLevel: Integration """ @@ -1154,8 +1117,6 @@ def test_negative_auto_attach_future_subscription(): 3. 
Run auto-attach on the content host :expectedresults: Only the current subscription was added to the host - - :CaseLevel: Integration """ @@ -1175,8 +1136,6 @@ def test_positive_create_baremetal_with_bios(): :expectedresults: Host is created :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -1196,8 +1155,6 @@ def test_positive_create_baremetal_with_uefi(): :expectedresults: Host is created :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -1228,8 +1185,6 @@ def test_positive_verify_files_with_pxegrub_uefi(): And record in /var/lib/dhcpd/dhcpd.leases points to the bootloader :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -1264,8 +1219,6 @@ def test_positive_verify_files_with_pxegrub_uefi_secureboot(): :CaseComponent: TFTP :Team: Rocket - - :CaseLevel: Integration """ @@ -1300,8 +1253,6 @@ def test_positive_verify_files_with_pxegrub2_uefi(): :CaseComponent: TFTP :Team: Rocket - - :CaseLevel: Integration """ @@ -1333,8 +1284,6 @@ def test_positive_verify_files_with_pxegrub2_uefi_secureboot(): :CaseAutomation: NotAutomated - :CaseLevel: Integration - :CaseComponent: TFTP :Team: Rocket @@ -1387,12 +1336,37 @@ def test_positive_read_puppet_ca_proxy_name( assert session_puppet_enabled_proxy.name == host['puppet_ca_proxy_name'] +@pytest.mark.tier2 +def test_positive_list_hosts_thin_all(module_target_sat): + """List hosts with thin=true and per_page=all + + :id: 00b7e603-aed5-4b19-bfec-1a179fad6743 + + :expectedresults: Hosts listed without ISE + + :BZ: 1969263, 1644750 + + :customerscenario: true + """ + hosts = module_target_sat.api.Host().search(query={'thin': 'true', 'per_page': 'all'}) + assert module_target_sat.hostname in [host.name for host in hosts] + keys = dir(hosts[0]) + assert 'id' in keys + assert 'name' in keys + # Can't check for only id and name being present because the framework adds + # some data the API doesn't actually return (currently, it adds empty Environment). + # Instead, check for some data to be missing, as opposed to non-thin. 
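# For reference, the thin listing above maps to GET /api/v2/hosts with
# thin=true&per_page=all; nailgun's search() forwards the query verbatim. A
# minimal sketch of consuming it, assuming the same fixtures as this test:
hosts = module_target_sat.api.Host().search(query={'thin': 'true', 'per_page': 'all'})
names = {host.name for host in hosts}       # 'name' survives a thin listing
assert module_target_sat.hostname in names  # the Satellite itself is a registered host
# anything richer (ip, domain, ...) requires a full host.read() round-trip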
+ assert 'domain' not in keys + assert 'ip' not in keys + assert 'architecture' not in keys + + class TestHostInterface: """Tests for Host Interfaces""" @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_create_end_to_end(self, module_host): + def test_positive_create_end_to_end(self, module_host, target_sat): """Create update and delete an interface with different names and minimal input parameters @@ -1403,7 +1377,7 @@ def test_positive_create_end_to_end(self, module_host): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_interfaces_list()) - interface = entities.Interface(host=module_host, name=name).create() + interface = target_sat.api.Interface(host=module_host, name=name).create() assert interface.name == name new_name = gen_choice(datafactory.valid_interfaces_list()) interface.name = new_name @@ -1414,7 +1388,7 @@ def test_positive_create_end_to_end(self, module_host): interface.read() @pytest.mark.tier1 - def test_negative_end_to_end(self, module_host): + def test_negative_end_to_end(self, module_host, target_sat): """Attempt to create and update an interface with different invalid entries as names (>255 chars, unsupported string types), at the end attempt to remove primary interface @@ -1427,9 +1401,9 @@ def test_negative_end_to_end(self, module_host): """ name = gen_choice(datafactory.invalid_interfaces_list()) with pytest.raises(HTTPError) as error: - entities.Interface(host=module_host, name=name).create() + target_sat.api.Interface(host=module_host, name=name).create() assert str(422) in str(error) - interface = entities.Interface(host=module_host).create() + interface = target_sat.api.Interface(host=module_host).create() interface.name = name with pytest.raises(HTTPError) as error: interface.update(['name']) @@ -1448,7 +1422,7 @@ def test_negative_end_to_end(self, module_host): @pytest.mark.upgrade @pytest.mark.tier1 - def test_positive_delete_and_check_host(self): + def test_positive_delete_and_check_host(self, target_sat): """Delete host's interface (not primary) and make sure the host was not accidentally removed altogether with the interface @@ -1461,8 +1435,8 @@ def test_positive_delete_and_check_host(self): :CaseImportance: Critical """ - host = entities.Host().create() - interface = entities.Interface(host=host, primary=False).create() + host = target_sat.api.Host().create() + interface = target_sat.api.Interface(host=host, primary=False).create() interface.delete() with pytest.raises(HTTPError): interface.read() @@ -1476,7 +1450,7 @@ class TestHostBulkAction: """Tests for host bulk actions.""" @pytest.mark.tier2 - def test_positive_bulk_destroy(self, module_org): + def test_positive_bulk_destroy(self, module_org, module_target_sat): """Destroy multiple hosts make sure that hosts were removed, or were not removed when host is excluded from the list. 
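# The bulk endpoint exercised below accepts both an `included` and an
# `excluded` id list; a sketch of the call shape (fixtures as in this class,
# `host_ids` hypothetical):
module_target_sat.api.Host().bulk_destroy(
    data={
        'organization_id': module_org.id,
        'included': {'ids': host_ids},        # candidates for deletion
        'excluded': {'ids': [host_ids[-1]]},  # spared despite being included
    }
)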
@@ -1491,10 +1465,10 @@ def test_positive_bulk_destroy(self, module_org): host_ids = [] for _ in range(3): name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name, organization=module_org).create() + host = module_target_sat.api.Host(name=name, organization=module_org).create() host_ids.append(host.id) - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={ 'organization_id': module_org.id, 'included': {'ids': host_ids}, @@ -1502,15 +1476,15 @@ def test_positive_bulk_destroy(self, module_org): } ) for host_id in host_ids[:-1]: - result = entities.Host(id=host_id).read() + result = module_target_sat.api.Host(id=host_id).read() assert result.id == host_id with pytest.raises(HTTPError): - entities.Host(id=host_ids[-1]).read() + module_target_sat.api.Host(id=host_ids[-1]).read() - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={'organization_id': module_org.id, 'included': {'ids': host_ids[:-1]}} ) for host_id in host_ids[:-1]: with pytest.raises(HTTPError): - entities.Host(id=host_id).read() + module_target_sat.api.Host(id=host_id).read() diff --git a/tests/foreman/api/test_hostcollection.py b/tests/foreman/api/test_hostcollection.py index 5484dfd4ea9..985f989718e 100644 --- a/tests/foreman/api/test_hostcollection.py +++ b/tests/foreman/api/test_hostcollection.py @@ -4,42 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: HostCollections :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from random import choice -from random import randint +from random import choice, randint -import pytest from broker import Broker -from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.hosts import ContentHost -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) @pytest.fixture(scope='module') -def fake_hosts(module_org): +def fake_hosts(module_org, module_target_sat): """Create content hosts that can be shared by tests.""" - hosts = [entities.Host(organization=module_org).create() for _ in range(2)] - return hosts + return [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create host collections with different names. 
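# The recurring refactor in this file (and throughout this patch): module-level
# `nailgun.entities` calls give way to the `api` attribute of a Satellite
# fixture, so each request is tied to that fixture's server configuration
# rather than nailgun's process-wide default. The two shapes side by side:
# before: entities.HostCollection(name=name, organization=module_org).create()
host_collection = module_target_sat.api.HostCollection(
    name=name, organization=module_org
).create()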
:id: 8f2b9223-f5be-4cb1-8316-01ea747cae14 @@ -51,12 +45,14 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(name=name, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + name=name, organization=module_org + ).create() assert host_collection.name == name @pytest.mark.tier1 -def test_positive_list(module_org): +def test_positive_list(module_org, module_target_sat): """Create new host collection and then retrieve list of all existing host collections @@ -71,13 +67,13 @@ def test_positive_list(module_org): :CaseImportance: Critical """ - entities.HostCollection(organization=module_org).create() - hc_list = entities.HostCollection().search() + module_target_sat.api.HostCollection(organization=module_org).create() + hc_list = module_target_sat.api.HostCollection().search() assert len(hc_list) >= 1 @pytest.mark.tier1 -def test_positive_list_for_organization(): +def test_positive_list_for_organization(target_sat): """Create host collection for specific organization. Retrieve list of host collections for that organization @@ -88,16 +84,16 @@ def test_positive_list_for_organization(): :CaseImportance: Critical """ - org = entities.Organization().create() - hc = entities.HostCollection(organization=org).create() - hc_list = entities.HostCollection(organization=org).search() + org = target_sat.api.Organization().create() + hc = target_sat.api.HostCollection(organization=org).create() + hc_list = target_sat.api.HostCollection(organization=org).search() assert len(hc_list) == 1 assert hc_list[0].id == hc.id @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, desc): +def test_positive_create_with_description(module_org, desc, module_target_sat): """Create host collections with different descriptions. :id: 9d13392f-8d9d-4ff1-8909-4233e4691055 @@ -109,12 +105,14 @@ def test_positive_create_with_description(module_org, desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(description=desc, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + description=desc, organization=module_org + ).create() assert host_collection.description == desc @pytest.mark.tier1 -def test_positive_create_with_limit(module_org): +def test_positive_create_with_limit(module_org, module_target_sat): """Create host collections with different limits. :id: 86d9387b-7036-4794-96fd-5a3472dd9160 @@ -126,13 +124,15 @@ def test_positive_create_with_limit(module_org): """ for _ in range(5): limit = randint(1, 30) - host_collection = entities.HostCollection(max_hosts=limit, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + max_hosts=limit, organization=module_org + ).create() assert host_collection.max_hosts == limit @pytest.mark.parametrize("unlimited", [False, True]) @pytest.mark.tier1 -def test_positive_create_with_unlimited_hosts(module_org, unlimited): +def test_positive_create_with_unlimited_hosts(module_org, unlimited, module_target_sat): """Create host collection with different values of 'unlimited hosts' parameter. 
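# `max_hosts` and `unlimited_hosts` are complementary knobs, which is why the
# parametrized body that follows passes `max_hosts=None` whenever unlimited is
# chosen; a sketch of both shapes (limit value illustrative):
limited = module_target_sat.api.HostCollection(
    max_hosts=5, unlimited_hosts=False, organization=module_org
).create()
unlimited = module_target_sat.api.HostCollection(
    max_hosts=None, unlimited_hosts=True, organization=module_org
).create()
assert limited.max_hosts == 5 and unlimited.unlimited_hosts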
@@ -145,7 +145,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=None if unlimited else 1, organization=module_org, unlimited_hosts=unlimited, @@ -154,7 +154,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): @pytest.mark.tier1 -def test_positive_create_with_host(module_org, fake_hosts): +def test_positive_create_with_host(module_org, fake_hosts, module_target_sat): """Create a host collection that contains a host. :id: 9dc0ad72-58c2-4079-b1ca-2c4373472f0f @@ -166,14 +166,14 @@ def test_positive_create_with_host(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() assert len(host_collection.host) == 1 @pytest.mark.tier1 -def test_positive_create_with_hosts(module_org, fake_hosts): +def test_positive_create_with_hosts(module_org, fake_hosts, module_target_sat): """Create a host collection that contains hosts. :id: bb8d2b42-9a8b-4c4f-ba0c-c56ae5a7eb1d @@ -185,23 +185,23 @@ def test_positive_create_with_hosts(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert len(host_collection.host) == len(fake_hosts) @pytest.mark.tier2 -def test_positive_add_host(module_org, fake_hosts): +def test_positive_add_host(module_org, fake_hosts, module_target_sat): """Add a host to host collection. :id: da8bc901-7ac8-4029-bb62-af21aa4d3a88 :expectedresults: Host was added to the host collection. - :CaseLevel: Integration - :BZ:1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.host_ids = [fake_hosts[0].id] host_collection = host_collection.update(['host_ids']) assert len(host_collection.host) == 1 @@ -209,18 +209,16 @@ def test_positive_add_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_add_hosts(module_org, fake_hosts): +def test_positive_add_hosts(module_org, fake_hosts, module_target_sat): """Add hosts to host collection. :id: f76b4db1-ccd5-47ab-be15-8c7d91d03b22 :expectedresults: Hosts were added to the host collection. - :CaseLevel: Integration - :BZ: 1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_ids = [str(host.id) for host in fake_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -228,7 +226,7 @@ def test_positive_add_hosts(module_org, fake_hosts): @pytest.mark.tier1 -def test_positive_read_host_ids(module_org, fake_hosts): +def test_positive_read_host_ids(module_org, fake_hosts, module_target_sat): """Read a host collection and look at the ``host_ids`` field. 
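# Membership is managed through the `host_ids` attribute plus a field-scoped
# update(), as the add-host tests above do; compactly:
hc = module_target_sat.api.HostCollection(organization=module_org).create()
hc.host_ids = [host.id for host in fake_hosts]  # fake_hosts: the module fixture above
hc = hc.update(['host_ids'])                    # PUT only the host_ids field
assert len(hc.host) == len(fake_hosts)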
:id: 444a1528-64c8-41b6-ba2b-6c49799d5980 @@ -240,7 +238,9 @@ def test_positive_read_host_ids(module_org, fake_hosts): :BZ:1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert frozenset(host.id for host in host_collection.host) == frozenset( host.id for host in fake_hosts ) @@ -248,7 +248,7 @@ def test_positive_read_host_ids(module_org, fake_hosts): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Check if host collection name can be updated :id: b2dedb99-6dd7-41be-8aaa-74065c820ac6 @@ -259,14 +259,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.name = new_name assert host_collection.update().name == new_name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_description(module_org, new_desc): +def test_positive_update_description(module_org, new_desc, module_target_sat): """Check if host collection description can be updated :id: f8e9bd1c-1525-4b5f-a07c-eb6b6e7aa628 @@ -277,13 +277,13 @@ def test_positive_update_description(module_org, new_desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.description = new_desc assert host_collection.update().description == new_desc @pytest.mark.tier1 -def test_positive_update_limit(module_org): +def test_positive_update_limit(module_org, module_target_sat): """Check if host collection limit can be updated :id: 4eda7796-cd81-453b-9b72-4ef84b2c1d8c @@ -292,7 +292,7 @@ def test_positive_update_limit(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1, organization=module_org, unlimited_hosts=False ).create() for limit in (1, 3, 5, 10, 20): @@ -301,7 +301,7 @@ def test_positive_update_limit(module_org): @pytest.mark.tier1 -def test_positive_update_unlimited_hosts(module_org): +def test_positive_update_unlimited_hosts(module_org, module_target_sat): """Check if host collection 'unlimited hosts' parameter can be updated :id: 09a3973d-9832-4255-87bf-f9eaeab4aee8 @@ -312,7 +312,7 @@ def test_positive_update_unlimited_hosts(module_org): :CaseImportance: Critical """ random_unlimited = choice([True, False]) - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1 if not random_unlimited else None, organization=module_org, unlimited_hosts=random_unlimited, @@ -325,7 +325,7 @@ def test_positive_update_unlimited_hosts(module_org): @pytest.mark.tier1 -def test_positive_update_host(module_org, fake_hosts): +def test_positive_update_host(module_org, fake_hosts, module_target_sat): """Update host collection's host. 
:id: 23082854-abcf-4085-be9c-a5d155446acb @@ -334,7 +334,7 @@ def test_positive_update_host(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() host_collection.host_ids = [fake_hosts[1].id] @@ -344,7 +344,7 @@ def test_positive_update_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_update_hosts(module_org, fake_hosts): +def test_positive_update_hosts(module_org, fake_hosts, module_target_sat): """Update host collection's hosts. :id: 0433b37d-ae16-456f-a51d-c7b800334861 @@ -353,8 +353,10 @@ def test_positive_update_hosts(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() - new_hosts = [entities.Host(organization=module_org).create() for _ in range(2)] + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() + new_hosts = [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] host_ids = [str(host.id) for host in new_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -363,7 +365,7 @@ def test_positive_update_hosts(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Check if host collection can be deleted :id: 13a16cd2-16ce-4966-8c03-5d821edf963b @@ -372,7 +374,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.delete() with pytest.raises(HTTPError): host_collection.read() @@ -380,7 +382,7 @@ def test_positive_delete(module_org): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Try to create host collections with different invalid names :id: 38f67d04-a19d-4eab-a577-21b8d62c7389 @@ -392,7 +394,7 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostCollection(name=name, organization=module_org).create() + module_target_sat.api.HostCollection(name=name, organization=module_org).create() @pytest.mark.tier1 @@ -417,14 +419,14 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s """ # this command creates a host collection and "appends", makes available, to the AK module_ak_cv_lce.host_collection.append( - entities.HostCollection(organization=module_org).create() + target_sat.api.HostCollection(organization=module_org).create() ) # Move HC from Add tab to List tab on AK view module_ak_cv_lce = module_ak_cv_lce.update(['host_collection']) # Create a product so we have a subscription to use - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() prod_name = product.name - product_subscription = entities.Subscription(organization=module_org).search( + product_subscription = target_sat.api.Subscription(organization=module_org).search( query={'search': f'name={prod_name}'} )[0] # 
Create and register VMs as members of Host Collection @@ -437,7 +439,7 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s host_ids = [host.id for host in host_collection.host] # Add subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_add_subscriptions( + target_sat.api.Host().bulk_add_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -446,13 +448,13 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are there for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert ( prod_name in req['results'][0]['product_name'] ), 'Subscription not applied to HC members' # Remove the subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_remove_subscriptions( + target_sat.api.Host().bulk_remove_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -461,5 +463,5 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are gone for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert not req['results'], 'Subscription not removed from HC members' diff --git a/tests/foreman/api/test_hostgroup.py b/tests/foreman/api/test_hostgroup.py index 405ed180dea..e0e6fd9ed91 100644 --- a/tests/foreman/api/test_hostgroup.py +++ b/tests/foreman/api/test_hostgroup.py @@ -4,36 +4,33 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: HostGroup :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from random import randint -import pytest from fauxfactory import gen_string -from nailgun import client -from nailgun import entities -from nailgun import entity_fields +from nailgun import client, entity_fields +import pytest from requests.exceptions import HTTPError from robottelo.config import get_credentials -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_hostgroups_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_hostgroups_list, +) @pytest.fixture -def hostgroup(module_org, module_location): - return entities.HostGroup(location=[module_location], organization=[module_org]).create() +def hostgroup(module_org, module_location, module_target_sat): + return module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() @pytest.fixture @@ -60,7 +57,6 @@ def test_inherit_puppetclass(self, session_puppet_enabled_sat): :BZ: 1107708, 1222118, 1487586 - :CaseLevel: System """ # Creating entities like organization, content view and lifecycle_env # with not utf-8 names for easier interaction with puppet environment @@ -158,7 +154,7 @@ def test_inherit_puppetclass(self, session_puppet_enabled_sat): @pytest.mark.upgrade @pytest.mark.tier3 - def test_rebuild_config(self, module_org, module_location, hostgroup): + def test_rebuild_config(self, module_org, module_location, hostgroup, module_target_sat): """'Rebuild orchestration config' of an existing host group :id: 58bf7015-18fc-4d25-9b64-7f2dd6dde425 @@ -167,14 +163,13 
@@ def test_rebuild_config(self, module_org, module_location, hostgroup): :CaseImportance: Medium - :CaseLevel: System """ - lce = entities.LifecycleEnvironment(organization=module_org).create() - content_view = entities.ContentView(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() content_view = content_view.read() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) - entities.Host( + module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -193,7 +188,7 @@ def test_rebuild_config(self, module_org, module_location, hostgroup): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_hostgroups_list())) - def test_positive_create_with_name(self, name, module_org, module_location): + def test_positive_create_with_name(self, name, module_org, module_location, module_target_sat): """Create a hostgroup with different names :id: fd5d353c-fd0c-4752-8a83-8f399b4c3416 @@ -204,13 +199,13 @@ def test_positive_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() assert name == hostgroup.name @pytest.mark.tier1 - def test_positive_clone(self, hostgroup): + def test_positive_clone(self, hostgroup, target_sat): """Create a hostgroup by cloning an existing one :id: 44ac8b3b-9cb0-4a9e-ad9b-2c67b2411922 @@ -220,7 +215,7 @@ def test_positive_clone(self, hostgroup): :CaseImportance: Critical """ hostgroup_cloned_name = gen_string('alpha') - hostgroup_cloned = entities.HostGroup(id=hostgroup.id).clone( + hostgroup_cloned = target_sat.api.HostGroup(id=hostgroup.id).clone( data={'name': hostgroup_cloned_name} ) hostgroup_origin = hostgroup.read_json() @@ -246,8 +241,6 @@ def test_positive_create_with_properties( :expectedresults: A hostgroup is created with expected properties, updated and deleted - :CaseLevel: Integration - :CaseImportance: High """ env = session_puppet_enabled_sat.api.Environment( @@ -400,22 +393,21 @@ def test_positive_create_with_realm(self, module_org, module_location, target_sa :expectedresults: A hostgroup is created with expected realm assigned - :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() assert hostgroup.realm.read().name == realm.name @pytest.mark.tier2 - def test_positive_create_with_locs(self, module_org): + def test_positive_create_with_locs(self, module_org, module_target_sat): """Create a hostgroup with multiple locations specified :id: 0c2ee2ff-9e7a-4931-8cea-f4eecbd8c4c0 @@ -425,14 +417,18 @@ def test_positive_create_with_locs(self, module_org): :expectedresults: A hostgroup is created with expected multiple locations assigned - :CaseLevel: Integration """ - locs = [entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(location=locs, 
organization=[module_org]).create() + locs = [ + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) + ] + hostgroup = module_target_sat.api.HostGroup( + location=locs, organization=[module_org] + ).create() assert {loc.name for loc in locs} == {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_create_with_orgs(self): + def test_positive_create_with_orgs(self, target_sat): """Create a hostgroup with multiple organizations specified :id: 09642238-cf0d-469a-a0b5-c167b1b8edf5 @@ -442,10 +438,9 @@ def test_positive_create_with_orgs(self): :expectedresults: A hostgroup is created with expected multiple organizations assigned - :CaseLevel: Integration """ - orgs = [entities.Organization().create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(organization=orgs).create() + orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] + hostgroup = target_sat.api.HostGroup(organization=orgs).create() assert {org.name for org in orgs}, {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @@ -475,7 +470,6 @@ def test_positive_update_puppet_ca_proxy(self, puppet_hostgroup, session_puppet_ :CaseImportance: Medium - :CaseLevel: Integration """ new_proxy = session_puppet_enabled_sat.api.SmartProxy().search( query={'search': f'url = {session_puppet_enabled_sat.url}:9090'} @@ -495,22 +489,21 @@ def test_positive_update_realm(self, module_org, module_location, target_sat): :expectedresults: A hostgroup is updated with expected realm - :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() - new_realm = entities.Realm( + new_realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() @@ -528,7 +521,6 @@ def test_positive_update_puppet_proxy(self, puppet_hostgroup, session_puppet_ena :expectedresults: A hostgroup is updated with expected puppet proxy - :CaseLevel: Integration """ new_proxy = session_puppet_enabled_sat.api.SmartProxy().search( query={'search': f'url = {session_puppet_enabled_sat.url}:9090'} @@ -547,9 +539,8 @@ def test_positive_update_content_source(self, hostgroup, target_sat): :expectedresults: A hostgroup is updated with expected puppet proxy - :CaseLevel: Integration """ - new_content_source = entities.SmartProxy().search( + new_content_source = target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0] hostgroup.content_source = new_content_source @@ -557,7 +548,7 @@ def test_positive_update_content_source(self, hostgroup, target_sat): assert hostgroup.content_source.read().name == new_content_source.name @pytest.mark.tier2 - def test_positive_update_locs(self, module_org, hostgroup): + def test_positive_update_locs(self, module_org, hostgroup, module_target_sat): """Update a hostgroup with new multiple locations :id: b045f7e8-d7c0-428b-a29c-8d54e53742e2 @@ -566,17 +557,17 @@ def test_positive_update_locs(self, module_org, hostgroup): :expectedresults: 
A hostgroup is updated with expected locations - :CaseLevel: Integration """ new_locs = [ - entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5)) + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) ] hostgroup.location = new_locs hostgroup = hostgroup.update(['location']) assert {loc.name for loc in new_locs}, {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_update_orgs(self, hostgroup): + def test_positive_update_orgs(self, hostgroup, target_sat): """Update a hostgroup with new multiple organizations :id: 5f6bd4f9-4bd6-4d7e-9a91-de824299020e @@ -585,16 +576,15 @@ def test_positive_update_orgs(self, hostgroup): :expectedresults: A hostgroup is updated with expected organizations - :CaseLevel: Integration """ - new_orgs = [entities.Organization().create() for _ in range(randint(3, 5))] + new_orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] hostgroup.organization = new_orgs hostgroup = hostgroup.update(['organization']) assert {org.name for org in new_orgs} == {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name, module_org, module_location): + def test_negative_create_with_name(self, name, module_org, module_location, module_target_sat): """Attempt to create a hostgroup with invalid names :id: 3f5aa17a-8db9-4fe9-b309-b8ec5e739da1 @@ -606,7 +596,7 @@ def test_negative_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostGroup( + module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() @@ -630,7 +620,7 @@ def test_negative_update_name(self, new_name, hostgroup): assert hostgroup.read().name == original_name @pytest.mark.tier2 - def test_positive_create_with_group_parameters(self, module_org): + def test_positive_create_with_group_parameters(self, module_org, module_target_sat): """Create a hostgroup with 'group parameters' specified :id: 0959e2a2-d635-482b-9b2e-d33990d6f0dc @@ -641,12 +631,10 @@ def test_positive_create_with_group_parameters(self, module_org): :customerscenario: true - :CaseLevel: Integration - :BZ: 1710853 """ group_params = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( organization=[module_org], group_parameters_attributes=[group_params] ).create() assert group_params['name'] == hostgroup.group_parameters_attributes[0]['name'] @@ -673,7 +661,6 @@ def test_positive_get_content_source(self, hostgroup, module_target_sat): :expectedresults: The response contains both values for the ``content_source`` field. - :CaseLevel: Integration """ names = module_target_sat.api_factory.one_to_one_names('content_source') hostgroup_attrs = set(hostgroup.read_json().keys()) @@ -692,7 +679,6 @@ def test_positive_get_cv(self, hostgroup, module_target_sat): :expectedresults: The response contains both values for the ``content_view`` field. - :CaseLevel: Integration """ names = module_target_sat.api_factory.one_to_one_names('content_view') hostgroup_attrs = set(hostgroup.read_json().keys()) @@ -711,7 +697,6 @@ def test_positive_get_lce(self, hostgroup, module_target_sat): :expectedresults: The response contains both values for the ``lifecycle_environment`` field. 
- :CaseLevel: Integration """ names = module_target_sat.api_factory.one_to_one_names('lifecycle_environment') hostgroup_attrs = set(hostgroup.read_json().keys()) @@ -732,7 +717,6 @@ def test_positive_read_puppet_proxy_name(self, session_puppet_enabled_sat): :BZ: 1371900 - :CaseLevel: Integration """ proxy = session_puppet_enabled_sat.api.SmartProxy().search( query={'search': f'url = {session_puppet_enabled_sat.url}:9090'} @@ -754,7 +738,6 @@ def test_positive_read_puppet_ca_proxy_name(self, session_puppet_enabled_sat): :BZ: 1371900 - :CaseLevel: Integration """ proxy = session_puppet_enabled_sat.api.SmartProxy().search( query={'search': f'url = {session_puppet_enabled_sat.url}:9090'} diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index 34d398b25f6..51ceb485361 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -2,26 +2,25 @@ :Requirement: HttpProxy -:CaseLevel: Acceptance - -:CaseComponent: Repositories +:CaseComponent: HTTPProxy :team: Phoenix-content -:TestType: Functional - :CaseImportance: High :CaseAutomation: Automated -:Upstream: No """ -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from robottelo import constants from robottelo.config import settings +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, +) +from robottelo.constants.repos import ANSIBLE_GALAXY, CUSTOM_FILE_REPO @pytest.mark.tier2 @@ -30,27 +29,58 @@ @pytest.mark.run_in_one_thread @pytest.mark.parametrize( 'setup_http_proxy', - [None, True, False], + [True, False], + indirect=True, + ids=['auth_http_proxy', 'unauth_http_proxy'], +) +@pytest.mark.parametrize( + 'module_repos_collection_with_manifest', + [ + { + 'distro': 'rhel7', + 'RHELAnsibleEngineRepository': {'cdn': True}, + 'YumRepository': {'url': settings.repos.module_stream_1.url}, + 'FileRepository': {'url': CUSTOM_FILE_REPO}, + 'DockerRepository': { + 'url': CONTAINER_REGISTRY_HUB, + 'upstream_name': CONTAINER_UPSTREAM_NAME, + }, + 'AnsibleRepository': { + 'url': ANSIBLE_GALAXY, + 'requirements': [ + {'name': 'theforeman.foreman', 'version': '2.1.0'}, + {'name': 'theforeman.operations', 'version': '0.1.0'}, + ], + }, + } + ], indirect=True, - ids=['no_http_proxy', 'auth_http_proxy', 'unauth_http_proxy'], ) -def test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifest_org): +def test_positive_end_to_end( + setup_http_proxy, module_target_sat, module_org, module_repos_collection_with_manifest +): """End-to-end test for HTTP Proxy related scenarios. :id: 38df5479-9127-49f3-a30e-26b33655971a :customerscenario: true - :steps: - 1. Set Http proxy settings for Satellite. - 2. Enable and sync redhat repository. - 3. Assign Http Proxy to custom repository and perform repo sync. - 4. Discover yum type repo. - 5. Discover docker type repo. + :setup: + 1. Create HTTP proxy entity at the Satellite. + 2. Create RH yum repository. + 3. Create custom repo of each content type (yum, file, docker, ansible collection). - :expectedresults: HTTP Proxy works with other satellite components. + :steps: + 1. Set immediate download policy where applicable for complete sync testing. + 2. For each repo set global default HTTP proxy and sync it. + 3. For each repo set specific HTTP proxy and sync it. + 4. For each repo set no HTTP proxy and sync it. + 5. Discover yum type repo through HTTP proxy. + 6. Discover docker type repo through HTTP proxy. - :Team: Phoenix-content + :expectedresults: + 1. 
All repository updates and syncs succeed. + 2. Yum and docker repos can be discovered through HTTP proxy. :BZ: 2011303, 2042473, 2046337 @@ -58,57 +88,36 @@ def test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifes :CaseImportance: Critical """ - http_proxy, http_proxy_type = setup_http_proxy - http_proxy_id = http_proxy.id if http_proxy_type is not None else None - http_proxy_policy = 'use_selected_http_proxy' if http_proxy_type is not None else 'none' - # Assign http_proxy to Redhat repository and perform repository sync. - rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch=constants.DEFAULT_ARCHITECTURE, - org_id=module_manifest_org.id, - product=constants.PRDS['rhae'], - repo=constants.REPOS['rhae2']['name'], - reposet=constants.REPOSET['rhae2'], - releasever=None, - ) - rh_repo = module_target_sat.api.Repository( - id=rh_repo_id, - http_proxy_policy=http_proxy_policy, - http_proxy_id=http_proxy_id, - download_policy='immediate', - ).update() - assert rh_repo.http_proxy_policy == http_proxy_policy - assert rh_repo.http_proxy_id == http_proxy_id - assert rh_repo.download_policy == 'immediate' - rh_repo.sync() - assert rh_repo.read().content_counts['rpm'] >= 1 - - # Assign http_proxy to Repositories and perform repository sync. - repo_options = { - 'http_proxy_policy': http_proxy_policy, - 'http_proxy_id': http_proxy_id, - } - repo = module_target_sat.api.Repository(**repo_options).create() - - assert repo.http_proxy_policy == http_proxy_policy - assert repo.http_proxy_id == http_proxy_id - repo.sync() - assert repo.read().content_counts['rpm'] >= 1 - - # Use global_default_http_proxy - repo_options['http_proxy_policy'] = 'global_default_http_proxy' - repo_2 = module_target_sat.api.Repository(**repo_options).create() - assert repo_2.http_proxy_policy == 'global_default_http_proxy' - - # Update to selected_http_proxy - repo_2.http_proxy_policy = 'none' - repo_2.update(['http_proxy_policy']) - assert repo_2.http_proxy_policy == 'none' - - # test scenario for yum type repo discovery. 
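# The rewritten body below cycles every repository through the three proxy
# policies; the per-repo call it leans on is, in isolation:
repo = module_target_sat.api.Repository(
    id=repo_id,                                   # repo_id: any existing repo, hypothetical here
    http_proxy_policy='use_selected_http_proxy',  # or 'global_default_http_proxy' / 'none'
    http_proxy_id=http_proxy.id,                  # only meaningful for the 'selected' policy
).update()
assert 'success' in module_target_sat.api.Repository(id=repo.id).sync()['result']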
+ # Set immediate download policy where applicable for complete sync testing + for repo in module_repos_collection_with_manifest.repos_info: + if repo['content-type'] in ['yum', 'docker']: + module_target_sat.api.Repository(id=repo['id'], download_policy='immediate').update() + + # For each repo set global/specific/no HTTP proxy and sync it + for policy in ['global_default_http_proxy', 'use_selected_http_proxy', 'none']: + for repo in module_repos_collection_with_manifest.repos_info: + repo = module_target_sat.api.Repository( + id=repo['id'], + http_proxy_policy=policy, + http_proxy_id=setup_http_proxy[0].id if 'selected' in policy else None, + ).update() + assert ( + repo.http_proxy_policy == policy + ), f'Policy update failed for {repo.content_type} repo with {policy} HTTP policy' + assert ( + repo.http_proxy_id == setup_http_proxy[0].id + if 'selected' in policy + else repo.http_proxy_id is None + ), f'Proxy id update failed for {repo.content_type} repo with {policy} HTTP policy' + assert ( + 'success' in module_target_sat.api.Repository(id=repo.id).sync()['result'] + ), f'Sync of a {repo.content_type} repo with {policy} HTTP policy failed' + + # Discover yum type repo through HTTP proxy repo_name = 'fakerepo01' - yum_repo = module_target_sat.api.Organization(id=module_manifest_org.id).repo_discover( + yum_repo = module_target_sat.api.Organization(id=module_org.id).repo_discover( data={ - "id": module_manifest_org.id, + "id": module_org.id, "url": settings.repos.repo_discovery.url, "content_type": "yum", } @@ -116,17 +125,17 @@ def test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifes assert len(yum_repo['output']) == 1 assert yum_repo['output'][0] == f'{settings.repos.repo_discovery.url}/{repo_name}/' - # test scenario for docker type repo discovery. - yum_repo = module_target_sat.api.Organization(id=module_manifest_org.id).repo_discover( + # Discover docker type repo through HTTP proxy + docker_repo = module_target_sat.api.Organization(id=module_org.id).repo_discover( data={ - "id": module_manifest_org.id, + "id": module_org.id, "url": 'quay.io', "content_type": "docker", - "search": 'quay/busybox', + "search": 'foreman/foreman', } ) - assert len(yum_repo['output']) >= 1 - assert 'quay/busybox' in yum_repo['output'] + assert len(docker_repo['output']) > 0 + assert docker_repo['result'] == 'success' @pytest.mark.upgrade @@ -156,8 +165,6 @@ def test_positive_auto_attach_with_http_proxy( :BZ: 2046337 :parametrized: yes - - :CaseLevel: System """ org = function_entitlement_manifest_org lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() @@ -206,63 +213,66 @@ def test_positive_auto_attach_with_http_proxy( @pytest.mark.e2e @pytest.mark.tier2 -def test_positive_assign_http_proxy_to_products(): +def test_positive_assign_http_proxy_to_products(target_sat, function_org): """Assign http_proxy to Products and check whether http-proxy is used during sync. :id: c9d23aa1-3325-4abd-a1a6-d5e75c12b08a - :expectedresults: HTTP Proxy is assigned to all repos present - in Products and sync operation uses assigned http-proxy. + :setup: + 1. Create an Organization. - :Team: Phoenix-content - - :CaseImportance: Critical + :steps: + 1. Create two HTTP proxies. + 2. Create two products and two repos in each product with various HTTP proxy policies. + 3. Set the HTTP proxy through bulk action for both products. + 4. Bulk sync one product. + + :expectedresults: + 1. 
HTTP Proxy is assigned to all repos present in Products + and sync operation uses the assigned http-proxy and passes. """ - org = entities.Organization().create() - # create HTTP proxies - http_proxy_a = entities.HTTPProxy( + # Create two HTTP proxies + http_proxy_a = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.un_auth_proxy_url, - organization=[org], + organization=[function_org], ).create() - - http_proxy_b = entities.HTTPProxy( + http_proxy_b = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.auth_proxy_url, username=settings.http_proxy.username, password=settings.http_proxy.password, - organization=[org], + organization=[function_org], ).create() - # Create products and repositories - product_a = entities.Product(organization=org).create() - product_b = entities.Product(organization=org).create() - repo_a1 = entities.Repository(product=product_a, http_proxy_policy='none').create() - repo_a2 = entities.Repository( + # Create two products and two repos in each product with various HTTP proxy policies + product_a = target_sat.api.Product(organization=function_org).create() + product_b = target_sat.api.Product(organization=function_org).create() + repo_a1 = target_sat.api.Repository(product=product_a, http_proxy_policy='none').create() + repo_a2 = target_sat.api.Repository( product=product_a, http_proxy_policy='use_selected_http_proxy', http_proxy_id=http_proxy_a.id, ).create() - repo_b1 = entities.Repository(product=product_b, http_proxy_policy='none').create() - repo_b2 = entities.Repository( + repo_b1 = target_sat.api.Repository(product=product_b, http_proxy_policy='none').create() + repo_b2 = target_sat.api.Repository( product=product_b, http_proxy_policy='global_default_http_proxy' ).create() - # Add http_proxy to products - entities.ProductBulkAction().http_proxy( + + # Set the HTTP proxy through bulk action for both products + target_sat.api.ProductBulkAction().http_proxy( data={ "ids": [product_a.id, product_b.id], "http_proxy_policy": "use_selected_http_proxy", "http_proxy_id": http_proxy_b.id, } ) - for repo in repo_a1, repo_a2, repo_b1, repo_b2: r = repo.read() assert r.http_proxy_policy == "use_selected_http_proxy" assert r.http_proxy_id == http_proxy_b.id - - product_a.sync({'async': True}) + assert 'success' in product_a.sync()['result'], 'Product sync failed' @pytest.mark.tier2 @@ -285,6 +295,11 @@ def test_positive_sync_proxy_with_certificate(request, target_sat, module_org, m :customerscenario: true """ + + @request.addfinalizer + def _finalize(): + target_sat.custom_certs_cleanup() + # Cleanup any existing certs that may conflict target_sat.custom_certs_cleanup() proxy_host = settings.http_proxy.auth_proxy_url.replace('http://', '').replace(':3128', '') @@ -292,8 +307,13 @@ def test_positive_sync_proxy_with_certificate(request, target_sat, module_org, m # Create and fetch new certificate target_sat.custom_cert_generate(proxy_host) + + @request.addfinalizer + def _finalize(): + target_sat.custom_certs_cleanup() +
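# `request.addfinalizer` registers teardown at the moment it is called, so the
# patch moves registration next to the certificate work it must undo: should a
# later step raise, cleanup still runs. The pattern in isolation (hostname
# illustrative):
def test_example(request, target_sat):
    @request.addfinalizer
    def _finalize():
        target_sat.custom_certs_cleanup()  # runs even if the steps below fail
    target_sat.custom_cert_generate('proxy.example.com')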
response.get('errors') is None assert repo.read().last_sync is not None assert repo.read().content_counts['rpm'] >= 1 - - @request.addfinalizer - def _finalize(): - target_sat.custom_certs_cleanup() diff --git a/tests/foreman/api/test_ldapauthsource.py b/tests/foreman/api/test_ldapauthsource.py index e896397c4eb..885d290e741 100644 --- a/tests/foreman/api/test_ldapauthsource.py +++ b/tests/foreman/api/test_ldapauthsource.py @@ -4,31 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE +from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE from robottelo.utils.datafactory import generate_strings_list @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.parametrize('auth_source_type', ['AD', 'IPA']) -def test_positive_endtoend(auth_source_type, module_org, module_location, ad_data, ipa_data): +def test_positive_endtoend( + auth_source_type, module_org, module_location, ad_data, ipa_data, module_target_sat +): """Create/update/delete LDAP authentication with AD using names of different types :id: e3607c97-7c48-4cf6-b119-2bfd895d9325 @@ -47,7 +42,7 @@ def test_positive_endtoend(auth_source_type, module_org, module_location, ad_dat auth_source_data = ipa_data auth_source_data['ldap_user_name'] = auth_source_data['ldap_user_cn'] auth_type_attr = LDAP_ATTR['login'] - authsource = entities.AuthSourceLDAP( + authsource = module_target_sat.api.AuthSourceLDAP( onthefly_register=True, account=auth_source_data['ldap_user_cn'], account_password=auth_source_data['ldap_user_passwd'], diff --git a/tests/foreman/api/test_lifecycleenvironment.py b/tests/foreman/api/test_lifecycleenvironment.py index 57effa04b1a..b68017efb87 100644 --- a/tests/foreman/api/test_lifecycleenvironment.py +++ b/tests/foreman/api/test_lifecycleenvironment.py @@ -8,44 +8,40 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: LifecycleEnvironments :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests.exceptions import HTTPError from robottelo.constants import ENVIRONMENT -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_names_list, + parametrized, + valid_data_list, +) @pytest.fixture(scope='module') -def module_lce(module_org): - return entities.LifecycleEnvironment( +def module_lce(module_org, module_target_sat): + return module_target_sat.api.LifecycleEnvironment( organization=module_org, description=gen_string('alpha') ).create() @pytest.fixture -def lce(module_org): - return entities.LifecycleEnvironment(organization=module_org).create() +def lce(module_org, module_target_sat): + return module_target_sat.api.LifecycleEnvironment(organization=module_org).create() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create lifecycle environment with valid name only :id: ec1d985a-6a39-4de6-b635-c803ecedd832 @@ -56,12 +52,12 @@ def 
test_positive_create_with_name(name): :parametrized: yes """ - assert entities.LifecycleEnvironment(name=name).create().name == name + assert target_sat.api.LifecycleEnvironment(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create lifecycle environment with valid description :id: 0bc05510-afc7-4087-ab75-1065ab5ba1d3 @@ -73,11 +69,11 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - assert entities.LifecycleEnvironment(description=desc).create().description == desc + assert target_sat.api.LifecycleEnvironment(description=desc).create().description == desc @pytest.mark.tier1 -def test_positive_create_prior(module_org): +def test_positive_create_prior(module_org, module_target_sat): """Create a lifecycle environment with valid name with Library as prior @@ -88,13 +84,13 @@ def test_positive_create_prior(module_org): :CaseImportance: Critical """ - lc_env = entities.LifecycleEnvironment(organization=module_org).create() + lc_env = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() assert lc_env.prior.read().name == ENVIRONMENT @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier3 -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create lifecycle environment providing an invalid name :id: 7e8ea2e6-5927-4e86-8ea8-04c3feb524a6 @@ -106,12 +102,12 @@ def test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.LifecycleEnvironment(name=name).create() + target_sat.api.LifecycleEnvironment(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_lce, new_name): +def test_positive_update_name(module_lce, new_name, module_target_sat): """Create lifecycle environment providing the initial name, then update its name to another valid name. @@ -123,13 +119,13 @@ def test_positive_update_name(module_lce, new_name): """ module_lce.name = new_name module_lce.update(['name']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_lce, new_desc): +def test_positive_update_description(module_lce, new_desc, module_target_sat): """Create lifecycle environment providing the initial description, then update its description to another one. 
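# A new lifecycle environment defaults its `prior` pointer to Library, which
# test_positive_create_prior above asserts; chaining an explicit predecessor is
# the same call with `prior=` set (a sketch, assuming nailgun accepts the kwarg
# at creation; not taken verbatim from this patch):
first = target_sat.api.LifecycleEnvironment(organization=module_org).create()
assert first.prior.read().name == ENVIRONMENT  # ENVIRONMENT == 'Library'
second = target_sat.api.LifecycleEnvironment(
    organization=module_org, prior=first
).create()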
@@ -137,15 +133,13 @@ def test_positive_update_description(module_lce, new_desc): :expectedresults: Lifecycle environment is created and updated properly - :CaseLevel: Integration - :CaseImportance: Low :parametrized: yes """ module_lce.description = new_desc module_lce.update(['description']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_desc == updated.description @@ -163,8 +157,8 @@ def test_negative_update_name(module_lce, new_name): :parametrized: yes """ + module_lce.name = new_name with pytest.raises(HTTPError): - module_lce.name = new_name module_lce.update(['name']) lce = module_lce.read() assert lce.name != new_name @@ -173,7 +167,7 @@ def test_negative_update_name(module_lce, new_name): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete(lce, name): +def test_positive_delete(lce, name, target_sat): """Create lifecycle environment and then delete it. :id: cd5a97ca-c1e8-41c7-8d6b-f908916b24e1 @@ -186,17 +180,17 @@ def test_positive_delete(lce, name): """ lce.delete() with pytest.raises(HTTPError): - entities.LifecycleEnvironment(id=lce.id).read() + target_sat.api.LifecycleEnvironment(id=lce.id).read() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_search_in_org(name): +def test_positive_search_in_org(name, target_sat): """Search for a lifecycle environment and specify an org ID. :id: 110e4777-c374-4365-b676-b1db4552fe51 - :Steps: + :steps: 1. Create an organization. 2. Create a lifecycle environment belonging to the organization. @@ -205,12 +199,10 @@ def test_positive_search_in_org(name): :expectedresults: Only "Library" and the lifecycle environment just created are in the search results. - :CaseLevel: Integration - :parametrized: yes """ - new_org = entities.Organization().create() - lc_env = entities.LifecycleEnvironment(organization=new_org).create() + new_org = target_sat.api.Organization().create() + lc_env = target_sat.api.LifecycleEnvironment(organization=new_org).create() lc_envs = lc_env.search({'organization'}) assert len(lc_envs) == 2 assert {lc_env_.name for lc_env_ in lc_envs}, {'Library', lc_env.name} @@ -231,11 +223,9 @@ def test_positive_create_environment_after_host_register(): 2. Create a new content host. 3. Register the content host to the Library environment. - :Steps: Create a new environment. + :steps: Create a new environment. :expectedresults: The environment is created without any errors. 
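# One behavioral fix in this file: the attribute assignment moved out of the
# pytest.raises block, so the context manager wraps only the call expected to
# fail; leaving passing statements inside it can let an unrelated early error
# satisfy the assertion. The corrected shape:
module_lce.name = new_name         # must not raise; kept outside the block
with pytest.raises(HTTPError):
    module_lce.update(['name'])    # only the API rejection is expected here
assert module_lce.read().name != new_name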
- :CaseLevel: Integration - :CaseAutomation: NotAutomated """ diff --git a/tests/foreman/api/test_location.py b/tests/foreman/api/test_location.py index 22546cd06cd..2e54a4862fa 100644 --- a/tests/foreman/api/test_location.py +++ b/tests/foreman/api/test_location.py @@ -7,29 +7,25 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: OrganizationsandLocations :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from random import randint +from fauxfactory import gen_integer, gen_string import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string from requests.exceptions import HTTPError from robottelo.constants import DEFAULT_LOC -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized +from robottelo.utils.datafactory import ( + filtered_datapoint, + invalid_values_list, + parametrized, +) @filtered_datapoint @@ -39,7 +35,6 @@ def valid_loc_data_list(): Note: The maximum allowed length of location name is 246 only. This is an intended behavior (Also note that 255 is the standard across other entities.) - """ return dict( alpha=gen_string('alpha', randint(1, 246)), @@ -126,7 +121,6 @@ def test_positive_create_and_update_with_org(self, make_orgs, target_sat): :expectedresults: Location created successfully and has correct organization assigned to it with expected title - :CaseLevel: Integration """ location = target_sat.api.Location(organization=[make_orgs['org']]).create() assert location.organization[0].id == make_orgs['org'].id @@ -207,7 +201,6 @@ def test_positive_update_entities(self, make_entities, target_sat): :expectedresults: Location updated successfully and has correct domain assigned - :CaseLevel: Integration """ location = target_sat.api.Location().create() @@ -246,8 +239,6 @@ def test_positive_create_update_and_remove_capsule(self, make_proxies, target_sa :BZ: 1398695 - :CaseLevel: Integration - :CaseImportance: High """ proxy_id_1 = make_proxies['proxy1']['id'] @@ -275,7 +266,6 @@ def test_negative_update_domain(self, target_sat): :expectedresults: Location is not updated - :CaseLevel: Integration """ location = target_sat.api.Location(domain=[target_sat.api.Domain().create()]).create() domain = target_sat.api.Domain().create() diff --git a/tests/foreman/api/test_media.py b/tests/foreman/api/test_media.py index 1f2abbbdefe..79c5cb7f20e 100644 --- a/tests/foreman/api/test_media.py +++ b/tests/foreman/api/test_media.py @@ -4,46 +4,43 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random +from fauxfactory import gen_string, gen_url import pytest -from fauxfactory import gen_string -from fauxfactory import gen_url -from nailgun import entities from requests.exceptions import HTTPError from robottelo.constants import OPERATING_SYSTEMS -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) class TestMedia: """Tests for ``api/v2/media``.""" @pytest.fixture(scope='class') - def class_media(self, module_org): - return entities.Media(organization=[module_org]).create() + def class_media(self, module_org, class_target_sat): + return 
class_target_sat.api.Media(organization=[module_org]).create() @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', - **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) + ('name', 'new_name'), + **parametrized( + list(zip(valid_data_list().values(), valid_data_list().values(), strict=True)) + ) ) - def test_positive_crud_with_name(self, module_org, name, new_name): + def test_positive_crud_with_name(self, module_org, name, new_name, module_target_sat): """Create, update, delete media with valid name only :id: b07a4549-7dd5-4b36-a1b4-9f8d48ddfcb5 @@ -54,9 +51,9 @@ def test_positive_crud_with_name(self, module_org, name, new_name): :CaseImportance: Critical """ - media = entities.Media(organization=[module_org], name=name).create() + media = module_target_sat.api.Media(organization=[module_org], name=name).create() assert media.name == name - media = entities.Media(id=media.id, name=new_name).update(['name']) + media = module_target_sat.api.Media(id=media.id, name=new_name).update(['name']) assert media.name == new_name media.delete() with pytest.raises(HTTPError): @@ -64,7 +61,7 @@ def test_positive_crud_with_name(self, module_org, name, new_name): @pytest.mark.tier1 @pytest.mark.parametrize('os_family', **parametrized(OPERATING_SYSTEMS)) - def test_positive_create_update_with_os_family(self, module_org, os_family): + def test_positive_create_update_with_os_family(self, module_org, os_family, module_target_sat): """Create and update media with every OS family possible :id: d02404f0-b2ad-412c-b1cd-0548254f7c88 @@ -74,41 +71,43 @@ def test_positive_create_update_with_os_family(self, module_org, os_family): :expectedresults: Media entity is created and has proper OS family assigned """ - media = entities.Media(organization=[module_org], os_family=os_family).create() + media = module_target_sat.api.Media(organization=[module_org], os_family=os_family).create() assert media.os_family == os_family new_os_family = random.choice(OPERATING_SYSTEMS) media.os_family = new_os_family assert media.update(['os_family']).os_family == new_os_family @pytest.mark.tier2 - def test_positive_create_with_location(self, module_org, module_location): + def test_positive_create_with_location(self, module_org, module_location, module_target_sat): """Create media entity assigned to non-default location :id: 1c4fa736-c145-46ca-9feb-c4046fc778c6 :expectedresults: Media entity is created and has proper location - :CaseLevel: Integration """ - media = entities.Media(organization=[module_org], location=[module_location]).create() + media = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() assert media.location[0].read().name == module_location.name @pytest.mark.tier2 - def test_positive_create_with_os(self, module_org): + def test_positive_create_with_os(self, module_org, module_target_sat): """Create media entity assigned to operating system entity :id: dec22198-ed07-480c-9306-fa5458baec0b :expectedresults: Media entity is created and assigned to expected OS - :CaseLevel: Integration """ - os = entities.OperatingSystem().create() - media = entities.Media(organization=[module_org], operatingsystem=[os]).create() + os = module_target_sat.api.OperatingSystem().create() + media = module_target_sat.api.Media( + organization=[module_org], operatingsystem=[os] + ).create() assert os.read().medium[0].read().name == media.name @pytest.mark.tier2 - def test_positive_create_update_url(self, module_org): +
def test_positive_create_update_url(self, module_org, module_target_sat): """Create media entity providing the initial url path, then update that url to another valid one. @@ -119,15 +118,15 @@ def test_positive_create_update_url(self, module_org): :CaseImportance: Medium """ url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(organization=[module_org], path_=url).create() + media = module_target_sat.api.Media(organization=[module_org], path_=url).create() assert media.path_ == url new_url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(id=media.id, path_=new_url).update(['path_']) + media = module_target_sat.api.Media(id=media.id, path_=new_url).update(['path_']) assert media.path_ == new_url @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Try to create media entity providing an invalid name :id: 0934f4dc-f674-40fe-a639-035761139c83 @@ -139,10 +138,10 @@ def test_negative_create_with_invalid_name(self, name): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(name=name).create() + target_sat.api.Media(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_invalid_url(self): + def test_negative_create_with_invalid_url(self, target_sat): """Try to create media entity providing an invalid URL :id: ae00b6bb-37ed-459e-b9f7-acc92ed0b262 @@ -152,10 +151,10 @@ def test_negative_create_with_invalid_url(self): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(path_='NON_EXISTENT_URL').create() + target_sat.api.Media(path_='NON_EXISTENT_URL').create() @pytest.mark.tier1 - def test_negative_create_with_invalid_os_family(self): + def test_negative_create_with_invalid_os_family(self, target_sat): """Try to create media entity providing an invalid OS family :id: 368b7eac-8c52-4071-89c0-1946d7101291 @@ -165,11 +164,11 @@ def test_negative_create_with_invalid_os_family(self): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(os_family='NON_EXISTENT_OS').create() + target_sat.api.Media(os_family='NON_EXISTENT_OS').create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, module_org, class_media, new_name): + def test_negative_update_name(self, class_media, new_name, target_sat): """Create media entity providing the initial name, then try to update its name to invalid one. @@ -182,10 +181,10 @@ def test_negative_update_name(self, module_org, class_media, new_name): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, name=new_name).update(['name']) + target_sat.api.Media(id=class_media.id, name=new_name).update(['name']) @pytest.mark.tier1 - def test_negative_update_url(self, module_org, class_media): + def test_negative_update_url(self, class_media, target_sat): """Try to update media with invalid url. 
:id: 6832f178-4adc-4bb1-957d-0d8d4fd8d9cd @@ -195,10 +194,10 @@ def test_negative_update_url(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) + target_sat.api.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) @pytest.mark.tier1 - def test_negative_update_os_family(self, module_org, class_media): + def test_negative_update_os_family(self, class_media, target_sat): """Try to update media with invalid operating system. :id: f4c5438d-5f98-40b1-9bc7-c0741e81303a @@ -208,4 +207,6 @@ def test_negative_update_os_family(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update(['os_family']) + target_sat.api.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update( + ['os_family'] + ) diff --git a/tests/foreman/api/test_multiple_paths.py b/tests/foreman/api/test_multiple_paths.py index bf975ce7f58..8c683ca309a 100644 --- a/tests/foreman/api/test_multiple_paths.py +++ b/tests/foreman/api/test_multiple_paths.py @@ -4,31 +4,22 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: API :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import http +from nailgun import client, entities, entity_fields import pytest -from nailgun import client -from nailgun import entities -from nailgun import entity_fields -from robottelo.config import get_credentials -from robottelo.config import user_nailgun_config +from robottelo.config import get_credentials, user_nailgun_config from robottelo.logging import logger from robottelo.utils.datafactory import parametrized - VALID_ENTITIES = { entities.ActivationKey, entities.Architecture, @@ -92,7 +83,7 @@ def _get_readable_attributes(entity): for field_name in list(attributes.keys()): if isinstance( entity.get_fields()[field_name], - (entity_fields.OneToOneField, entity_fields.OneToManyField), + entity_fields.OneToOneField | entity_fields.OneToManyField, ): del attributes[field_name] @@ -139,7 +130,7 @@ def test_positive_get_status_code(self, entity_cls): logger.info('test_get_status_code arg: %s', entity_cls) response = client.get(entity_cls().path(), auth=get_credentials(), verify=False) response.raise_for_status() - assert http.client.OK == response.status_code + assert response.status_code == http.client.OK assert 'application/json' in response.headers['content-type'] @pytest.mark.tier1 @@ -160,7 +151,7 @@ def test_negative_get_unauthorized(self, entity_cls): """ logger.info('test_get_unauthorized arg: %s', entity_cls) response = client.get(entity_cls().path(), auth=(), verify=False) - assert http.client.UNAUTHORIZED == response.status_code + assert response.status_code == http.client.UNAUTHORIZED @pytest.mark.tier3 @pytest.mark.parametrize( @@ -182,7 +173,7 @@ def test_positive_post_status_code(self, entity_cls): :BZ: 1118015 """ response = entity_cls().create_raw() - assert http.client.CREATED == response.status_code + assert response.status_code == http.client.CREATED assert 'application/json' in response.headers['content-type'] @pytest.mark.tier1 @@ -204,7 +195,7 @@ def test_negative_post_unauthorized(self, entity_cls): """ server_cfg = user_nailgun_config() return_code = entity_cls(server_cfg).create_raw(create_missing=False).status_code - assert http.client.UNAUTHORIZED == return_code + assert return_code == http.client.UNAUTHORIZED class TestEntityId: @@ -226,7
+217,7 @@ def test_positive_get_status_code(self, entity_cls): """ entity = entity_cls(id=entity_cls().create_json()['id']) response = entity.read_raw() - assert http.client.OK == response.status_code + assert response.status_code == http.client.OK assert 'application/json' in response.headers['content-type'] @pytest.mark.tier1 @@ -257,7 +248,7 @@ def test_positive_put_status_code(self, entity_cls): auth=get_credentials(), verify=False, ) - assert http.client.OK == response.status_code + assert response.status_code == http.client.OK assert 'application/json' in response.headers['content-type'] @pytest.mark.tier1 @@ -334,7 +325,7 @@ def test_positive_put_and_get_requests(self, entity_cls): payload = _get_readable_attributes(new_entity) entity_attrs = entity_cls(id=entity['id']).read_json() for key, value in payload.items(): - assert key in entity_attrs.keys() + assert key in entity_attrs assert value == entity_attrs[key] @pytest.mark.tier1 @@ -358,7 +349,7 @@ def test_positive_post_and_get_requests(self, entity_cls): payload = _get_readable_attributes(entity) entity_attrs = entity_cls(id=entity_id).read_json() for key, value in payload.items(): - assert key in entity_attrs.keys() + assert key in entity_attrs assert value == entity_attrs[key] @pytest.mark.tier1 @@ -378,7 +369,7 @@ def test_positive_delete_and_get_requests(self, entity_cls): # Create an entity, delete it and get it. entity = entity_cls(id=entity_cls().create_json()['id']) entity.delete() - assert http.client.NOT_FOUND == entity.read_raw().status_code + assert entity.read_raw().status_code == http.client.NOT_FOUND class TestEntityRead: @@ -415,7 +406,7 @@ def test_positive_entity_read(self, entity_cls): assert isinstance(entity_cls(id=entity_id).read(), entity_cls) @pytest.mark.tier1 - def test_positive_architecture_read(self): + def test_positive_architecture_read(self, target_sat): """Create an arch that points to an OS, and read the arch. :id: e4c7babe-11d8-4f85-8382-5267a49046e9 @@ -425,14 +416,14 @@ def test_positive_architecture_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - arch_id = entities.Architecture(operatingsystem=[os_id]).create_json()['id'] - architecture = entities.Architecture(id=arch_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + arch_id = target_sat.api.Architecture(operatingsystem=[os_id]).create_json()['id'] + architecture = target_sat.api.Architecture(id=arch_id).read() assert len(architecture.operatingsystem) == 1 assert architecture.operatingsystem[0].id == os_id @pytest.mark.tier1 - def test_positive_syncplan_read(self): + def test_positive_syncplan_read(self, target_sat): """Create a SyncPlan and read it back using ``nailgun.entity_mixins.EntityReadMixin.read``. 
@@ -443,14 +434,14 @@ def test_positive_syncplan_read(self): :CaseImportance: Critical """ - org_id = entities.Organization().create_json()['id'] - syncplan_id = entities.SyncPlan(organization=org_id).create_json()['id'] + org_id = target_sat.api.Organization().create_json()['id'] + syncplan_id = target_sat.api.SyncPlan(organization=org_id).create_json()['id'] assert isinstance( - entities.SyncPlan(organization=org_id, id=syncplan_id).read(), entities.SyncPlan + target_sat.api.SyncPlan(organization=org_id, id=syncplan_id).read(), entities.SyncPlan ) @pytest.mark.tier1 - def test_positive_osparameter_read(self): + def test_positive_osparameter_read(self, target_sat): """Create an OperatingSystemParameter and get it using ``nailgun.entity_mixins.EntityReadMixin.read``. @@ -461,15 +452,15 @@ def test_positive_osparameter_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - osp_id = entities.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] + os_id = target_sat.api.OperatingSystem().create_json()['id'] + osp_id = target_sat.api.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] assert isinstance( - entities.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), - entities.OperatingSystemParameter, + target_sat.api.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), + target_sat.api.OperatingSystemParameter, ) @pytest.mark.tier1 - def test_positive_permission_read(self): + def test_positive_permission_read(self, target_sat): """Create a Permission entity and get it using ``nailgun.entity_mixins.EntityReadMixin.read``. @@ -480,12 +471,12 @@ class and name and resource_type fields are populated :CaseImportance: Critical """ - perm = entities.Permission().search(query={'per_page': '1'})[0] + perm = target_sat.api.Permission().search(query={'per_page': '1'})[0] assert perm.name assert perm.resource_type @pytest.mark.tier1 - def test_positive_media_read(self): + def test_positive_media_read(self, target_sat): """Create a media pointing at an OS and read the media.
:id: 67b656fe-9302-457a-b544-3addb11c85e0 @@ -494,8 +485,8 @@ def test_positive_media_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - media_id = entities.Media(operatingsystem=[os_id]).create_json()['id'] - media = entities.Media(id=media_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + media_id = target_sat.api.Media(operatingsystem=[os_id]).create_json()['id'] + media = target_sat.api.Media(id=media_id).read() assert len(media.operatingsystem) == 1 assert media.operatingsystem[0].id == os_id diff --git a/tests/foreman/api/test_notifications.py b/tests/foreman/api/test_notifications.py new file mode 100644 index 00000000000..c6fd2f3e5dc --- /dev/null +++ b/tests/foreman/api/test_notifications.py @@ -0,0 +1,258 @@ +"""Test class for Notifications API + +:Requirement: Notifications + +:CaseAutomation: Automated + +:CaseComponent: Notifications + +:Team: Endeavour + +:CaseImportance: High + +""" +from mailbox import mbox +from re import findall +from tempfile import mkstemp + +from fauxfactory import gen_string +import pytest +from wait_for import TimedOutError, wait_for + +from robottelo.config import settings +from robottelo.constants import DEFAULT_LOC, DEFAULT_ORG + + +@pytest.fixture +def admin_user_with_localhost_email(target_sat): + """Admin user with e-mail set to `root@localhost`.""" + user = target_sat.api.User( + admin=True, + default_organization=DEFAULT_ORG, + default_location=DEFAULT_LOC, + description='created by nailgun', + login=gen_string("alphanumeric"), + password=gen_string("alphanumeric"), + mail='root@localhost', + ).create() + user.mail_enabled = True + user.update() + + yield user + + user.delete() + + +@pytest.fixture +def reschedule_long_running_tasks_notification(target_sat): + """Reschedule long-running tasks checker from midnight (default) to every minute. + Reset it back after the test. + """ + default_cron_schedule = '0 0 * * *' + every_minute_cron_schedule = '* * * * *' + + assert ( + target_sat.execute( + "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker " + f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{every_minute_cron_schedule}'" + ).status + == 0 + ) + + yield + + assert ( + target_sat.execute( + "foreman-rake foreman_tasks:reschedule_long_running_tasks_checker " + f"FOREMAN_TASKS_CHECK_LONG_RUNNING_TASKS_CRONLINE='{default_cron_schedule}'" + ).status + == 0 + ) + + +@pytest.fixture(autouse=True) +def start_postfix_service(target_sat): + """Start postfix service (disabled by default).""" + assert target_sat.execute('systemctl start postfix').status == 0 + + +@pytest.fixture +def clean_root_mailbox(target_sat): + """Backup & purge local mailbox of the Satellite's root@localhost user. + Restore it afterwards. + """ + root_mailbox = '/var/spool/mail/root' + root_mailbox_backup = f'{root_mailbox}-{gen_string("alphanumeric")}.bak' + target_sat.execute(f'cp -f {root_mailbox} {root_mailbox_backup}') + target_sat.execute(f'truncate -s 0 {root_mailbox}') + + yield root_mailbox + + target_sat.execute(f'mv -f {root_mailbox_backup} {root_mailbox}') + + +def wait_for_mail(sat_obj, mailbox_file, contains_string, timeout=300, delay=5): + """ + Wait until the desired string is found in the Satellite's mbox file. 
+ """ + try: + wait_for( + func=sat_obj.execute, + func_args=[f"grep --quiet '{contains_string}' {mailbox_file}"], + fail_condition=lambda res: res.status != 0, + timeout=timeout, + delay=delay, + ) + except TimedOutError as err: + raise AssertionError( + f'No e-mail with text "{contains_string}" has arrived to mailbox {mailbox_file} ' + f'after {timeout} seconds.' + ) from err + return True + + +@pytest.fixture +def wait_for_long_running_task_mail(target_sat, clean_root_mailbox, long_running_task): + """Wait until the long-running task ID is found in the Satellite's mbox file.""" + return wait_for_mail( + sat_obj=target_sat, + mailbox_file=clean_root_mailbox, + contains_string=long_running_task["task"]["id"], + ) + + +@pytest.fixture +def root_mailbox_copy(target_sat, clean_root_mailbox): + """Parsed local system copy of the Satellite's root user mailbox. + + :returns: :class:`mailbox.mbox` instance + """ + result = target_sat.execute(f'cat {clean_root_mailbox}') + assert result.status == 0, f'Could not read mailbox {clean_root_mailbox} on Satellite host.' + mbox_content = result.stdout + _, local_mbox_file = mkstemp() + with open(local_mbox_file, 'w') as fh: + fh.writelines(mbox_content) + return mbox(path=local_mbox_file) + + +@pytest.fixture +def long_running_task(target_sat): + """Create an async task and set its start time and last report time to two days ago. + After the test finishes, the task is cancelled. + """ + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Run Command - Script Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'organization': DEFAULT_ORG, + 'location': DEFAULT_LOC, + 'inputs': { + 'command': 'sleep 300', + }, + 'targeting_type': 'static_query', + 'search_query': f'name = {target_sat.hostname}', + 'password': settings.server.ssh_password, + }, + ) + sql_date_2_days_ago = "now() - INTERVAL \'2 days\'" + result = target_sat.execute( + "su - postgres -c \"psql foreman postgres <= 9: + assert ( + provisioning_host.execute( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) + host_ssh_os = sat.execute( + f'sshpass -p {settings.provisioning.host_root_password} ' + 'ssh -o StrictHostKeyChecking=no -o PubkeyAuthentication=no -o PasswordAuthentication=yes ' + f'-o UserKnownHostsFile=/dev/null root@{provisioning_host.hostname} cat /etc/redhat-release' + ) + assert host_ssh_os.status == 0 assert ( - provisioning_host.os_version == expected_rhel_version + expected_rhel_version in host_ssh_os.stdout ), 'Different than the expected OS version was installed' + # Verify provisioning log exists on host at correct path + assert provisioning_host.execute('test -s /root/install.post.log').status == 0 + assert provisioning_host.execute('test -s /mnt/sysimage/root/install.post.log').status == 1 + # Run a command on the host using REX to verify that Satellite's SSH key is present on the host template_id = ( sat.api.JobTemplate().search(query={'search': 'name="Run Command - Script Default"'})[0].id @@ -152,646 +187,545 @@ def test_rhel_pxe_provisioning( assert provisioning_host.subscribed, 'Host is not subscribed' -class TestOperatingSystemParameter: - """Tests for operating system parameters.""" - - @pytest.mark.tier1 - @pytest.mark.upgrade - def test_verify_bugzilla_1114640(self, target_sat): - """Create a parameter for operating system 1. 
- - :id: e817ae43-226c-44e3-b559-62b8d394047b - - :expectedresults: A parameter is created and can be read afterwards. - - :CaseImportance: Critical - """ - # Check whether OS 1 exists. - os1 = target_sat.api.OperatingSystem(id=1).read_raw() - if os1.status_code == NOT_FOUND and target_sat.api.OperatingSystem().create().id != 1: - pytest.skip('Cannot execute test, as operating system 1 is not available.') - - # Create and read a parameter for operating system 1. The purpose of - # this test is to make sure an HTTP 422 is not returned, but we're also - # going to verify the name and value of the parameter, just for good - # measure. - name = gen_string('utf8') - value = gen_string('utf8') - os_param = target_sat.api.OperatingSystemParameter( - name=name, operatingsystem=1, value=value - ).create() - assert os_param.name == name - assert os_param.value == value - - -class TestOperatingSystem: - """Tests for operating systems.""" - - @pytest.mark.tier1 - @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_name(self, name, target_sat): - """Create operating system with valid name only - - :id: e95707bf-3344-4d85-866f-4642a8f66cff - - :parametrized: yes - - :expectedresults: Operating system entity is created and has proper - name - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(name=name).create() - assert os.name == name - - @pytest.mark.tier1 - @pytest.mark.parametrize('os_family', **parametrized(OPERATING_SYSTEMS)) - def test_positive_create_with_os_family(self, os_family, target_sat): - """Create operating system with every OS family possible - - :id: 6ad32d22-53cc-4bab-ac10-f466f75d7cc6 - - :parametrized: yes - - :expectedresults: Operating system entity is created and has proper OS - family assigned - - :CaseAutomation: Automated - - :CaseImportance: Critical - """ - if is_open('BZ:1709683') and os_family == 'Debian': - pytest.skip("BZ 1709683") - os = target_sat.api.OperatingSystem(family=os_family).create() - assert os.family == os_family - - @pytest.mark.tier1 - def test_positive_create_with_minor_version(self, target_sat): - """Create operating system with minor version - - :id: fc2e36ca-eb5c-440b-957e-390cd9820945 - - :expectedresults: Operating system entity is created and has proper - minor version - - :CaseImportance: Critical - """ - minor_version = gen_string('numeric') - os = target_sat.api.OperatingSystem(minor=minor_version).create() - assert os.minor == minor_version - - @pytest.mark.tier1 - def test_positive_read_minor_version_as_string(self, target_sat): - """Create an operating system with an integer minor version. - - :id: b45e0b94-62f7-45ff-a19e-83c7a0f51339 - - :expectedresults: The minor version can be read back as a string. 
- - :CaseImportance: Critical - - :BZ: 1230902 - """ - minor = int(gen_string('numeric', random.randint(1, 16))) - operating_sys = target_sat.api.OperatingSystem(minor=minor).create() - assert operating_sys.minor == str(minor) - - @pytest.mark.tier1 - @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, desc, target_sat): - """Create operating system with description - - :id: 980e6411-da11-4fec-ae46-47722367ae40 - - :parametrized: yes - - :expectedresults: Operating system entity is created and has proper - description - - :CaseImportance: Critical - """ - name = gen_string('utf8') - os = target_sat.api.OperatingSystem(name=name, description=desc).create() - assert os.name == name - assert os.description == desc - - @pytest.mark.tier1 - @pytest.mark.parametrize('pass_hash', **parametrized(('SHA256', 'SHA512'))) - def test_positive_create_with_password_hash(self, pass_hash, target_sat): - """Create operating system with valid password hash option - - :id: 00830e71-b414-41ab-bc8f-03fd2fbd5a84 - - :parametrized: yes - - :expectedresults: Operating system entity is created and has proper - password hash type - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(password_hash=pass_hash).create() - assert os.password_hash == pass_hash - - @pytest.mark.tier2 - def test_positive_create_with_arch(self, target_sat): - """Create an operating system that points at an architecture. - - :id: 6a3f7183-b0bf-4834-8c69-a49fe8d7ee5a - - :expectedresults: The operating system is created and points at the - given architecture. - - :CaseLevel: Integration - """ - arch = target_sat.api.Architecture().create() - operating_sys = target_sat.api.OperatingSystem(architecture=[arch]).create() - assert len(operating_sys.architecture) == 1 - assert operating_sys.architecture[0].id == arch.id - - @pytest.mark.tier2 - def test_positive_create_with_archs(self, target_sat): - """Create an operating system that points at multiple different - architectures. - - :id: afd26c6a-bf54-4883-baa5-95f263e6fb36 - - :expectedresults: The operating system is created and points at the - expected architectures. - - :CaseLevel: Integration - """ - amount = range(random.randint(3, 5)) - archs = [target_sat.api.Architecture().create() for _ in amount] - operating_sys = target_sat.api.OperatingSystem(architecture=archs).create() - assert len(operating_sys.architecture) == len(amount) - assert {arch.id for arch in operating_sys.architecture} == {arch.id for arch in archs} - - @pytest.mark.tier2 - def test_positive_create_with_ptable(self, target_sat): - """Create an operating system that points at a partition table. - - :id: bef37ff9-d8fa-4518-9073-0518aa9f9a42 - - :expectedresults: The operating system is created and points at the - given partition table. - - :CaseLevel: Integration - """ - ptable = target_sat.api.PartitionTable().create() - operating_sys = target_sat.api.OperatingSystem(ptable=[ptable]).create() - assert len(operating_sys.ptable) == 1 - assert operating_sys.ptable[0].id == ptable.id - - @pytest.mark.tier2 - def test_positive_create_with_ptables(self, target_sat): - """Create an operating system that points at multiple different - partition tables. - - :id: ed48a279-a222-45ce-81e4-72ae9422482a - - :expectedresults: The operating system is created and points at the - expected partition tables. 
- - :CaseLevel: Integration - """ - amount = range(random.randint(3, 5)) - ptables = [target_sat.api.PartitionTable().create() for _ in amount] - operating_sys = target_sat.api.OperatingSystem(ptable=ptables).create() - assert len(operating_sys.ptable) == len(amount) - assert {ptable.id for ptable in operating_sys.ptable} == {ptable.id for ptable in ptables} - - @pytest.mark.tier2 - def test_positive_create_with_media(self, module_org, module_target_sat): - """Create an operating system that points at a media. - - :id: 56fadee4-c676-48b6-a2db-e6fef9d2a575 - - :expectedresults: The operating system is created and points at the - given media. +@pytest.mark.e2e +@pytest.mark.upgrade +@pytest.mark.parametrize('pxe_loader', ['ipxe'], indirect=True) +@pytest.mark.on_premises_provisioning +@pytest.mark.rhel_ver_match('[^6]') +def test_rhel_ipxe_provisioning( + request, + module_provisioning_sat, + module_sca_manifest_org, + module_location, + provisioning_host, + pxe_loader, + module_provisioning_rhel_content, + provisioning_hostgroup, + module_lce_library, + module_default_org_view, +): + """Provision a host using iPXE workflow - :CaseLevel: Integration - """ - medium = module_target_sat.api.Media(organization=[module_org]).create() - operating_sys = module_target_sat.api.OperatingSystem(medium=[medium]).create() - assert len(operating_sys.medium) == 1 - assert operating_sys.medium[0].id == medium.id + :id: 9e016e1d-757a-48e7-9159-131bb65dc4ed - @pytest.mark.tier2 - def test_positive_create_with_template(self, module_org, module_target_sat): - """Create an operating system that points at a provisioning template. + :steps: + 1. Configure satellite for provisioning + 2. provision a host + 3. Check that resulting host is registered to Satellite + 4. Check host is subscribed to Satellite - :id: df73ecba-5a1c-4201-9c2f-b2e03e8fec25 + :expectedresults: + 1. Provisioning via iPXE is successful + 2. Host installs right version of RHEL + 3. Satellite is able to run REX job on the host + 4. Host is registered to Satellite and subscription status is 'Success' - :expectedresults: The operating system is created and points at the - expected provisioning template. + :parametrized: yes + """ + # TODO: parametrize iPXE Chain BIOS as pxe loader after #BZ:2171172 is fixed + sat = module_provisioning_sat.sat + # set http url + ipxe_http_url = sat.install( + InstallerCommand( + f'foreman-proxy-dhcp-ipxefilename "http://{sat.hostname}/unattended/iPXE?bootstrap=1"' + ) + ) + assert ipxe_http_url.status == 0 + host_mac_addr = provisioning_host._broker_args['provisioning_nic_mac_addr'] + host = sat.api.Host( + hostgroup=provisioning_hostgroup, + organization=module_sca_manifest_org, + location=module_location, + name=gen_string('alpha').lower(), + mac=host_mac_addr, + operatingsystem=module_provisioning_rhel_content.os, + subnet=module_provisioning_sat.subnet, + host_parameters_attributes=[ + {'name': 'remote_execution_connect_by_ip', 'value': 'true', 'parameter_type': 'boolean'} + ], + build=True, # put the host in build mode + ).create(create_missing=False) + # Clean up the host to free IP leases on Satellite. + # broker should do that as a part of the teardown, putting here just to make sure. 
+ request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) - :CaseLevel: Integration - """ - template = module_target_sat.api.ProvisioningTemplate(organization=[module_org]).create() - operating_sys = module_target_sat.api.OperatingSystem( - provisioning_template=[template] - ).create() - assert len(operating_sys.provisioning_template) == 1 - assert operating_sys.provisioning_template[0].id == template.id + # Start the VM, do not ensure that we can connect to SSHD + provisioning_host.power_control(ensure=False) - @pytest.mark.tier1 - @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name, target_sat): - """Try to create operating system entity providing an invalid - name + # TODO: Implement Satellite log capturing logic to verify that + # all the events are captured in the logs. - :id: cd4286fd-7128-4385-9c8d-ef979c22ee38 + # Host should do call back to the Satellite reporting + # the result of the installation. Wait until Satellite reports that the host is installed. + wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + host = host.read() + assert host.build_status_label == 'Installed' - :parametrized: yes + # Change the hostname of the host as we know it already. + # In the current infra environment we do not support + # addressing hosts using FQDNs, falling back to IP. + provisioning_host.hostname = host.ip + # Host is not blank anymore + provisioning_host.blank = False - :expectedresults: Operating system entity is not created + # Wait for the host to be rebooted and SSH daemon to be started. + provisioning_host.wait_for_connection() - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(name=name).create() + # Perform version check and check if root password is properly updated + host_os = host.operatingsystem.read() + expected_rhel_version = f'{host_os.major}.{host_os.minor}' + + if int(host_os.major) >= 9: + assert ( + provisioning_host.execute( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) + host_ssh_os = sat.execute( + f'sshpass -p {settings.provisioning.host_root_password} ' + 'ssh -o StrictHostKeyChecking=no -o PubkeyAuthentication=no -o PasswordAuthentication=yes ' + f'-o UserKnownHostsFile=/dev/null root@{provisioning_host.hostname} cat /etc/redhat-release' + ) + assert host_ssh_os.status == 0 + assert ( + expected_rhel_version in host_ssh_os.stdout + ), f'The installed OS version differs from the expected version {expected_rhel_version}' - @pytest.mark.tier1 - def test_negative_create_with_invalid_os_family(self, target_sat): - """Try to create operating system entity providing an invalid - operating system family + # Run a command on the host using REX to verify that Satellite's SSH key is present on the host + template_id = ( + sat.api.JobTemplate().search(query={'search': 'name="Run Command - Script Default"'})[0].id + ) + job = sat.api.JobInvocation().run( + data={ + 'job_template_id': template_id, + 'inputs': { + 'command': f'subscription-manager config | grep "hostname = {sat.hostname}"' + }, + 'search_query': f"name = {host.name}", + 'targeting_type': 'static_query', + }, + ) + assert job['result'] == 'success', 'Job invocation failed' - :id: 205a433d-750b-4b06-9fd4-274303780d6d + # assert that the host is subscribed and consumes + # subscription provided by the activation key + assert provisioning_host.subscribed, 'Host is
not subscribed' - :expectedresults: Operating system entity is not created - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(family='NON_EXISTENT_OS').create() +@pytest.mark.skip_if_open("BZ:2242925") +@pytest.mark.e2e +@pytest.mark.upgrade +@pytest.mark.parametrize('pxe_loader', ['http_uefi'], indirect=True) +@pytest.mark.on_premises_provisioning +@pytest.mark.rhel_ver_match('[^6]') +def test_rhel_httpboot_provisioning( + request, + module_provisioning_sat, + module_sca_manifest_org, + module_location, + provisioning_host, + pxe_loader, + module_provisioning_rhel_content, + provisioning_hostgroup, + module_lce_library, + module_default_org_view, +): + """Provision a host using httpboot workflow - @pytest.mark.tier1 - def test_negative_create_with_too_long_description(self, target_sat): - """Try to create operating system entity providing too long - description value + :id: 98c2865e-5d21-402e-ad01-c474b7fc4eee - :id: fe5fc36a-5994-4d8a-91f6-0425765b8c39 + :steps: + 1. Configure satellite for provisioning + 2. provision a host using pxe loader as Grub2 UEFI HTTP + 3. Check that resulting host is registered to Satellite + 4. Check host is subscribed to Satellite - :expectedresults: Operating system entity is not created + :expectedresults: + 1. Provisioning via HTTP is successful + 2. Host installs right version of RHEL + 3. Satellite is able to run REX job on the host + 4. Host is registered to Satellite and subscription status is 'Success' - :BZ: 1328935 + :parametrized: yes - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(description=gen_string('alphanumeric', 256)).create() + :BZ: 2242925 + """ + sat = module_provisioning_sat.sat + # update grub2-efi package + sat.cli.Packages.update(packages='grub2-efi', options={'assumeyes': True}) - @pytest.mark.skip_if_open('BZ:2101435') - @pytest.mark.tier1 - @pytest.mark.parametrize('major_version', **parametrized((gen_string('numeric', 6), '', '-6'))) - def test_negative_create_with_invalid_major_version(self, major_version, target_sat): - """Try to create operating system entity providing incorrect - major version value (More than 5 characters, empty value, negative - number) + host_mac_addr = provisioning_host._broker_args['provisioning_nic_mac_addr'] + host = sat.api.Host( + hostgroup=provisioning_hostgroup, + organization=module_sca_manifest_org, + location=module_location, + name=gen_string('alpha').lower(), + mac=host_mac_addr, + operatingsystem=module_provisioning_rhel_content.os, + subnet=module_provisioning_sat.subnet, + host_parameters_attributes=[ + {'name': 'remote_execution_connect_by_ip', 'value': 'true', 'parameter_type': 'boolean'} + ], + build=True, # put the host in build mode + ).create(create_missing=False) + # Clean up the host to free IP leases on Satellite. + # broker should do that as a part of the teardown, putting here just to make sure. + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) - :id: f2646bc2-d639-4079-bdcb-ff76679f1457 + # Start the VM, do not ensure that we can connect to SSHD + provisioning_host.power_control(ensure=False) + # check for proper HTTP requests + shell = module_provisioning_sat.session.shell() + shell.send('foreman-tail') + assert_host_logs(shell, f'GET /httpboot/grub2/grub.cfg-{host_mac_addr} with 200') + # Host should do call back to the Satellite reporting + # the result of the installation. Wait until Satellite reports that the host is installed. 
+ wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + host = host.read() + assert host.build_status_label == 'Installed' - :parametrized: yes + # Change the hostname of the host as we know it already. + # In the current infra environment we do not support + # addressing hosts using FQDNs, falling back to IP. + provisioning_host.hostname = host.ip + # Host is not blank anymore + provisioning_host.blank = False - :expectedresults: Operating system entity is not created + # Wait for the host to be rebooted and SSH daemon to be started. + provisioning_host.wait_for_connection() - :BZ: 2101435 + # Perform version check and check if root password is properly updated + host_os = host.operatingsystem.read() + expected_rhel_version = f'{host_os.major}.{host_os.minor}' + + if int(host_os.major) >= 9: + assert ( + provisioning_host.execute( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) + host_ssh_os = sat.execute( + f'sshpass -p {settings.provisioning.host_root_password} ' + 'ssh -o StrictHostKeyChecking=no -o PubkeyAuthentication=no -o PasswordAuthentication=yes ' + f'-o UserKnownHostsFile=/dev/null root@{provisioning_host.hostname} cat /etc/redhat-release' + ) + assert host_ssh_os.status == 0 + assert ( + expected_rhel_version in host_ssh_os.stdout + ), f'The installed OS version differs from the expected version {expected_rhel_version}' - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(major=major_version).create() + # Run a command on the host using REX to verify that Satellite's SSH key is present on the host + template_id = ( + sat.api.JobTemplate().search(query={'search': 'name="Run Command - Script Default"'})[0].id + ) + job = sat.api.JobInvocation().run( + data={ + 'job_template_id': template_id, + 'inputs': { + 'command': f'subscription-manager config | grep "hostname = {sat.hostname}"' + }, + 'search_query': f"name = {host.name}", + 'targeting_type': 'static_query', + }, + ) + assert job['result'] == 'success', 'Job invocation failed' - @pytest.mark.tier1 - @pytest.mark.parametrize('minor_version', **parametrized((gen_string('numeric', 17), '-5'))) - def test_negative_create_with_invalid_minor_version(self, minor_version, target_sat): - """Try to create operating system entity providing incorrect - minor version value (More than 16 characters and negative number) + # assert that the host is subscribed and consumes + # subscription provided by the activation key + assert provisioning_host.subscribed, 'Host is not subscribed' - :id: dec4b456-153c-4a66-8b8e-b12ac7800e51 - :parametrized: yes +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.on_premises_provisioning +@pytest.mark.rhel_ver_match('[^6]') +def test_rhel_pxe_provisioning_fips_enabled( + request, + module_provisioning_sat, + module_sca_manifest_org, + module_location, + provisioning_host, + pxe_loader, + module_provisioning_rhel_content, + provisioning_hostgroup, + module_lce_library, + module_default_org_view, +): + """Provision a host with host param fips_enabled set to true - :expectedresults: Operating system entity is not created + :id: 9e016e1d-757a-48e7-9159-131bb65dc4ef - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(minor=minor_version).create() + :steps: + 1. Configure satellite for provisioning + 2. Provision a host with host param fips_enabled + 3.
Check that resulting host is registered to Satellite + 4. Check host is subscribed to Satellite - @pytest.mark.tier1 - def test_negative_create_with_invalid_password_hash(self, target_sat): - """Try to create operating system entity providing invalid - password hash value + :expectedresults: + 1. Provisioning with host param fips_enabled is successful + 2. Host installs right version of RHEL + 3. Satellite is able to run REX job on the fips_enabled host + 4. Host is registered to Satellite and subscription status is 'Success' - :id: 9cfcb6d4-0601-4fc7-bd1e-8b8327129a69 + :parametrized: yes - :expectedresults: Operating system entity is not created + :BZ: 2240076 + """ + sat = module_provisioning_sat.sat + host_mac_addr = provisioning_host._broker_args['provisioning_nic_mac_addr'] + # Verify password hashing algorithm SHA256 is set in OS used for provisioning + assert module_provisioning_rhel_content.os.password_hash == 'SHA256' - :CaseImportance: Critical - """ - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(password_hash='INVALID_HASH').create() + host = sat.api.Host( + hostgroup=provisioning_hostgroup, + organization=module_sca_manifest_org, + location=module_location, + name=gen_string('alpha').lower(), + mac=host_mac_addr, + operatingsystem=module_provisioning_rhel_content.os, + subnet=module_provisioning_sat.subnet, + host_parameters_attributes=[ + { + 'name': 'remote_execution_connect_by_ip', + 'value': 'true', + 'parameter_type': 'boolean', + }, + {'name': 'fips_enabled', 'value': 'true', 'parameter_type': 'boolean'}, + ], + build=True, # put the host in build mode + ).create(create_missing=False) + # Clean up the host to free IP leases on Satellite. + # broker should do that as a part of the teardown, putting here just to make sure. + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + # Start the VM, do not ensure that we can connect to SSHD + provisioning_host.power_control(ensure=False) - @pytest.mark.tier1 - def test_negative_create_with_same_name_and_version(self, target_sat): - """Create operating system providing valid name and major - version. Then try to create operating system using the same name and - version + # TODO: Implement Satellite log capturing logic to verify that + # all the events are captured in the logs. - :id: 3f2ca323-7789-4d2b-bf21-2454317147ff + # Host should do call back to the Satellite reporting + # the result of the installation. Wait until Satellite reports that the host is installed. + wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + host = host.read() + assert host.build_status_label == 'Installed' - :expectedresults: Second operating system entity is not created + # Change the hostname of the host as we know it already. + # In the current infra environment we do not support + # addressing hosts using FQDNs, falling back to IP. + provisioning_host.hostname = host.ip + # Host is not blank anymore + provisioning_host.blank = False - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem().create() - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(name=os.name, major=os.major).create() + # Wait for the host to be rebooted and SSH daemon to be started. 
+ provisioning_host.wait_for_connection() - @pytest.mark.tier1 - @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) - def test_positive_update_name(self, new_name, target_sat): - """Create operating system entity providing the initial name, - then update its name to another valid name. + # Perform version check and check if root password is properly updated + host_os = host.operatingsystem.read() + expected_rhel_version = f'{host_os.major}.{host_os.minor}' + + if int(host_os.major) >= 9: + assert ( + provisioning_host.execute( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) + host_ssh_os = sat.execute( + f'sshpass -p {settings.provisioning.host_root_password} ' + 'ssh -o StrictHostKeyChecking=no -o PubkeyAuthentication=no -o PasswordAuthentication=yes ' + f'-o UserKnownHostsFile=/dev/null root@{provisioning_host.hostname} cat /etc/redhat-release' + ) + assert host_ssh_os.status == 0 + assert ( + expected_rhel_version in host_ssh_os.stdout + ), f'The installed OS version differs from the expected version {expected_rhel_version}' + + # Verify FIPS is enabled on host after provisioning is completed successfully + if int(host_os.major) >= 8: + result = provisioning_host.execute('fips-mode-setup --check') + fips_status = 'FIPS mode is disabled' if is_open('BZ:2240076') else 'FIPS mode is enabled' + assert fips_status in result.stdout + else: + result = provisioning_host.execute('cat /proc/sys/crypto/fips_enabled') + assert (0 if is_open('BZ:2240076') else 1) == int(result.stdout)
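+    # For reference on the two probes above: RHEL 8+ ships `fips-mode-setup --check`, which prints 'FIPS mode is enabled.' or 'FIPS mode is disabled.', while older releases only expose the kernel flag at /proc/sys/crypto/fips_enabled (0 or 1). A minimal release-independent check (an illustrative sketch only, reusing the same `provisioning_host.execute()` API, not part of this change) could be: +    #   result = provisioning_host.execute('cat /proc/sys/crypto/fips_enabled') +    #   fips_on = result.status == 0 and result.stdout.strip() == '1' +    #   assert fips_on, 'FIPS mode is not active on the provisioned host'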
- :id: 2898e16a-865a-4de6-b2a5-bb0934fc2b76 + # Run a command on the host using REX to verify that Satellite's SSH key is present on the host + template_id = ( + sat.api.JobTemplate().search(query={'search': 'name="Run Command - Script Default"'})[0].id + ) + job = sat.api.JobInvocation().run( + data={ + 'job_template_id': template_id, + 'inputs': { + 'command': f'subscription-manager config | grep "hostname = {sat.hostname}"' + }, + 'search_query': f"name = {host.name}", + 'targeting_type': 'static_query', + }, + ) + assert job['result'] == 'success', 'Job invocation failed' - :parametrized: yes + # assert that the host is subscribed and consumes + # subscription provided by the activation key + assert provisioning_host.subscribed, 'Host is not subscribed' - :expectedresults: Operating system entity is created and updated - properly - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem().create() - os = target_sat.api.OperatingSystem(id=os.id, name=new_name).update(['name']) - assert os.name == new_name +@pytest.mark.e2e +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.skip(reason='Skipping till we have destructive support') +@pytest.mark.on_premises_provisioning +@pytest.mark.rhel_ver_match('[^6]') +def test_capsule_pxe_provisioning( + request, + capsule_provisioning_sat, + module_capsule_configured, + capsule_provisioning_rhel_content, + module_sca_manifest_org, + module_location, + provisioning_host, + pxe_loader, + capsule_provisioning_hostgroup, + module_lce_library, + module_default_org_view, + capsule_provisioning_lce_sync_setup, +): + """Provision a host using external capsule - @pytest.mark.tier1 - @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) - def test_positive_update_description(self, new_desc, target_sat): - """Create operating entity providing the initial description, - then update that description to another valid one. + :id: d76cd326-af4e-4bd5-b20c-128348e042d3 - :id: c809700a-b6ab-4651-9bd0-d0d9bd6a47dd + :steps: + 1. Configure satellite and capsule for provisioning + 2. Provision a host using capsule as the content source + 3. Check that resulting host is registered to Satellite - :parametrized: yes + :expectedresults: + 1. Provisioning using external capsule is successful. + 2. Host installs right version of RHEL + 3. Satellite is able to run REX job on the host + 4. Host is registered to Satellite and subscription status is 'Success' - :expectedresults: Operating system entity is created and updated - properly + :parametrized: yes + """ + host_mac_addr = provisioning_host._broker_args['provisioning_nic_mac_addr'] + sat = capsule_provisioning_sat.sat + cap = module_capsule_configured + host = sat.api.Host( + hostgroup=capsule_provisioning_hostgroup, + organization=module_sca_manifest_org, + location=module_location, + content_facet_attributes={ + 'content_view_id': capsule_provisioning_rhel_content.cv.id, + 'lifecycle_environment_id': module_lce_library.id, + }, + name=gen_string('alpha').lower(), + mac=host_mac_addr, + operatingsystem=capsule_provisioning_rhel_content.os, + subnet=capsule_provisioning_sat.subnet, + host_parameters_attributes=[ + { + 'name': 'remote_execution_connect_by_ip', + 'value': 'true', + 'parameter_type': 'boolean', + }, + ], + build=True, # put the host in build mode + ).create(create_missing=False) + # Clean up the host to free IP leases on Satellite. + # broker should do that as a part of the teardown, putting here just to make sure. + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + # Start the VM, do not ensure that we can connect to SSHD + provisioning_host.power_control(ensure=False) + # Host should do call back to the Satellite reporting + # the result of the installation. Wait until Satellite reports that the host is installed. + wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + host = host.read() + assert host.build_status_label == 'Installed'
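+    # For reference, wait_for() here comes from the `wait_for` package: it re-evaluates the lambda above every `delay` seconds until it returns a truthy value or `timeout` seconds elapse (raising TimedOutError). A rough hand-rolled equivalent (illustrative sketch only, assuming the same nailgun `host` object) would be: +    #   import time +    #   deadline = time.monotonic() + 1500 +    #   while host.read().build_status_label == 'Pending installation': +    #       assert time.monotonic() < deadline, 'Host never left Pending installation' +    #       time.sleep(10)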
- :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(description=gen_string('utf8')).create() - os = target_sat.api.OperatingSystem(id=os.id, description=new_desc).update(['description']) - assert os.description == new_desc + # Change the hostname of the host as we know it already. + # In the current infra environment we do not support + # addressing hosts using FQDNs, falling back to IP. + provisioning_host.hostname = host.ip + # Host is not blank anymore + provisioning_host.blank = False - @pytest.mark.tier1 - def test_positive_update_major_version(self, target_sat): - """Create operating entity providing the initial major version, - then update that version to another valid one. + # Wait for the host to be rebooted and SSH daemon to be started. + provisioning_host.wait_for_connection() - :id: e57fd4a3-f0ae-49fb-bd84-9a6ec606a2a2 + # Perform version check and check if root password is properly updated + host_os = host.operatingsystem.read() + expected_rhel_version = f'{host_os.major}.{host_os.minor}' + + if int(host_os.major) >= 9: + assert ( + provisioning_host.execute( + 'echo -e "\nPermitRootLogin yes" >> /etc/ssh/sshd_config; systemctl restart sshd' + ).status + == 0 + ) + host_ssh_os = sat.execute( + f'sshpass -p {settings.provisioning.host_root_password} ' + 'ssh -o StrictHostKeyChecking=no -o PubkeyAuthentication=no -o PasswordAuthentication=yes ' + f'-o UserKnownHostsFile=/dev/null root@{provisioning_host.hostname} cat /etc/redhat-release' + ) + assert host_ssh_os.status == 0 + assert ( + expected_rhel_version in host_ssh_os.stdout + ), f'The installed OS version differs from the expected version {expected_rhel_version}' - :expectedresults: Operating system entity is created and updated - properly + # Run a command on the host using REX to verify that Satellite's SSH key is present on the host + template_id = ( + sat.api.JobTemplate().search(query={'search': 'name="Run Command - Script Default"'})[0].id + ) + job = sat.api.JobInvocation().run( + data={ + 'job_template_id': template_id, + 'inputs': { + 'command': f'subscription-manager config | grep "hostname = {cap.hostname}"' + }, + 'search_query': f"name = {host.name}", + 'targeting_type': 'static_query', + }, + ) + assert job['result'] == 'success', 'Job invocation failed' - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem().create() - new_major_version = gen_string('numeric', 5) - os = target_sat.api.OperatingSystem(id=os.id, major=new_major_version).update(['major']) - assert os.major == new_major_version + # assert that the host is subscribed and consumes + # subscription provided by the activation key + assert provisioning_host.subscribed, 'Host is not subscribed' - @pytest.mark.tier1 - def test_positive_update_minor_version(self, target_sat): - """Create operating entity providing the initial minor version, - then update that version to another valid one. - :id: ca36f7cf-4487-4743-be06-52c5f47ffe71 - - :expectedresults: Operating system entity is created and updated - properly - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(minor=gen_string('numeric')).create() - new_minor_version = gen_string('numeric') - os = target_sat.api.OperatingSystem(id=os.id, minor=new_minor_version).update(['minor']) - assert os.minor == new_minor_version - - @pytest.mark.tier1 - def test_positive_update_os_family(self, target_sat): - """Create operating entity providing the initial os family, then - update that family to another valid one from the list. - - :id: 3d1f8fdc-d2de-4277-a0ba-07228a2fae82 - - :expectedresults: Operating system entity is created and updated - properly - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(family=OPERATING_SYSTEMS[0]).create() - new_os_family = OPERATING_SYSTEMS[random.randint(1, len(OPERATING_SYSTEMS) - 1)] - os = target_sat.api.OperatingSystem(id=os.id, family=new_os_family).update(['family']) - assert os.family == new_os_family - - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_update_arch(self, target_sat): - """Create an operating system that points at an architecture and - then update it to point to another architecture - - :id: ad69b4a3-6371-4516-b5ce-f6298edf35b3 - - :expectedresults: The operating system is updated and points at the - expected architecture.
- - :CaseLevel: Integration - """ - arch_1 = target_sat.api.Architecture().create() - arch_2 = target_sat.api.Architecture().create() - os = target_sat.api.OperatingSystem(architecture=[arch_1]).create() - assert len(os.architecture) == 1 - assert os.architecture[0].id == arch_1.id - os = target_sat.api.OperatingSystem(id=os.id, architecture=[arch_2]).update( - ['architecture'] - ) - assert len(os.architecture) == 1 - assert os.architecture[0].id == arch_2.id - - @pytest.mark.tier2 - def test_positive_update_ptable(self, target_sat): - """Create an operating system that points at partition table and - then update it to point to another partition table - - :id: 0dde5372-4b90-4c83-b497-31e94065adab - - :expectedresults: The operating system is updated and points at the - expected partition table. - - :CaseLevel: Integration - """ - ptable_1 = target_sat.api.PartitionTable().create() - ptable_2 = target_sat.api.PartitionTable().create() - os = target_sat.api.OperatingSystem(ptable=[ptable_1]).create() - assert len(os.ptable) == 1 - assert os.ptable[0].id == ptable_1.id - os = target_sat.api.OperatingSystem(id=os.id, ptable=[ptable_2]).update(['ptable']) - assert len(os.ptable) == 1 - assert os.ptable[0].id == ptable_2.id - - @pytest.mark.tier2 - def test_positive_update_media(self, module_org, module_target_sat): - """Create an operating system that points at media entity and - then update it to point to another media - - :id: 18b5f6b5-52ab-4722-8412-f0de85ad20fe - - :expectedresults: The operating system is updated and points at the - expected media. - - :CaseLevel: Integration - """ - media_1 = module_target_sat.api.Media(organization=[module_org]).create() - media_2 = module_target_sat.api.Media(organization=[module_org]).create() - os = module_target_sat.api.OperatingSystem(medium=[media_1]).create() - assert len(os.medium) == 1 - assert os.medium[0].id == media_1.id - os = module_target_sat.api.OperatingSystem(id=os.id, medium=[media_2]).update(['medium']) - assert len(os.medium) == 1 - assert os.medium[0].id == media_2.id - - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_update_medias(self, module_org, module_target_sat): - """Create an operating system that points at media entity and - then update it to point to another multiple different medias. - - :id: 756c4aa8-278d-488e-b48f-a8d2ace4526e - - :expectedresults: The operating system is updated and points at the - expected medias. - - :CaseLevel: Integration - """ - initial_media = module_target_sat.api.Media(organization=[module_org]).create() - os = module_target_sat.api.OperatingSystem(medium=[initial_media]).create() - assert len(os.medium) == 1 - assert os.medium[0].id == initial_media.id - amount = range(random.randint(3, 5)) - medias = [module_target_sat.api.Media().create() for _ in amount] - os = module_target_sat.api.OperatingSystem(id=os.id, medium=medias).update(['medium']) - assert len(os.medium) == len(amount) - assert {medium.id for medium in os.medium} == {medium.id for medium in medias} - - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_update_template(self, module_org, module_target_sat): - """Create an operating system that points at provisioning template and - then update it to point to another template - - :id: 02125a7a-905a-492a-a49b-768adf4ac00c - - :expectedresults: The operating system is updated and points at the - expected provisioning template. 
- - :CaseLevel: Integration - """ - template_1 = module_target_sat.api.ProvisioningTemplate(organization=[module_org]).create() - template_2 = module_target_sat.api.ProvisioningTemplate(organization=[module_org]).create() - os = module_target_sat.api.OperatingSystem(provisioning_template=[template_1]).create() - assert len(os.provisioning_template) == 1 - assert os.provisioning_template[0].id == template_1.id - os = module_target_sat.api.OperatingSystem( - id=os.id, provisioning_template=[template_2] - ).update(['provisioning_template']) - assert len(os.provisioning_template) == 1 - assert os.provisioning_template[0].id == template_2.id - - @pytest.mark.tier1 - @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, new_name, target_sat): - """Create operating system entity providing the initial name, - then update its name to invalid one. - - :id: 3ba55d6e-99cb-4878-b41b-a59476d1db58 - - :parametrized: yes - - :expectedresults: Operating system entity is not updated - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem().create() - with pytest.raises(HTTPError): - os = target_sat.api.OperatingSystem(id=os.id, name=new_name).update(['name']) - - @pytest.mark.skip_if_open('BZ:2101435') - @pytest.mark.tier1 - def test_negative_update_major_version(self, target_sat): - """Create operating entity providing the initial major version, - then update that version to invalid one. - - :id: de07c2f7-0896-493d-976c-e9f3a8a57025 - - :expectedresults: Operating system entity is not updated - - :BZ: 2101435 - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem().create() - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(id=os.id, major='-20').update(['major']) - - @pytest.mark.tier1 - def test_negative_update_minor_version(self, target_sat): - """Create operating entity providing the initial minor version, - then update that version to invalid one. - - :id: 130d028f-302d-4c20-b35c-c7f024f3897b - - :expectedresults: Operating system entity is not updated - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(minor=gen_string('numeric')).create() - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(id=os.id, minor='INVALID_VERSION').update(['minor']) - - @pytest.mark.tier1 - def test_negative_update_os_family(self, target_sat): - """Create operating entity providing the initial os family, then - update that family to invalid one. - - :id: fc11506e-8a46-470b-bde0-6fc5db98463f - - :expectedresults: Operating system entity is not updated - - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(family=OPERATING_SYSTEMS[0]).create() - with pytest.raises(HTTPError): - target_sat.api.OperatingSystem(id=os.id, family='NON_EXISTENT_OS').update(['family']) - - @pytest.mark.tier1 - @pytest.mark.upgrade - @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_delete_os(self, name, target_sat): - """Create new operating system entity and then delete it. - - :id: 3dbffb56-ad99-441d-921c-0fad6504d257 +@pytest.mark.stubbed +def test_rhel_provisioning_using_realm(): + """Provision a host using realm - :parametrized: yes + :id: 687e7d71-7e46-46d5-939b-4562f88c4598 - :expectedresults: Operating System entity is deleted successfully + :steps: + 1. Configure satellite for provisioning + 2. Configure Satellite for Realm support + 3. Provision a Host + 4. 
Check host is subscribed to Satellite - :CaseImportance: Critical - """ - os = target_sat.api.OperatingSystem(name=name).create() - os.delete() - with pytest.raises(HTTPError): - os.read() + :expectedresults: + 1. Provisioning via Realm is successful + 2. Check if the provisioned host is automatically registered to IdM + 3. Host installs right version of RHEL + 4. Satellite is able to run REX job on the host + 5. Host is registered to Satellite and subscription status is 'Success' + """ diff --git a/tests/foreman/api/test_provisioning_puppet.py b/tests/foreman/api/test_provisioning_puppet.py index 2a317c071be..ec1791f1a17 100644 --- a/tests/foreman/api/test_provisioning_puppet.py +++ b/tests/foreman/api/test_provisioning_puppet.py @@ -4,22 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Medium -:Upstream: No """ -import pytest -import requests from fauxfactory import gen_string from packaging.version import Version +import pytest +import requests from wait_for import wait_for @@ -160,7 +155,8 @@ def test_host_provisioning_with_external_puppetserver( ).create(create_missing=False) # Clean up the host to free IP leases on Satellite. # broker should do that as a part of the teardown, putting here just to make sure. - request.addfinalizer(host.delete) + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + # Start the VM, do not ensure that we can connect to SSHD provisioning_host.power_control(ensure=False) @@ -185,17 +181,7 @@ def test_host_provisioning_with_external_puppetserver( provisioning_host.blank = False # Wait for the host to be rebooted and SSH daemon to be started. - try: - wait_for( - provisioning_host.connect, - fail_condition=lambda res: res is not None, - handle_exception=True, - raise_original=True, - timeout=180, - delay=1, - ) - except ConnectionRefusedError: - raise ConnectionRefusedError('Timed out waiting for SSH daemon to start on the host') + provisioning_host.wait_for_connection() # Perform version check host_os = host.operatingsystem.read() diff --git a/tests/foreman/api/test_provisioningtemplate.py b/tests/foreman/api/test_provisioningtemplate.py index 5db24fa37bf..b154898498d 100644 --- a/tests/foreman/api/test_provisioningtemplate.py +++ b/tests/foreman/api/test_provisioningtemplate.py @@ -11,61 +11,39 @@ :Team: Rocket -:TestType: Functional - -:CaseLevel: Integration - :CaseImportance: High -:Upstream: No """ from random import choice -import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_mac -from fauxfactory import gen_string +from fauxfactory import gen_choice, gen_integer, gen_mac, gen_string from nailgun import client -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.config import get_credentials -from robottelo.config import settings -from robottelo.config import user_nailgun_config -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import valid_data_list - +from robottelo.config import settings, user_nailgun_config +from robottelo.utils.datafactory import invalid_names_list, valid_data_list -@pytest.fixture(scope="module") -def module_location(module_location): - yield module_location - module_location.delete() - -@pytest.fixture(scope="module") -def module_org(module_org): - yield module_org - module_org.delete() - - -@pytest.fixture(scope="module") -def module_user(module_org, module_location): +@pytest.fixture(scope='module') 
+def module_user(module_target_sat, module_org, module_location): """Creates an org admin role and user""" user_login = gen_string('alpha') user_password = gen_string('alpha') - # Create user with Manager role - orig_role = entities.Role().search(query={'search': 'name="Organization admin"'})[0] - new_role_dict = entities.Role(id=orig_role.id).clone( + orig_role = module_target_sat.api.Role().search(query={'search': 'name="Organization admin"'})[ + 0 + ] + new_role_dict = module_target_sat.api.Role(id=orig_role.id).clone( data={ 'role': { - 'name': f"test_template_admin_{gen_string('alphanumeric', 3)}", + 'name': f'test_template_admin_{gen_string("alphanumeric", 3)}', 'organization_ids': [module_org.id], 'location_ids': [module_location.id], } } ) - new_role = entities.Role(id=new_role_dict['id']).read() - user = entities.User( + new_role = module_target_sat.api.Role(id=new_role_dict['id']).read() + user = module_target_sat.api.User( role=[new_role], admin=False, login=user_login, @@ -73,13 +51,14 @@ def module_user(module_org, module_location): organization=[module_org], location=[module_location], ).create() - yield (user, user_login, user_password) + user.password = user_password + yield user user.delete() new_role.delete() -@pytest.fixture(scope="function") -def tftpboot(module_org, target_sat): +@pytest.fixture +def tftpboot(module_org, module_target_sat): """This fixture removes the current deployed templates from TFTP, and sets up new ones. It manipulates the global defaults, so it shouldn't be used in concurrent environment @@ -106,53 +85,53 @@ def tftpboot(module_org, target_sat): }, 'ipxe': { 'setting': 'global_iPXE', - 'path': f'{target_sat.url}/unattended/iPXE?bootstrap=1', + 'path': f'{module_target_sat.url}/unattended/iPXE?bootstrap=1', 'kind': 'iPXE', }, } # we keep the value of these for the teardown - default_settings = entities.Setting().search(query={"search": "name ~ global_"}) - kinds = entities.TemplateKind().search(query={"search": "name ~ PXE"}) + default_settings = module_target_sat.api.Setting().search(query={'search': 'name ~ global_'}) + kinds = module_target_sat.api.TemplateKind().search(query={"search": "name ~ PXE"}) # clean the already-deployed default pxe configs - target_sat.execute('rm {}'.format(' '.join([i['path'] for i in default_templates.values()]))) + module_target_sat.execute(f'rm -f {" ".join([i["path"] for i in default_templates.values()])}') # create custom Templates per kind for template in default_templates.values(): - template['entity'] = entities.ProvisioningTemplate( + template['entity'] = module_target_sat.api.ProvisioningTemplate( name=gen_string('alpha'), organization=[module_org], snippet=False, template_kind=[i.id for i in kinds if i.name == template['kind']][0], - template=f"<%= foreman_server_url %> {template['kind']}", + template=f'<%= foreman_server_url %> {template["kind"]}', ).create(create_missing=False) # Update the global settings to use newly created template - template['setting_id'] = entities.Setting( + module_target_sat.api.Setting( id=[i.id for i in default_settings if i.name == template['setting']][0], value=template['entity'].name, ).update(fields=['value']) yield default_templates - # delete the deployed tftp files - target_sat.execute('rm {}'.format(' '.join([i['path'] for i in default_templates.values()]))) + # clean the already-deployed default pxe configs + module_target_sat.execute(f'rm -f {" ".join([i["path"] for i in default_templates.values()])}') + # set the settings back to defaults for setting in 
default_settings: if setting.value is None: setting.value = '' - setting.update(fields=['value'] or '') + setting.update(fields=['value']) class TestProvisioningTemplate: - """Tests for provisioning templates - - :CaseLevel: Acceptance - """ + """Tests for provisioning templates""" @pytest.mark.tier1 @pytest.mark.e2e @pytest.mark.upgrade - def test_positive_end_to_end_crud(self, module_org, module_location, module_user, target_sat): + def test_positive_end_to_end_crud( + self, module_org, module_location, module_user, module_target_sat + ): """Create a new provisioning template with several attributes, update them, clone the provisioning template and then delete it @@ -163,12 +142,12 @@ def test_positive_end_to_end_crud(self, module_org, module_location, module_user :CaseImportance: Critical """ - cfg = user_nailgun_config(module_user[1], module_user[2]) + cfg = user_nailgun_config(module_user.login, module_user.password) name = gen_string('alpha') new_name = gen_string('alpha') - template_kind = choice(target_sat.api.TemplateKind().search()) + template_kind = choice(module_target_sat.api.TemplateKind().search()) - template = target_sat.api.ProvisioningTemplate( + template = module_target_sat.api.ProvisioningTemplate( name=name, organization=[module_org], location=[module_location], @@ -176,19 +155,21 @@ def test_positive_end_to_end_crud(self, module_org, module_location, module_user template_kind=template_kind, ).create() assert template.name == name - assert len(template.organization) == 1, "Template should be assigned to a single org here" + assert len(template.organization) == 1, 'Template should be assigned to a single org here' assert template.organization[0].id == module_org.id - assert len(template.location) == 1, "Template should be assigned to a single location here" + assert len(template.location) == 1, 'Template should be assigned to a single location here' assert template.location[0].id == module_location.id - assert template.snippet is False, "Template snippet attribute is True instead of False" + assert template.snippet is False, 'Template snippet attribute is True instead of False' assert template.template_kind.id == template_kind.id # negative create with pytest.raises(HTTPError) as e1: - target_sat.api.ProvisioningTemplate(name=gen_choice(invalid_names_list())).create() + module_target_sat.api.ProvisioningTemplate( + name=gen_choice(invalid_names_list()) + ).create() assert e1.value.response.status_code == 422 - invalid = target_sat.api.ProvisioningTemplate(snippet=False) + invalid = module_target_sat.api.ProvisioningTemplate(snippet=False) invalid.create_missing() invalid.template_kind = None invalid.template_kind_name = gen_string('alpha') @@ -197,13 +178,12 @@ def test_positive_end_to_end_crud(self, module_org, module_location, module_user assert e2.value.response.status_code == 422 # update - assert template.template_kind.id == template_kind.id, "Template kind id doesn't match" - updated = target_sat.api.ProvisioningTemplate( + assert template.template_kind.id == template_kind.id, 'Template kind id does not match' + updated = module_target_sat.api.ProvisioningTemplate( server_config=cfg, id=template.id, name=new_name ).update(['name']) - assert updated.name == new_name, "The Provisioning template wasn't properly renamed" + assert updated.name == new_name, 'The Provisioning template was not properly renamed' # clone - template_origin = template.read_json() # remove unique keys unique_keys = ('updated_at', 'created_at', 'id', 'name') @@ -212,10 +192,10 @@ def 
test_positive_end_to_end_crud(self, module_org, module_location, module_user } dupe_name = gen_choice(list(valid_data_list().values())) - dupe_json = target_sat.api.ProvisioningTemplate( + dupe_json = module_target_sat.api.ProvisioningTemplate( id=template.clone(data={'name': dupe_name})['id'] ).read_json() - dupe_template = target_sat.api.ProvisioningTemplate(id=dupe_json['id']) + dupe_template = module_target_sat.api.ProvisioningTemplate(id=dupe_json['id']) dupe_json = {key: value for key, value in dupe_json.items() if key not in unique_keys} assert template_origin == dupe_json @@ -229,8 +209,7 @@ def test_positive_end_to_end_crud(self, module_org, module_location, module_user @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.run_in_one_thread - def test_positive_build_pxe_default(self, tftpboot, target_sat): + def test_positive_build_pxe_default(self, tftpboot, module_target_sat): """Call the "build_pxe_default" path. :id: ca19d9da-1049-4b39-823b-933fc1a0cebd @@ -238,33 +217,27 @@ def test_positive_build_pxe_default(self, tftpboot, target_sat): :expectedresults: The response is a JSON payload, all templates are deployed to TFTP/HTTP and are rendered correctly - :CaseLevel: Integration - :CaseImportance: Critical :BZ: 1202564 """ - response = client.post( - entities.ProvisioningTemplate().path('build_pxe_default'), - auth=get_credentials(), - verify=False, - ) - response.raise_for_status() - assert type(response.json()) == dict + # Build PXE default template to get default PXE file + module_target_sat.api.ProvisioningTemplate().build_pxe_default() + for template in tftpboot.values(): if template['path'].startswith('http'): r = client.get(template['path'], verify=False) r.raise_for_status() rendered = r.text else: - rendered = target_sat.execute(f'cat {template["path"]}').stdout.splitlines()[0] + rendered = module_target_sat.execute(f'cat {template["path"]}').stdout if template['kind'] == 'iPXE': - assert f'{target_sat.hostname}/unattended/iPXE' in r.text + assert f'{module_target_sat.hostname}/unattended/iPXE' in r.text else: assert ( - rendered == f"{settings.server.scheme}://" - f"{target_sat.hostname} {template['kind']}" + f'{settings.server.scheme}://{module_target_sat.hostname} {template["kind"]}' + in rendered ) @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True) @@ -288,6 +261,8 @@ def test_positive_provision_template_check_net_interface( :BZ: 2148433 :customerscenario: true + + :parametrized: yes """ macaddress = gen_mac(multicast=False) capsule = module_target_sat.nailgun_smart_proxy @@ -307,8 +282,8 @@ def test_positive_provision_template_check_net_interface( 'lifecycle_environment_id': module_lce_library.id, }, ).create() - provision_template = host.read_template(data={'template_kind': 'provision'}) - assert 'ifcfg-$sanitized_real' in str(provision_template) + provision_template = host.read_template(data={'template_kind': 'provision'})['template'] + assert 'ifcfg-$sanitized_real' in provision_template @pytest.mark.e2e @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True) @@ -334,6 +309,8 @@ def test_positive_template_check_ipxe( :BZ: 2149030 :customerscenario: true + + :parametrized: yes """ macaddress = gen_mac(multicast=False) capsule = module_target_sat.nailgun_smart_proxy @@ -353,11 +330,9 @@ def test_positive_template_check_ipxe( 'lifecycle_environment_id': module_lce_library.id, }, ).create() - ipxe_template = host.read_template(data={'template_kind': 'iPXE'}) - if 
module_sync_kickstart_content.rhel_ver <= 8:
-            assert str(ipxe_template).count('ks=') == 1
-        else:
-            assert str(ipxe_template).count('inst.ks=') == 1
+        ipxe_template = host.read_template(data={'template_kind': 'iPXE'})['template']
+        ks_param = 'ks=' if module_sync_kickstart_content.rhel_ver <= 8 else 'inst.ks='
+        assert ipxe_template.count(ks_param) == 1
     @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True)
     def test_positive_template_check_vlan_parameter(
@@ -378,9 +353,11 @@ def test_positive_template_check_vlan_parameter(
         :expectedresults: The rendered templates should contain the "vlan" parameter
             expected for respective rhel hosts.
-        :BZ: 1607706
+        :BZ: 1607706, 2075358
         :customerscenario: true
+
+        :parametrized: yes
         """
         macaddress = gen_mac(multicast=False)
         capsule = module_target_sat.nailgun_smart_proxy
@@ -419,7 +396,266 @@
                 },
             ],
         ).create()
-        provision_template = host.read_template(data={'template_kind': 'provision'})
-        assert f'interfacename=vlan{tag}' in str(provision_template)
-        ipxe_template = host.read_template(data={'template_kind': 'iPXE'})
-        assert f'vlan=vlan{tag}:{identifier}' in str(ipxe_template)
+        provision_template = host.read_template(data={'template_kind': 'provision'})['template']
+        assert f'interfacename=vlan{tag}' in provision_template
+        ipxe_template = host.read_template(data={'template_kind': 'iPXE'})['template']
+        assert f'vlan={identifier}.{tag}:{identifier}' in ipxe_template
+
+    @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True)
+    @pytest.mark.parametrize('pxe_loader', ['uefi'], indirect=True)
+    @pytest.mark.parametrize('boot_mode', ['Static', 'DHCP'])
+    def test_positive_template_subnet_with_boot_mode(
+        self,
+        module_sync_kickstart_content,
+        module_target_sat,
+        module_sca_manifest_org,
+        module_location,
+        default_architecture,
+        default_partitiontable,
+        pxe_loader,
+        boot_mode,
+    ):
+        """Check whether the boot mode parameter in the subnet is respected when the UEFI
+        PXE loader is used, and is properly rendered in the provisioning templates
+
+        :id: 2decc787-59b0-41e6-96be-5dd9371c8966
+
+        :expectedresults: Templates should render and contain the boot mode set in the subnet
+
+        :BZ: 2168967, 1955861, 1784012
+
+        :customerscenario: true
+
+        :parametrized: yes
+        """
+        subnet = module_target_sat.api.Subnet(
+            name=gen_string('alpha'),
+            organization=[module_sca_manifest_org],
+            location=[module_location],
+            network='192.168.0.1',
+            mask='255.255.255.240',
+            boot_mode=boot_mode,
+        ).create()
+        host = module_target_sat.api.Host(
+            name=gen_string('alpha'),
+            organization=module_sca_manifest_org,
+            location=module_location,
+            subnet=subnet,
+            pxe_loader=pxe_loader.pxe_loader,
+            root_pass=settings.provisioning.host_root_password,
+            ptable=default_partitiontable,
+            architecture=default_architecture,
+            operatingsystem=module_sync_kickstart_content.os,
+        ).create()
+        # Verify provision templates for boot_mode in subnet, and check provision logs exist
+        rendered = host.read_template(data={'template_kind': 'provision'})['template']
+        assert f'--bootproto {boot_mode.lower()}' in rendered
+        assert '/root/install.post.log' in rendered
+        assert '/mnt/sysimage/root/install.post.log' not in rendered
+
+        # Verify PXE templates for boot_mode in subnet
+        pxe_templates = ['PXEGrub', 'PXEGrub2', 'PXELinux', 'iPXE']
+        provision_url = f'http://{module_target_sat.hostname}/unattended/provision'
+        ks_param = provision_url + '?static=1' if boot_mode == 'Static' else provision_url
+        for template in pxe_templates:
+            rendered = host.read_template(data={'template_kind': f'{template}'})['template']
+            assert f'ks={ks_param}' in rendered
+
+    def test_positive_template_use_graphical_installer(
+        self, module_target_sat, module_sca_manifest_org, module_location, default_os
+    ):
+        """Check whether the use_graphical_installer parameter is properly rendered
+        in the provisioning templates
+
+        :id: 2decc787-59b0-41e6-96be-5dd9371c8967
+
+        :expectedresults: Rendered template should contain the value set via the
+            use_graphical_installer host parameter for respective rhel hosts.
+
+        :BZ: 2106753
+
+        :customerscenario: true
+        """
+        host = module_target_sat.api.Host(
+            name=gen_string('alpha'),
+            organization=module_sca_manifest_org,
+            location=module_location,
+            operatingsystem=default_os,
+        ).create()
+        # Host will boot into text mode by default due to kickstart's skipx command
+        render = host.read_template(data={'template_kind': 'provision'})['template']
+        assert 'skipx' in render
+        assert 'text' in render
+        # Use the use_graphical_installer host param to override and boot in graphical mode
+        host.host_parameters_attributes = [
+            {'name': 'use_graphical_installer', 'value': 'true', 'parameter_type': 'boolean'}
+        ]
+        host.update(['host_parameters_attributes'])
+        render = host.read_template(data={'template_kind': 'provision'})['template']
+        assert 'graphical' in render
+        assert 'skipx' not in render
+
+    @pytest.mark.parametrize('module_sync_kickstart_content', [8], indirect=True)
+    def test_positive_template_check_aap_snippet(
+        self,
+        module_sync_kickstart_content,
+        module_target_sat,
+        module_sca_manifest_org,
+        module_location,
+        module_default_org_view,
+        module_lce_library,
+        default_architecture,
+        default_partitiontable,
+    ):
+        """Read the kickstart default template and verify ansible_provisioning_callback
+        snippet is rendered correctly
+
+        :id: 065ef48f-bec5-4535-8be7-d8527fa21564
+
+        :expectedresults: Rendered template should contain values set for AAP snippet
+            host parameter for respective rhel hosts.
+
+        :BZ: 2024175
+
+        :customerscenario: true
+        """
+        aap_fqdn = 'env-aap.example.com'
+        template_id = gen_integer(1, 10)
+        extra_vars_dict = '{"package_install": "zsh"}'
+        config_key = gen_string('alpha')
+        host_params = [
+            {'name': 'ansible_tower_provisioning', 'value': 'true', 'parameter_type': 'boolean'},
+            {'name': 'ansible_tower_fqdn', 'value': aap_fqdn, 'parameter_type': 'string'},
+            {'name': 'ansible_host_config_key', 'value': config_key, 'parameter_type': 'string'},
+            {'name': 'ansible_job_template_id', 'value': template_id, 'parameter_type': 'integer'},
+            {'name': 'ansible_extra_vars', 'value': extra_vars_dict, 'parameter_type': 'string'},
+        ]
+        host = module_target_sat.api.Host(
+            organization=module_sca_manifest_org,
+            location=module_location,
+            name=gen_string('alpha').lower(),
+            operatingsystem=module_sync_kickstart_content.os,
+            architecture=default_architecture,
+            domain=module_sync_kickstart_content.domain,
+            root_pass=settings.provisioning.host_root_password,
+            ptable=default_partitiontable,
+            content_facet_attributes={
+                'content_source_id': module_target_sat.nailgun_smart_proxy.id,
+                'content_view_id': module_default_org_view.id,
+                'lifecycle_environment_id': module_lce_library.id,
+            },
+            host_parameters_attributes=host_params,
+        ).create()
+        render = host.read_template(data={'template_kind': 'provision'})['template']
+        assert f'https://{aap_fqdn}/api/v2/job_templates/{template_id}/callback/' in render
+        assert 'systemctl enable ansible-callback' in render
+        assert f'"host_config_key":"{config_key}"' in render
+        assert '{"package_install": "zsh"}' in render
+
+    @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True)
+    def test_positive_template_check_fips_enabled(
+        self,
+        module_sync_kickstart_content,
+        module_target_sat,
+        module_sca_manifest_org,
+        module_location,
+        module_default_org_view,
+        module_lce_library,
+        default_architecture,
+        default_partitiontable,
+    ):
+        """Read the provision/PXE templates and verify that the FIPS packages to install,
+        and the kernel cmdline option fips=1 set by the kickstart_kernel_options snippet
+        when the host param fips_enabled is used, are rendered correctly
+
+        :id: 065ef48f-bec5-4535-8be7-d8527fa21565
+
+        :expectedresults: Rendered templates should contain the correct FIPS packages and the
+            boot parameter set by the snippet when host param fips_enabled is used for a rhel host
+
+        :parametrized: yes
+        """
+        host_params = [{'name': 'fips_enabled', 'value': 'true', 'parameter_type': 'boolean'}]
+        host = module_target_sat.api.Host(
+            organization=module_sca_manifest_org,
+            location=module_location,
+            name=gen_string('alpha').lower(),
+            operatingsystem=module_sync_kickstart_content.os,
+            architecture=default_architecture,
+            domain=module_sync_kickstart_content.domain,
+            root_pass=settings.provisioning.host_root_password,
+            ptable=default_partitiontable,
+            content_facet_attributes={
+                'content_source_id': module_target_sat.nailgun_smart_proxy.id,
+                'content_view_id': module_default_org_view.id,
+                'lifecycle_environment_id': module_lce_library.id,
+            },
+            host_parameters_attributes=host_params,
+        ).create()
+        render = host.read_template(data={'template_kind': 'provision'})['template']
+        assert 'dracut-fips' in render
+        assert '-prelink' in render
+        for kind in ['PXELinux', 'PXEGrub', 'PXEGrub2', 'iPXE', 'kexec']:
+            render = host.read_template(data={'template_kind': kind})['template']
+            assert 'fips=1' in render
+
+    @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True)
+    def test_positive_template_check_rex_pull_mode_snippet(
+        self,
+        module_sync_kickstart_content,
+        module_target_sat,
+        module_provisioning_capsule,
+        module_sca_manifest_org,
+        module_location,
+        module_default_org_view,
+        module_lce_library,
+        default_architecture,
+        default_partitiontable,
+    ):
+        """Read the provision template and verify that the host params and the REX pull mode snippet are rendered correctly.
+
+        :id: e5212c46-d269-4bce-8e03-9d00c086e69m
+
+        :steps:
+            1. Create a host by setting host param enable-remote-execution-pull/host_registration_remote_execution_pull
+            2. Read the template to verify the host param and REX pull mode snippet for respective rhel hosts
+
+        :expectedresults: The rendered template has the host params set and the correct home directory permissions for the rex user
+
+        :parametrized: yes
+        """
+        host = module_target_sat.api.Host(
+            organization=module_sca_manifest_org,
+            location=module_location,
+            name=gen_string('alpha').lower(),
+            mac=gen_mac(multicast=False),
+            operatingsystem=module_sync_kickstart_content.os,
+            architecture=default_architecture,
+            domain=module_sync_kickstart_content.domain,
+            root_pass=settings.provisioning.host_root_password,
+            ptable=default_partitiontable,
+            host_parameters_attributes=[
+                {
+                    'name': 'host_registration_remote_execution_pull',
+                    'value': True,
+                    'parameter_type': 'boolean',
+                },
+                {
+                    'name': 'enable-remote-execution-pull',
+                    'value': True,
+                    'parameter_type': 'boolean',
+                },
+            ],
+        ).create()
+        rex_snippet = host.read_template(data={'template_kind': 'provision'})['template']
+        assert 'chmod +x /root/remote_execution_pull_setup.sh' in rex_snippet
+
+        rex_snippet = host.read_template(data={'template_kind': 'host_init_config'})['template']
+        assert 'Starting deployment of REX pull provider' in rex_snippet
+        pkg_manager = 'yum' if module_sync_kickstart_content.rhel_ver < 8 else 'dnf'
+        assert f'{pkg_manager} -y install foreman_ygg_worker' in rex_snippet
+        assert 'broker = ["mqtts://$SERVER_NAME:1883"]' in rex_snippet
+        assert 'systemctl try-restart yggdrasild' in rex_snippet
+        assert 'systemctl enable --now yggdrasild' in rex_snippet
+        assert 'yggdrasil status' in rex_snippet
+        assert 'Remote execution pull provider successfully configured!'
in rex_snippet diff --git a/tests/foreman/api/test_registration.py b/tests/foreman/api/test_registration.py index fca8c7ff8ca..ae50d84364c 100644 --- a/tests/foreman/api/test_registration.py +++ b/tests/foreman/api/test_registration.py @@ -2,8 +2,6 @@ :Requirement: Registration -:CaseLevel: Acceptance - :CaseComponent: Registration :CaseAutomation: Automated @@ -12,16 +10,18 @@ :Team: Rocket -:TestType: Functional - -:Upstream: No """ import uuid +from fauxfactory import gen_ipaddr, gen_mac, gen_string import pytest +from requests import HTTPError -from robottelo.constants import CLIENT_PORT -from robottelo.constants import ENVIRONMENT +from robottelo import constants +from robottelo.config import ( + settings, + user_nailgun_config, +) pytestmark = pytest.mark.tier1 @@ -29,9 +29,9 @@ @pytest.mark.e2e @pytest.mark.no_containers def test_host_registration_end_to_end( - module_org, + module_entitlement_manifest_org, module_location, - module_ak_with_synced_repo, + module_activation_key, module_target_sat, module_capsule_configured, rhel_contenthost, @@ -49,9 +49,10 @@ def test_host_registration_end_to_end( :customerscenario: true """ + org = module_entitlement_manifest_org command = module_target_sat.api.RegistrationCommand( - organization=module_org, - activation_keys=[module_ak_with_synced_repo.name], + organization=org, + activation_keys=[module_activation_key.name], location=module_location, ).create() @@ -61,17 +62,17 @@ def test_host_registration_end_to_end( # Verify server.hostname and server.port from subscription-manager config assert module_target_sat.hostname == rhel_contenthost.subscription_config['server']['hostname'] - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] + assert rhel_contenthost.subscription_config['server']['port'] == constants.CLIENT_PORT # Update module_capsule_configured to include module_org/module_location nc = module_capsule_configured.nailgun_smart_proxy - module_target_sat.api.SmartProxy(id=nc.id, organization=[module_org]).update(['organization']) + module_target_sat.api.SmartProxy(id=nc.id, organization=[org]).update(['organization']) module_target_sat.api.SmartProxy(id=nc.id, location=[module_location]).update(['location']) command = module_target_sat.api.RegistrationCommand( smart_proxy=nc, - organization=module_org, - activation_keys=[module_ak_with_synced_repo.name], + organization=org, + activation_keys=[module_activation_key.name], location=module_location, force=True, ).create() @@ -85,37 +86,281 @@ def test_host_registration_end_to_end( module_capsule_configured.hostname == rhel_contenthost.subscription_config['server']['hostname'] ) - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] - - @pytest.mark.tier3 - def test_positive_allow_reregistration_when_dmi_uuid_changed( - self, module_org, rhel_contenthost, target_sat - ): - """Register a content host with a custom DMI UUID, unregistering it, change - the DMI UUID, and re-registering it again - - :id: 7f431cb2-5a63-41f7-a27f-62b86328b50d - - :expectedresults: The content host registers successfully - - :customerscenario: true - - :BZ: 1747177 - - :CaseLevel: Integration - """ - uuid_1 = str(uuid.uuid1()) - uuid_2 = str(uuid.uuid4()) - rhel_contenthost.install_katello_ca(target_sat) - target_sat.execute( - f'echo \'{{"dmi.system.uuid": "{uuid_1}"}}\' > /etc/rhsm/facts/uuid.facts' - ) - result = rhel_contenthost.register_contenthost(module_org.label, lce=ENVIRONMENT) - assert result.status == 0 - result = 
rhel_contenthost.execute('subscription-manager clean')
-        assert result.status == 0
-        target_sat.execute(
-            f'echo \'{{"dmi.system.uuid": "{uuid_2}"}}\' > /etc/rhsm/facts/uuid.facts'
-        )
-        result = rhel_contenthost.register_contenthost(module_org.label, lce=ENVIRONMENT)
-        assert result.status == 0
+    assert rhel_contenthost.subscription_config['server']['port'] == constants.CLIENT_PORT
+
+
+@pytest.mark.tier3
+@pytest.mark.rhel_ver_match('[^6]')
+def test_positive_allow_reregistration_when_dmi_uuid_changed(
+    module_entitlement_manifest_org,
+    rhel_contenthost,
+    target_sat,
+    module_activation_key,
+    module_location,
+):
+    """Register a content host with a custom DMI UUID, unregister it, change
+    the DMI UUID, and re-register it again
+
+    :id: 7f431cb2-5a63-41f7-a27f-62b86328b50d
+
+    :expectedresults: The content host registers successfully
+
+    :customerscenario: true
+
+    :BZ: 1747177,2229112
+    """
+    uuid_1 = str(uuid.uuid1())
+    uuid_2 = str(uuid.uuid4())
+    org = module_entitlement_manifest_org
+    target_sat.execute(f'echo \'{{"dmi.system.uuid": "{uuid_1}"}}\' > /etc/rhsm/facts/uuid.facts')
+    command = target_sat.api.RegistrationCommand(
+        organization=org,
+        activation_keys=[module_activation_key.name],
+        location=module_location,
+    ).create()
+    result = rhel_contenthost.execute(command)
+    assert result.status == 0
+    result = rhel_contenthost.execute('subscription-manager clean')
+    assert result.status == 0
+    target_sat.execute(f'echo \'{{"dmi.system.uuid": "{uuid_2}"}}\' > /etc/rhsm/facts/uuid.facts')
+    command = target_sat.api.RegistrationCommand(
+        organization=org,
+        activation_keys=[module_activation_key.name],
+        location=module_location,
+    ).create()
+    result = rhel_contenthost.execute(command)
+    assert result.status == 0
+
+
+def test_positive_update_packages_registration(
+    module_target_sat,
+    module_entitlement_manifest_org,
+    module_location,
+    rhel8_contenthost,
+    module_activation_key,
+):
+    """Test package update on host post registration
+
+    :id: 3d0a3252-ab81-4acf-bca6-253b746f26bb
+
+    :expectedresults: Package update is successful on host post registration.
+    """
+    org = module_entitlement_manifest_org
+    command = module_target_sat.api.RegistrationCommand(
+        organization=org,
+        location=module_location,
+        activation_keys=[module_activation_key.name],
+        update_packages=True,
+    ).create()
+    result = rhel8_contenthost.execute(command)
+    assert result.status == 0, f'Failed to register host: {result.stderr}'
+
+    package = constants.FAKE_7_CUSTOM_PACKAGE
+    repo_url = settings.repos.yum_3['url']
+    rhel8_contenthost.create_custom_repos(fake_yum=repo_url)
+    result = rhel8_contenthost.execute(f"yum install -y {package}")
+    assert result.status == 0
+
+
+@pytest.mark.no_containers
+def test_positive_rex_interface_for_global_registration(
+    module_target_sat,
+    module_entitlement_manifest_org,
+    module_location,
+    rhel8_contenthost,
+    module_activation_key,
+):
+    """Test remote execution interface is set for global registration
+
+    :id: 982de593-dd1a-4c6c-81fe-728f40a7ad4d
+
+    :steps:
+        1. Register host with global registration template to Satellite specifying remote execution interface parameter.
+
+    :expectedresults: The remote execution interface passed in the registration command is properly set for the host.
+ + :BZ: 1841048 + + :customerscenario: true + """ + mac_address = gen_mac(multicast=False) + ip = gen_ipaddr() + # Create eth1 interface on the host + add_interface_command = f'ip link add eth1 type dummy;ifconfig eth1 hw ether {mac_address};ip addr add {ip}/24 brd + dev eth1 label eth1:1;ip link set dev eth1 up' + result = rhel8_contenthost.execute(add_interface_command) + assert result.status == 0 + org = module_entitlement_manifest_org + command = module_target_sat.api.RegistrationCommand( + organization=org, + location=module_location, + activation_keys=[module_activation_key.name], + update_packages=True, + remote_execution_interface='eth1', + ).create() + result = rhel8_contenthost.execute(command) + assert result.status == 0, f'Failed to register host: {result.stderr}' + host = module_target_sat.api.Host().search( + query={'search': f'name={rhel8_contenthost.hostname}'} + )[0] + # Check if eth1 interface is set for remote execution + for interface in host.read_json()['interfaces']: + if 'eth1' in str(interface): + assert interface['execution'] is True + assert interface['ip'] == ip + assert interface['mac'] == mac_address + + +@pytest.mark.tier1 +def test_negative_global_registration_without_ak(module_target_sat): + """Attempt to register a host without ActivationKey + + :id: e48a6260-97e0-4234-a69c-77bbbcde85de + + :expectedresults: Generate command is disabled without ActivationKey + """ + with pytest.raises(HTTPError) as context: + module_target_sat.api.RegistrationCommand().create() + assert 'Missing activation key!' in context.value.response.text + + +def test_negative_capsule_without_registration_enabled( + module_target_sat, + module_capsule_configured, + module_ak_with_cv, + module_entitlement_manifest_org, + module_location, +): + """Verify registration with Capsule, when registration isn't configured in installer + + :id: a2f23e42-648d-4428-a961-6e0b933c6dff + + :steps: + 1. Get a configured capsule + 2. The registration is set to False on capsule by default + 3. Try to register host with that capsule + + :expectedresults: Registration fails with HTTP error code 422 and an error message. + """ + org = module_entitlement_manifest_org + + nc = module_capsule_configured.nailgun_smart_proxy + module_target_sat.api.SmartProxy(id=nc.id, organization=[org]).update(['organization']) + module_target_sat.api.SmartProxy(id=nc.id, location=[module_location]).update(['location']) + + res = module_capsule_configured.install( + cmd_args={}, + cmd_kwargs={'foreman-proxy-registration': 'false', 'foreman-proxy-templates': 'true'}, + ) + assert res.status == 0 + error_message = '422 Client Error' + with pytest.raises(HTTPError, match=f'{error_message}') as context: + module_target_sat.api.RegistrationCommand( + smart_proxy=nc, + organization=org, + location=module_location, + activation_keys=[module_ak_with_cv.name], + insecure=True, + ).create() + assert ( + "Proxy lacks one of the following features: 'Registration', 'Templates'" + in context.value.response.text + ) + + +@pytest.mark.rhel_ver_match('[^6]') +def test_positive_host_registration_with_non_admin_user_with_setup_false( + module_org, + module_location, + module_activation_key, + module_target_sat, + rhel_contenthost, +): + """Verify host registration with non admin user with setup false + + :id: 02bdda6a-010d-4098-a7e0-e4b5e8416ce3 + + :steps: + 1. Sync the content repositories, add and publish it in CV + 2. Create a non-admin user and assign "Register Hosts" role to it. + 3. Create an activation key and assign CV and LCE to it. 
+        4. Generate a curl command with the new user and use it to register the host
+
+    :expectedresults: Host is registered successfully by the non-admin user, with all setup options set to 'NO'
+
+    :BZ: 2211484
+
+    :customerscenario: true
+    """
+    register_host_role = module_target_sat.api.Role().search(
+        query={'search': 'name="Register hosts"'}
+    )
+    login = gen_string('alphanumeric')
+    password = gen_string('alphanumeric')
+    module_target_sat.api.User(
+        role=register_host_role,
+        admin=False,
+        login=login,
+        password=password,
+        organization=[module_org],
+        location=[module_location],
+    ).create()
+    user_cfg = user_nailgun_config(login, password)
+    command = module_target_sat.api.RegistrationCommand(
+        server_config=user_cfg,
+        organization=module_org,
+        activation_keys=[module_activation_key.name],
+        location=module_location,
+        setup_insights=False,
+        setup_remote_execution=False,
+        setup_remote_execution_pull=False,
+        update_packages=False,
+    ).create()
+    result = rhel_contenthost.execute(command)
+    assert result.status == 0, f'Failed to register host: {result.stderr}'
+
+    # verify package install for insights-client didn't run when Setup Insights is false
+    assert 'dnf -y install insights-client' not in result.stdout
+    # verify package install for foreman_ygg_worker didn't run when REX pull mode is false
+    assert 'dnf -y install foreman_ygg_worker' not in result.stdout
+    # verify packages aren't getting updated when Update packages is false
+    assert '# Updating packages' not in result.stdout
+    # verify foreman-proxy ssh pubkey isn't present when Setup REX is false
+    assert rhel_contenthost.execute('cat ~/.ssh/authorized_keys | grep foreman-proxy').status == 1
+
+
+@pytest.mark.rhel_ver_match('[^6]')
+def test_negative_verify_bash_exit_status_failing_host_registration(
+    module_sca_manifest_org,
+    module_location,
+    module_target_sat,
+    rhel_contenthost,
+):
+    """Verify the status code when curl command registration fails intentionally
+
+    :id: 4789e8da-6391-4ea4-aa0d-73c93220ce44
+
+    :steps:
+        1. Generate a curl command and make the registration fail intentionally.
+        2. Check the exit code for the command.
+
+    :expectedresults: Exit code returns 1 if registration fails.
+ + :BZ: 2155444 + + :customerscenario: true + + :parametrized: yes + """ + ak = module_target_sat.api.ActivationKey(name=gen_string('alpha')).create() + # Try registration command generated with AK not in same as selected organization + command = module_target_sat.api.RegistrationCommand( + organization=module_sca_manifest_org, + activation_keys=[ak.name], + location=module_location, + ).create() + result = rhel_contenthost.execute(command) + + # verify status code when registrationCommand fails to register on host + assert result.status == 1 + assert 'Couldn\'t find activation key' in result.stderr diff --git a/tests/foreman/api/test_remoteexecution.py b/tests/foreman/api/test_remoteexecution.py index 05535ae591a..f0c887a80f8 100644 --- a/tests/foreman/api/test_remoteexecution.py +++ b/tests/foreman/api/test_remoteexecution.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -77,7 +72,13 @@ def test_positive_run_capsule_upgrade_playbook(module_capsule_configured, target @pytest.mark.tier3 @pytest.mark.no_containers -@pytest.mark.rhel_ver_match('[^6].*') +@pytest.mark.rhel_ver_list('8') +@pytest.mark.parametrize( + 'setting_update', + ['remote_execution_global_proxy=False'], + ids=["no_global_proxy"], + indirect=True, +) def test_negative_time_to_pickup( module_org, module_target_sat, @@ -85,6 +86,7 @@ def test_negative_time_to_pickup( module_ak_with_cv, module_capsule_configured_mqtt, rhel_contenthost, + setting_update, ): """Time to pickup setting is honored for host registered to mqtt @@ -95,7 +97,7 @@ def test_negative_time_to_pickup( :CaseImportance: High - :bz: 2158738, 2118651 + :bz: 2158738 :parametrized: yes """ @@ -132,35 +134,10 @@ def test_negative_time_to_pickup( # check mqtt client is running result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' - # check that longrunning command is not affected by time_to_pickup BZ#2158738 - job = module_target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'organization': module_org.name, - 'location': smart_proxy_location.name, - 'inputs': { - 'command': 'echo start; sleep 10; echo done', - }, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - 'time_to_pickup': '5', - }, - ) - module_target_sat.wait_for_tasks(f'resource_type = JobInvocation and resource_id = {job["id"]}') - result = module_target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 # stop yggdrasil client on host result = rhel_contenthost.execute('systemctl stop yggdrasild') assert result.status == 0, f'Failed to stop yggdrasil on client: {result.stderr}' - # Make sure the job is executed by the registered-trough capsule - global_ttp = module_target_sat.api.Setting().search( - query={'search': 'name="remote_execution_global_proxy"'} - )[0] - global_ttp.value = False - global_ttp.update(['value']) - # run script provider rex command with time_to_pickup job = module_target_sat.api.JobInvocation().run( synchronous=False, @@ -190,6 +167,7 @@ def test_negative_time_to_pickup( global_ttp = module_target_sat.api.Setting().search( query={'search': 'name="remote_execution_time_to_pickup"'} )[0] + default_global_ttp = global_ttp.value global_ttp.value = '10' global_ttp.update(['value']) job = 
module_target_sat.api.JobInvocation().run(
@@ -214,3 +192,60 @@
         query={'search': f'resource_type = JobInvocation and resource_id = {job["id"]}'}
     )
     assert 'The job was not picked up in time' in result[0].humanized['output']
+    global_ttp.value = default_global_ttp
+    global_ttp.update(['value'])
+    # start yggdrasil client on host
+    result = rhel_contenthost.execute('systemctl start yggdrasild')
+    assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}'
+    result = rhel_contenthost.execute('systemctl status yggdrasild')
+    assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}'
+    rhel_contenthost.execute('yggdrasil status')
+
+
+@pytest.mark.tier3
+@pytest.mark.no_containers
+@pytest.mark.rhel_ver_list('8')
+def test_positive_check_longrunning_job(
+    module_org,
+    module_target_sat,
+    smart_proxy_location,
+    module_ak_with_cv,
+    module_capsule_configured_mqtt,
+    rhel_contenthost,
+):
+    """Time to pickup setting doesn't disrupt long-running jobs
+
+    :id: e6eeb948-faa5-4b76-9a86-5e934f7bee77
+
+    :expectedresults: Time to pickup doesn't abort the long-running job if
+        it has already started
+
+    :CaseImportance: Medium
+
+    :bz: 2118651
+
+    :parametrized: yes
+    """
+    template_id = (
+        module_target_sat.api.JobTemplate()
+        .search(query={'search': 'name="Run Command - Script Default"'})[0]
+        .id
+    )
+    # check that a long-running command is not affected by time_to_pickup BZ#2158738
+    job = module_target_sat.api.JobInvocation().run(
+        synchronous=False,
+        data={
+            'job_template_id': template_id,
+            'organization': module_org.name,
+            'location': smart_proxy_location.name,
+            'inputs': {
+                'command': 'echo start; sleep 25; echo done',
+            },
+            'targeting_type': 'static_query',
+            'search_query': f'name = {rhel_contenthost.hostname}',
+            'time_to_pickup': '20',
+        },
+    )
+    module_target_sat.wait_for_tasks(f'resource_type = JobInvocation and resource_id = {job["id"]}')
+    result = module_target_sat.api.JobInvocation(id=job['id']).read()
+    assert result.succeeded == 1
diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py
index 21f08882740..497c27feefe 100644
--- a/tests/foreman/api/test_reporttemplates.py
+++ b/tests/foreman/api/test_reporttemplates.py
@@ -4,32 +4,31 @@
 :CaseAutomation: Automated
-:CaseLevel: Component
-
 :CaseComponent: Reporting
 :team: Phoenix-subscriptions
-:TestType: Functional
-
 :CaseImportance: High
-:Upstream: No
 """
-import pytest
 from broker import Broker
 from fauxfactory import gen_string
-from nailgun import entities
+import pytest
 from requests import HTTPError
 from wait_for import wait_for
-from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
-from robottelo.constants import PRDS
-from robottelo.constants import REPOS
-from robottelo.constants import REPOSET
+from robottelo.config import settings
+from robottelo.constants import (
+    DEFAULT_SUBSCRIPTION_NAME,
+    FAKE_1_CUSTOM_PACKAGE,
+    FAKE_1_CUSTOM_PACKAGE_NAME,
+    FAKE_2_CUSTOM_PACKAGE,
+    PRDS,
+    REPOS,
+    REPOSET,
+)
 from robottelo.hosts import ContentHost
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
+from robottelo.utils.datafactory import parametrized, valid_data_list
 from robottelo.utils.issue_handlers import is_open
@@ -44,24 +43,24 @@ def setup_content(module_entitlement_manifest_org, module_target_sat):
         reposet=REPOSET['rhst7'],
         releasever=None,
     )
-    rh_repo = entities.Repository(id=rh_repo_id).read()
+    rh_repo = 
module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() - custom_repo = entities.Repository( - product=entities.Product(organization=org).create(), + custom_repo = module_target_sat.api.Repository( + product=module_target_sat.api.Product(organization=org).create(), ).create() custom_repo.sync() - lce = entities.LifecycleEnvironment(organization=org).create() - cv = entities.ContentView( + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() + cv = module_target_sat.api.ContentView( organization=org, repository=[rh_repo_id, custom_repo.id], ).create() cv.publish() cvv = cv.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=cv, max_hosts=100, organization=org, environment=lce, auto_attach=True ).create() - subscription = entities.Subscription(organization=org).search( + subscription = module_target_sat.api.Subscription(organization=org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) @@ -73,7 +72,7 @@ def setup_content(module_entitlement_manifest_org, module_target_sat): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_CRUDL(name): +def test_positive_CRUDL(name, target_sat): """Create, Read, Update, Delete, List :id: a2a577db-144e-4761-a42e-e83885464786 @@ -96,27 +95,27 @@ def test_positive_CRUDL(name): """ # Create template1 = gen_string('alpha') - rt = entities.ReportTemplate(name=name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=name, template=template1).create() # List - res = entities.ReportTemplate().search(query={'search': f'name="{name}"'}) + res = target_sat.api.ReportTemplate().search(query={'search': f'name="{name}"'}) assert name in list(map(lambda x: x.name, res)) # Read - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert name == rt.name assert template1 == rt.template # Update template2 = gen_string('alpha') - entities.ReportTemplate(id=rt.id, template=template2).update(['template']) - rt = entities.ReportTemplate(id=rt.id).read() + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(['template']) + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert template2 == rt.template # Delete - entities.ReportTemplate(id=rt.id).delete() + target_sat.api.ReportTemplate(id=rt.id).delete() with pytest.raises(HTTPError): - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() @pytest.mark.tier1 -def test_positive_generate_report_nofilter(): +def test_positive_generate_report_nofilter(target_sat): """Generate Host - Statuses report :id: a4b687db-144e-4761-a42e-e93887464986 @@ -132,8 +131,10 @@ def test_positive_generate_report_nofilter(): :CaseImportance: Critical """ host_name = gen_string('alpha').lower() - entities.Host(name=host_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = rt.generate() for column_name in [ 'Name', @@ -159,7 +160,7 @@ def test_positive_generate_report_nofilter(): @pytest.mark.tier2 -def test_positive_generate_report_filter(): +def test_positive_generate_report_filter(target_sat): 
"""Generate Host - Statuses report :id: a4b677cb-144e-4761-a42e-e93887464986 @@ -176,9 +177,11 @@ def test_positive_generate_report_filter(): """ host1_name = gen_string('alpha').lower() host2_name = gen_string('alpha').lower() - entities.Host(name=host1_name).create() - entities.Host(name=host2_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host1_name).create() + target_sat.api.Host(name=host2_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = rt.generate(data={"input_values": {"hosts": host2_name}}) for column_name in [ 'Name', @@ -205,7 +208,7 @@ def test_positive_generate_report_filter(): @pytest.mark.tier2 -def test_positive_report_add_userinput(): +def test_positive_report_add_userinput(target_sat): """Add user input to template, use it in template, generate template :id: a4a577db-144e-4761-a42e-e86887464986 @@ -225,21 +228,21 @@ def test_positive_report_add_userinput(): input_value = gen_string('alpha').lower() template_name = gen_string('alpha').lower() template = f'<%= "value=\\"" %><%= input(\'{input_name}\') %><%= "\\"" %>' - entities.Host(name=host_name).create() - rt = entities.ReportTemplate(name=template_name, template=template).create() - entities.TemplateInput( + target_sat.api.Host(name=host_name).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template).create() + target_sat.api.TemplateInput( name=input_name, input_type="user", template=rt.id, ).create() - ti = entities.TemplateInput(template=rt.id).search()[0].read() + ti = target_sat.api.TemplateInput(template=rt.id).search()[0].read() assert input_name == ti.name res = rt.generate(data={"input_values": {input_name: input_value}}) assert f'value="{input_value}"' in res @pytest.mark.tier2 -def test_positive_lock_clone_nodelete_unlock_report(): +def test_positive_lock_clone_nodelete_unlock_report(target_sat): """Lock report template. Check it can be cloned and can't be deleted or edited. Unlock. Check it can be deleted and edited. @@ -269,15 +272,15 @@ def test_positive_lock_clone_nodelete_unlock_report(): template_clone_name = gen_string('alpha').lower() template1 = gen_string('alpha') template2 = gen_string('alpha') - rt = entities.ReportTemplate(name=template_name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template1).create() # 2. Lock template - entities.ReportTemplate(id=rt.id, locked=True).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=True).update(["locked"]) rt = rt.read() assert rt.locked is True # 3. Clone template, check cloned data rt.clone(data={'name': template_clone_name}) cloned_rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': f'name="{template_clone_name}"'})[0] .read() ) @@ -289,24 +292,28 @@ def test_positive_lock_clone_nodelete_unlock_report(): rt.delete() # In BZ1680458, exception is thrown but template is deleted anyway assert ( - len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) != 0 + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + != 0 ) # 5. Try to edit template with pytest.raises(HTTPError): - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template1 == rt.template # 6. 
Unlock template - entities.ReportTemplate(id=rt.id, locked=False).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=False).update(["locked"]) rt = rt.read() assert rt.locked is False # 7. Edit template - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template2 == rt.template # 8. Delete template rt.delete() - assert len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) == 0 + assert ( + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + == 0 + ) @pytest.mark.tier2 @@ -350,8 +357,7 @@ def test_positive_generate_report_sanitized(): @pytest.mark.tier2 -@pytest.mark.stubbed -def test_negative_create_report_without_name(): +def test_negative_create_report_without_name(module_target_sat): """Try to create a report template with empty name :id: a4b577db-144e-4771-a42e-e93887464986 @@ -366,25 +372,85 @@ def test_negative_create_report_without_name(): :CaseImportance: Medium """ + with pytest.raises(HTTPError) as report_response: + module_target_sat.api.ReportTemplate(name=' ', template=gen_string('alpha')).create() + assert "Name can't be blank" in report_response.value.response.text @pytest.mark.tier2 -@pytest.mark.stubbed -def test_positive_applied_errata(): +@pytest.mark.rhel_ver_match(r'^(?!6$)\d+$') +@pytest.mark.no_containers +def test_positive_applied_errata( + function_org, function_location, function_lce, rhel_contenthost, target_sat +): """Generate an Applied Errata report :id: a4b577db-141e-4871-a42e-e93887464986 - :setup: User with reporting access rights, some host with applied errata + :setup: A Host with some applied errata. :steps: - 1. POST /api/report_templates/:id/generate + 1. 
Generate an Applied Errata report :expectedresults: A report is generated with all applied errata listed :CaseImportance: Medium """ + activation_key = target_sat.api.ActivationKey( + environment=function_lce, organization=function_org + ).create() + cv = target_sat.api.ContentView(organization=function_org).create() + ERRATUM_ID = str(settings.repos.yum_6.errata[2]) + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': settings.repos.yum_9.url, + 'organization-id': function_org.id, + 'content-view-id': cv.id, + 'lifecycle-environment-id': function_lce.id, + 'activationkey-id': activation_key.id, + } + ) + result = rhel_contenthost.register( + function_org, function_location, activation_key.name, target_sat + ) + assert f'The registered system name is: {rhel_contenthost.hostname}' in result.stdout + assert rhel_contenthost.subscribed + rhel_contenthost.execute(r'subscription-manager repos --enable \*') + assert rhel_contenthost.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 + assert rhel_contenthost.execute(f'rpm -q {FAKE_1_CUSTOM_PACKAGE}').status == 0 + task_id = target_sat.api.JobInvocation().run( + data={ + 'feature': 'katello_errata_install', + 'inputs': {'errata': ERRATUM_ID}, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + 'organization_id': function_org.id, + }, + )['id'] + target_sat.wait_for_tasks( + search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'), + search_rate=15, + max_tries=10, + ) + rt = ( + target_sat.api.ReportTemplate() + .search(query={'search': 'name="Host - Applied Errata"'})[0] + .read() + ) + res = rt.generate( + data={ + 'organization_id': function_org.id, + 'report_format': 'json', + 'input_values': { + 'Filter Errata Type': 'all', + 'Include Last Reboot': 'no', + 'Status': 'all', + }, + } + ) + assert res[0]['erratum_id'] == ERRATUM_ID + assert res[0]['issued'] @pytest.mark.tier2 @@ -531,7 +597,7 @@ def test_positive_generate_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) @@ -570,7 +636,7 @@ def test_positive_schedule_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) @@ -652,3 +718,118 @@ def test_positive_generate_job_report(setup_content, target_sat, rhel7_contentho ) assert res[0]['Host'] == rhel7_contenthost.hostname assert '/root' in res[0]['stdout'] + + +@pytest.mark.tier2 +@pytest.mark.no_containers +@pytest.mark.rhel_ver_match(r'^(?!6$)\d+$') +def test_positive_installable_errata( + target_sat, function_org, function_lce, function_location, rhel_contenthost +): + """Generate an Installable Errata report using the Report Template - Available Errata, + with the option of 'Installable'. + + :id: 6263a0fa-5021-4553-939b-84fb71c81d59 + + :setup: A Host with some applied errata + + :steps: + 1. Install an outdated package version + 2. Apply some errata which updates the package + 3. Downgrade the package impacted by the erratum + 4. Perform a search for any Available Errata + 5. 
Generate an Installable Report from the Available Errata + + :expectedresults: A report is generated with the installable errata listed + + :CaseImportance: Medium + + :customerscenario: true + + :BZ: 1726504 + """ + activation_key = target_sat.api.ActivationKey( + environment=function_lce, organization=function_org + ).create() + custom_cv = target_sat.api.ContentView(organization=function_org).create() + ERRATUM_ID = str(settings.repos.yum_6.errata[2]) + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': settings.repos.yum_6.url, + 'organization-id': function_org.id, + 'content-view-id': custom_cv.id, + 'lifecycle-environment-id': function_lce.id, + 'activationkey-id': activation_key.id, + } + ) + result = rhel_contenthost.register( + function_org, function_location, activation_key.name, target_sat + ) + assert f'The registered system name is: {rhel_contenthost.hostname}' in result.stdout + assert rhel_contenthost.subscribed + + # Remove package if already installed on this host + rhel_contenthost.execute(f'yum remove -y {FAKE_1_CUSTOM_PACKAGE_NAME}') + # Install the outdated package version + rhel_contenthost.execute(r'subscription-manager repos --enable \*') + assert rhel_contenthost.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 + assert ( + rhel_contenthost.execute(f'rpm -q {FAKE_1_CUSTOM_PACKAGE_NAME}').stdout.strip() + == FAKE_1_CUSTOM_PACKAGE + ) + + # Install/Apply the errata + task_id = target_sat.api.JobInvocation().run( + data={ + 'feature': 'katello_errata_install', + 'inputs': {'errata': ERRATUM_ID}, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + 'organization_id': function_org.id, + }, + )['id'] + target_sat.wait_for_tasks( + search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'), + search_rate=15, + max_tries=10, + ) + # Check that applying erratum updated the package + assert ( + rhel_contenthost.execute(f'rpm -q {FAKE_1_CUSTOM_PACKAGE_NAME}').stdout.strip() + == FAKE_2_CUSTOM_PACKAGE + ) + # Downgrade the package + assert rhel_contenthost.execute(f'yum downgrade -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 + + # Data to generate Installable Errata report + _rt_input_data = { + 'organization_id': function_org.id, + 'report_format': "json", + 'input_values': { + 'Installability': 'installable', + }, + } + + # Gather Errata using the template 'Available Errata', may take some time + # When condition is met, newest Report Template will have Errata entries + wait_for( + lambda: ( + target_sat.api.ReportTemplate() + .search(query={'search': 'name="Host - Available Errata"'})[0] + .read() + .generate(data=_rt_input_data) + != [] + ), + timeout=120, + delay=10, + ) + report = ( + target_sat.api.ReportTemplate() + .search(query={'search': 'name="Host - Available Errata"'})[0] + .read() + .generate(data=_rt_input_data) + ) + assert report + installable_errata = report[0] + assert FAKE_1_CUSTOM_PACKAGE_NAME in installable_errata['Packages'] + assert installable_errata['Erratum'] == ERRATUM_ID diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 844b53928fa..cce7e307cc4 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -4,28 +4,27 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ -import pytest from manifester import Manifester -from nailgun import entities from 
nailgun.entity_mixins import call_entity_method_with_timeout +import pytest from requests.exceptions import HTTPError from robottelo import constants -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings -from robottelo.constants import MIRRORING_POLICIES +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + MIRRORING_POLICIES, + REPOS, +) +from robottelo.constants.repos import FAKE_ZST_REPO +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import parametrized @@ -36,7 +35,7 @@ def test_negative_disable_repository_with_cv(module_entitlement_manifest_org, ta :id: e521a7a4-2502-4fe2-b297-a13fc99e679b - :Steps: + :steps: 1. Enable and sync a RH Repo 2. Create a Content View with Repository attached 3. Publish Content View @@ -65,10 +64,10 @@ def test_negative_disable_repository_with_cv(module_entitlement_manifest_org, ta with pytest.raises(HTTPError) as error: reposet.disable(data=data) # assert error.value.response.status_code == 500 - assert ( - 'Repository cannot be deleted since it has already been ' - 'included in a published Content View' in error.value.response.text - ) + assert ( + 'Repository cannot be deleted since it has already been ' + 'included in a published Content View' in error.value.response.text + ) @pytest.mark.tier1 @@ -78,7 +77,7 @@ def test_positive_update_repository_metadata(module_org, target_sat): :id: 6fe7bb3f-1640-4904-a223-b4764534afe8 - :Steps: + :steps: 1. Create a Product and Yum Repository 2. Sync the Repository and returns its content_counts for rpm 3. Update the url to a different Repo and re-sync the Repository @@ -128,7 +127,7 @@ def test_positive_epel_repositories_with_mirroring_policy( :id: 5c4e0ba4-4486-4eaf-b6ad-62831b7353a4 - :Steps: + :steps: 1. Create a Epel repository with mirroring_policy set 2. Sync the Repository and return its content_counts for rpm 3. Assert content was synced and mirroring policy type is correct @@ -144,7 +143,7 @@ def test_positive_epel_repositories_with_mirroring_policy( @pytest.mark.tier4 -def test_positive_sync_kickstart_repo(self, module_entitlement_manifest_org, target_sat): +def test_positive_sync_kickstart_repo(module_entitlement_manifest_org, target_sat): """No encoding gzip errors on kickstart repositories sync. @@ -152,8 +151,6 @@ def test_positive_sync_kickstart_repo(self, module_entitlement_manifest_org, tar :expectedresults: No encoding gzip errors present in /var/log/messages. - :CaseLevel: Integration - :customerscenario: true :steps: @@ -175,11 +172,11 @@ def test_positive_sync_kickstart_repo(self, module_entitlement_manifest_org, tar basearch='x86_64', org_id=module_entitlement_manifest_org.id, product=constants.REPOS['kickstart'][distro]['product'], - reposet=constants.REPOSET['kickstart'][distro], + reposet=constants.REPOS['kickstart'][distro]['reposet'], repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update(['download_policy']) @@ -194,13 +191,46 @@ def test_positive_sync_kickstart_repo(self, module_entitlement_manifest_org, tar assert rh_repo.content_counts['rpm'] > 0 +def test_positive_sync_upstream_repo_with_zst_compression( + module_org, module_product, module_target_sat +): + """Sync upstream repo having zst compression and verify it succeeds. 
+ + :id: 1eddff2a-b6b5-420b-a0e8-ba6a05c11ca4 + + :expectedresults: Repo sync is successful and no zst type compression errors are present in /var/log/messages. + + :steps: + + 1. Sync an upstream repository having zst type compression. + 2. Assert that no errors related to compression type are present in + /var/log/messages. + 3. Assert that sync was executed properly. + + :BZ: 2241934 + + :customerscenario: true + """ + repo = module_target_sat.api.Repository( + product=module_product, content_type='yum', url=FAKE_ZST_REPO + ).create() + assert repo.read().content_counts['rpm'] == 0 + sync = module_product.sync() + assert sync['result'] == 'success' + assert repo.read().content_counts['rpm'] > 0 + result = module_target_sat.execute( + 'grep pulp /var/log/messages | grep "Cannot detect compression type"' + ) + assert result.status == 1 + + @pytest.mark.tier1 def test_negative_upload_expired_manifest(module_org, target_sat): """Upload an expired manifest and attempt to refresh it :id: d6e652d8-5f46-4d15-9191-d842466d45d0 - :Steps: + :steps: 1. Upload a manifest 2. Delete the Subscription Allocation on RHSM 3. Attempt to refresh the manifest @@ -217,3 +247,118 @@ def test_negative_upload_expired_manifest(module_org, target_sat): "The manifest doesn't exist on console.redhat.com. " "Please create and import a new manifest." in error.value.stderr ) + + +@pytest.mark.tier1 +def test_positive_multiple_orgs_with_same_repo(target_sat): + """Test that multiple organizations syncing the same repository have matching metadata + + :id: 39cff8ea-969d-4b8f-9fb4-33b1ba768ff2 + + :steps: + 1. Create multiple organizations + 2. Sync the same repository to each organization + 3. Assert that each repository from each organization contains the same content counts + + :expectedresults: Each repository in each organization should have the same content counts + """ + repos = [] + orgs = [target_sat.api.Organization().create() for _ in range(3)] + for org in orgs: + product = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository( + content_type='yum', product=product, url=settings.repos.module_stream_1.url + ).create() + repo.sync() + repo_counts = target_sat.api.Repository(id=repo.id).read().content_counts + repos.append(repo_counts) + assert repos[0] == repos[1] == repos[2] + + +def test_positive_sync_multiple_large_repos(module_target_sat, module_entitlement_manifest_org): + """Enable and bulk sync multiple large repositories + + :id: b51c4a3d-d532-4342-be61-e868f7c3a723 + + :steps: + 1. Enable multiple large repositories: + Red Hat Enterprise Linux 8 for x86_64 - AppStream RPMs 8 + Red Hat Enterprise Linux 8 for x86_64 - BaseOS RPMs 8 + Red Hat Enterprise Linux 8 for x86_64 - AppStream Kickstart 8 + Red Hat Enterprise Linux 8 for x86_64 - BaseOS Kickstart 8 + 2. Sync all four repositories at the same time + 3.
Assert that the bulk sync succeeds + + :expectedresults: All repositories should sync with no errors + + :BZ: 2224031 + """ + repo_names = ['rhel8_bos', 'rhel8_aps'] + kickstart_names = ['rhel8_bos', 'rhel8_aps'] + for name in repo_names: + module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=REPOS[name]['product'], + repo=REPOS[name]['name'], + reposet=REPOS[name]['reposet'], + releasever=REPOS[name]['releasever'], + ) + + for name in kickstart_names: + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=constants.REPOS['kickstart'][name]['product'], + repo=constants.REPOS['kickstart'][name]['name'], + reposet=constants.REPOS['kickstart'][name]['reposet'], + releasever=constants.REPOS['kickstart'][name]['version'], + ) + rh_repos = module_target_sat.api.Repository(id=rh_repo_id).read() + rh_product = module_target_sat.api.Product(id=rh_repos.product.id).read() + assert len(rh_product.repository) == 4 + res = module_target_sat.api.ProductBulkAction().sync( + data={'ids': [rh_product.id]}, timeout=2000 + ) + assert res['result'] == 'success' + + +def test_positive_available_repositories_endpoint(module_sca_manifest_org, target_sat): + """Attempt to hit the /available_repositories endpoint with no failures + + :id: f4c9d4a0-9a82-4f06-b772-b1f7e3f45e7d + + :steps: + 1. Enable a Red Hat Repository + 2. Attempt to hit the endpoint: + GET /katello/api/repository_sets/:id/available_repositories + 3. Verify Actions::Katello::RepositorySet::ScanCdn task is run + 4. Verify there are no failures when scanning for repositories + + + :expectedresults: Actions::Katello::RepositorySet::ScanCdn task should succeed and + not fail when scanning for repositories + + :customerscenario: true + + :BZ: 2030445 + """ + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=constants.REPOS['rhel7_extra']['product'], + repo=constants.REPOS['rhel7_extra']['name'], + reposet=constants.REPOS['rhel7_extra']['reposet'], + releasever=None, + ) + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() + product = target_sat.api.Product(id=rh_repo.product.id).read() + reposet = target_sat.api.RepositorySet( + name=constants.REPOSET['rhel7_extra'], product=product + ).search()[0] + touch_endpoint = target_sat.api.RepositorySet.available_repositories(reposet) + assert touch_endpoint['total'] != 0 + results = target_sat.execute('tail -15 /var/log/foreman/production.log').stdout + assert 'Actions::Katello::RepositorySet::ScanCdn' in results + assert 'result: success' in results + assert 'Failed at scanning for repository' not in results diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index 25753b46a8a..e3b93fc7089 100644 --- a/tests/foreman/api/test_repository.py +++ b/tests/foreman/api/test_repository.py @@ -4,38 +4,28 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import re +from string import punctuation import tempfile import time -from string import punctuation -from urllib.parse import urljoin -from urllib.parse import urlparse -from urllib.parse import urlunparse +from urllib.parse import urljoin, urlparse, urlunparse -import pytest from fauxfactory import
gen_string from nailgun import client -from nailgun import entities -from nailgun.entity_mixins import call_entity_method_with_timeout -from nailgun.entity_mixins import TaskFailedError +from nailgun.entity_mixins import TaskFailedError, call_entity_method_with_timeout +import pytest from requests.exceptions import HTTPError from robottelo import constants from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import repos as repo_constants +from robottelo.constants import DataFile, repos as repo_constants from robottelo.content_info import get_repo_files_by_url from robottelo.logging import logger from robottelo.utils import datafactory @@ -52,24 +42,18 @@ def repo_options(request, module_org, module_product): @pytest.fixture -def repo_options_custom_product(request, module_org): +def repo_options_custom_product(request, module_org, module_target_sat): """Return the options that were passed as indirect parameters.""" options = getattr(request, 'param', {}).copy() options['organization'] = module_org - options['product'] = entities.Product(organization=module_org).create() + options['product'] = module_target_sat.api.Product(organization=module_org).create() return options @pytest.fixture -def env(module_org): - """Create a new puppet environment.""" - return entities.Environment(organization=[module_org]).create() - - -@pytest.fixture -def repo(repo_options): +def repo(repo_options, module_target_sat): """Create a new repository.""" - return entities.Repository(**repo_options).create() + return module_target_sat.api.Repository(**repo_options).create() class TestRepository: @@ -202,7 +186,7 @@ def test_positive_create_with_download_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_positive_create_with_default_download_policy(self, repo): + def test_positive_create_with_default_download_policy(self, repo, target_sat): """Verify if the default download policy is assigned when creating a YUM repo without `download_policy` field @@ -215,7 +199,7 @@ def test_positive_create_with_default_download_policy(self, repo): :CaseImportance: Critical """ - default_dl_policy = entities.Setting().search( + default_dl_policy = target_sat.api.Setting().search( query={'search': 'name=default_download_policy'} ) assert default_dl_policy @@ -315,25 +299,24 @@ def test_positive_create_unprotected(self, repo_options, repo): assert repo.unprotected == repo_options['unprotected'] @pytest.mark.tier2 - def test_positive_create_with_gpg(self, module_org, module_product): + def test_positive_create_with_gpg(self, module_org, module_product, module_target_sat): """Create a repository and provide a GPG key ID. :id: 023cf84b-74f3-4e63-a9d7-10afee6c1990 :expectedresults: A repository is created with the given GPG key ID. - :CaseLevel: Integration """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key).create() # Verify that the given GPG key ID is used. 
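# (the read-back repo exposes gpg_key as an entity reference, so comparing .id values below confirms the association persisted server-side)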
assert repo.gpg_key.id == gpg_key.id @pytest.mark.tier2 - def test_positive_create_same_name_different_orgs(self, repo): + def test_positive_create_same_name_different_orgs(self, repo, target_sat): """Create two repos with the same name in two different organizations. :id: bd1bd7e3-e393-44c8-a6d0-42edade40f60 @@ -341,11 +324,10 @@ def test_positive_create_same_name_different_orgs(self, repo): :expectedresults: The two repositories are successfully created and have given name. - :CaseLevel: Integration """ - org_2 = entities.Organization().create() - product_2 = entities.Product(organization=org_2).create() - repo_2 = entities.Repository(product=product_2, name=repo.name).create() + org_2 = target_sat.api.Organization().create() + product_2 = target_sat.api.Product(organization=org_2).create() + repo_2 = target_sat.api.Repository(product=product_2, name=repo.name).create() assert repo_2.name == repo.name @pytest.mark.tier1 @@ -354,7 +336,7 @@ def test_positive_create_same_name_different_orgs(self, repo): **datafactory.parametrized([{'name': name} for name in datafactory.invalid_values_list()]), indirect=True, ) - def test_negative_create_name(self, repo_options): + def test_negative_create_name(self, repo_options, target_sat): """Attempt to create repository with invalid names only. :id: 24947c92-3415-43df-add6-d6eb38afd8a3 @@ -366,7 +348,7 @@ def test_negative_create_name(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -376,7 +358,7 @@ def test_negative_create_name(self, repo_options): ), indirect=True, ) - def test_negative_create_with_same_name(self, repo_options, repo): + def test_negative_create_with_same_name(self, repo_options, repo, target_sat): """Attempt to create a repository providing a name of already existent entity @@ -389,10 +371,10 @@ def test_negative_create_with_same_name(self, repo_options, repo): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 - def test_negative_create_label(self, module_product): + def test_negative_create_label(self, module_product, module_target_sat): """Attempt to create repository with invalid label. :id: f646ae84-2660-41bd-9883-331285fa1c9a @@ -402,7 +384,9 @@ def test_negative_create_label(self, module_product): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(product=module_product, label=gen_string('utf8')).create() + module_target_sat.api.Repository( + product=module_product, label=gen_string('utf8') + ).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -410,7 +394,7 @@ def test_negative_create_label(self, module_product): **datafactory.parametrized([{'url': url} for url in datafactory.invalid_names_list()]), indirect=True, ) - def test_negative_create_url(self, repo_options): + def test_negative_create_url(self, repo_options, target_sat): """Attempt to create repository with invalid url. 
:id: 0bb9fc3f-d442-4437-b5d8-83024bc7ceab @@ -422,7 +406,7 @@ def test_negative_create_url(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.skipif( @@ -433,7 +417,7 @@ def test_negative_create_url(self, repo_options): **datafactory.parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}.com'}]), indirect=True, ) - def test_negative_create_with_url_with_special_characters(self, repo_options): + def test_negative_create_with_url_with_special_characters(self, repo_options, target_sat): """Verify that repository URL cannot contain unquoted special characters :id: 2ffaa412-e5e5-4bec-afaa-9ea54315df49 @@ -445,7 +429,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -455,7 +439,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): ), indirect=True, ) - def test_negative_create_with_invalid_download_policy(self, repo_options): + def test_negative_create_with_invalid_download_policy(self, repo_options, target_sat): """Verify that YUM repository cannot be created with invalid download policy @@ -469,13 +453,13 @@ def test_negative_create_with_invalid_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_negative_update_to_invalid_download_policy(self, repo): + def test_negative_update_to_invalid_download_policy(self, repo, target_sat): """Verify that YUM repository cannot be updated to invalid download policy @@ -495,16 +479,15 @@ def test_negative_update_to_invalid_download_policy(self, repo): @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', - **datafactory.parametrized( - [ - {'content_type': content_type, 'download_policy': 'on_demand'} - for content_type in constants.REPO_TYPE.keys() - if content_type != 'yum' - ] - ), + [ + {'content_type': content_type, 'download_policy': 'on_demand'} + for content_type in constants.REPO_TYPE + if content_type != 'yum' + ], indirect=True, + ids=lambda x: x['content_type'], ) - def test_negative_create_non_yum_with_download_policy(self, repo_options): + def test_negative_create_non_yum_with_download_policy(self, repo_options, target_sat): """Verify that non-YUM repositories cannot be created with download policy @@ -518,7 +501,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -528,7 +511,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): ), indirect=True, ) - def test_negative_create_checksum(self, repo_options): + def test_negative_create_checksum(self, repo_options, target_sat): """Attempt to create repository with invalid checksum type. 
:id: c49a3c49-110d-4b74-ae14-5c9494a4541c @@ -540,7 +523,7 @@ def test_negative_create_checksum(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -552,7 +535,7 @@ def test_negative_create_checksum(self, repo_options): ids=['sha1', 'sha256'], indirect=True, ) - def test_negative_create_checksum_with_on_demand_policy(self, repo_options): + def test_negative_create_checksum_with_on_demand_policy(self, repo_options, target_sat): """Attempt to create repository with checksum and on_demand policy. :id: de8b157c-ed62-454b-94eb-22659ce1158e @@ -564,7 +547,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -674,24 +657,23 @@ def test_positive_update_unprotected(self, repo): assert repo.unprotected is True @pytest.mark.tier2 - def test_positive_update_gpg(self, module_org, module_product): + def test_positive_update_gpg(self, module_org, module_product, module_target_sat): """Create a repository and update its GPGKey :id: 0e9319dc-c922-4ecf-9f83-d221cfdf54c2 :expectedresults: The updated repository points to a new GPG key. - :CaseLevel: Integration """ # Create a repo and make it point to a GPG key. - gpg_key_1 = entities.GPGKey( + gpg_key_1 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key_1).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key_1).create() # Update the repo and make it point to a new GPG key. - gpg_key_2 = entities.GPGKey( + gpg_key_2 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -700,24 +682,9 @@ def test_positive_update_gpg(self, module_org, module_product): repo = repo.update(['gpg_key']) assert repo.gpg_key.id == gpg_key_2.id - @pytest.mark.tier2 - def test_positive_update_contents(self, repo): - """Create a repository and upload RPM contents. - - :id: 8faa64f9-b620-4c0a-8c80-801e8e6436f1 - - :expectedresults: The repository's contents include one RPM. - - :CaseLevel: Integration - """ - # Upload RPM content. - repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) - # Verify the repository's contents. - assert repo.read().content_counts['rpm'] == 1 - @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_upload_delete_srpm(self, repo): + def test_positive_upload_delete_srpm(self, repo, target_sat): """Create a repository and upload, delete SRPM contents. 
:id: e091a725-048f-44ca-90cc-c016c450ced9 @@ -731,12 +698,12 @@ def test_positive_upload_delete_srpm(self, repo): :BZ: 1378442 """ # upload srpm - entities.ContentUpload(repository=repo).upload( + target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) assert repo.read().content_counts['srpm'] == 1 - srpm_detail = entities.Srpms().search(query={'repository_id': repo.id}) + srpm_detail = target_sat.api.Srpms().search(query={'repository_id': repo.id}) assert len(srpm_detail) == 1 # Delete srpm @@ -755,7 +722,7 @@ def test_positive_upload_delete_srpm(self, repo): ids=['yum_fake'], indirect=True, ) - def test_positive_create_delete_srpm_repo(self, repo): + def test_positive_create_delete_srpm_repo(self, repo, target_sat): """Create a repository, sync SRPM contents and remove repo :id: e091a725-042f-43ca-99cc-c017c450ced9 @@ -768,7 +735,7 @@ def test_positive_create_delete_srpm_repo(self, repo): """ repo.sync() assert repo.read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'repository_id': repo.id})) == 3 + assert len(target_sat.api.Srpms().search(query={'repository_id': repo.id})) == 3 repo.delete() with pytest.raises(HTTPError): repo.read() @@ -783,7 +750,7 @@ def test_positive_create_delete_srpm_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_remove_contents(self, repo): + def test_positive_remove_contents(self, repo, target_sat): """Synchronize a repository and remove rpm content. :id: f686b74b-7ee9-4806-b999-bc05ffe61a9d @@ -800,7 +767,7 @@ def test_positive_remove_contents(self, repo): repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 @@ -870,7 +837,6 @@ def test_positive_synchronize(self, repo): :expectedresults: The repo has at least one RPM. 
- :CaseLevel: Integration """ repo.sync() assert repo.read().content_counts['rpm'] >= 1 @@ -904,7 +870,6 @@ def test_positive_synchronize_auth_yum_repo(self, repo): :expectedresults: Repository is created and synced - :CaseLevel: Integration """ # Verify that repo is not yet synced assert repo.content_counts['rpm'] == 0 @@ -943,7 +908,6 @@ def test_negative_synchronize_auth_yum_repo(self, repo): :expectedresults: Repository is created but synchronization fails - :CaseLevel: Integration """ with pytest.raises(TaskFailedError): repo.sync() @@ -958,7 +922,7 @@ def test_negative_synchronize_auth_yum_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_resynchronize_rpm_repo(self, repo): + def test_positive_resynchronize_rpm_repo(self, repo, target_sat): """Check that repository content is resynced after packages were removed from repository @@ -970,13 +934,12 @@ def test_positive_resynchronize_rpm_repo(self, repo): :BZ: 1459845, 1318004 - :CaseLevel: Integration """ # Synchronize it repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 # Re-synchronize repository @@ -1026,7 +989,6 @@ def test_positive_delete_rpm(self, repo): :expectedresults: The repository deleted successfully. - :CaseLevel: Integration """ repo.sync() # Check that there is at least one package @@ -1061,8 +1023,6 @@ def test_positive_access_protected_repository(self, module_org, repo, target_sat :BZ: 1242310 - :CaseLevel: Integration - :CaseImportance: High """ repo.sync() @@ -1103,8 +1063,6 @@ def test_positive_access_unprotected_repository(self, module_org, repo, target_s :expectedresults: The repository data file is successfully accessed. - :CaseLevel: Integration - :CaseImportance: Medium """ repo.sync() @@ -1178,7 +1136,7 @@ def test_positive_recreate_pulp_repositories(self, module_entitlement_manifest_o reposet=constants.REPOSET['rhst7'], releasever=None, ) - call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) + call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) with target_sat.session.shell() as sh: sh.send('foreman-rake console') time.sleep(30) # sleep to allow time for console to open @@ -1188,11 +1146,12 @@ def test_positive_recreate_pulp_repositories(self, module_entitlement_manifest_o identifier = results.stdout.split('version_href\n"', 1)[1].split('version')[0] target_sat.execute( f'curl -X DELETE {target_sat.url}/{identifier}' - f' --cert /etc/pki/katello/certs/pulp-client.crt' - f' --key /etc/pki/katello/private/pulp-client.key' + f' --cert /etc/foreman/client_cert.pem' + f' --key /etc/foreman/client_key.pem' ) command_output = target_sat.execute('foreman-rake katello:correct_repositories COMMIT=true') - assert 'Recreating' in command_output.stdout and 'TaskError' not in command_output.stdout + assert 'Recreating' in command_output.stdout + assert 'TaskError' not in command_output.stdout @pytest.mark.tier2 def test_positive_mirroring_policy(self, target_sat): @@ -1210,14 +1169,13 @@ def test_positive_mirroring_policy(self, target_sat): :expectedresults: 1. The resync restores the original content properly. 
- :CaseLevel: System """ repo_url = settings.repos.yum_0.url packages_count = constants.FAKE_0_YUM_REPO_PACKAGES_COUNT - org = entities.Organization().create() - prod = entities.Product(organization=org).create() - repo = entities.Repository( + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository( download_policy='immediate', mirroring_policy='mirror_complete', product=prod, @@ -1228,7 +1186,7 @@ def test_positive_mirroring_policy(self, target_sat): assert repo.content_counts['rpm'] == packages_count # remove all packages from the repo and upload another one - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -1249,13 +1207,100 @@ def test_positive_mirroring_policy(self, target_sat): assert len(files) == packages_count assert constants.RPM_TO_UPLOAD not in files + @pytest.mark.tier3 + @pytest.mark.parametrize('policy', ['additive', 'mirror_content_only']) + def test_positive_sync_with_treeinfo_ignore( + self, target_sat, function_entitlement_manifest_org, policy + ): + """Verify that the treeinfo file is not synced when added to ignorable content + and synced otherwise. Check for applicable mirroring policies. + + :id: d7becf1d-3883-468d-88c4-d513a2e2e90a + + :parametrized: yes + + :steps: + 1. Enable RHEL8 BaseOS KS repo. + 2. Add `treeinfo` to ignorable content and sync, check it's missing. + 3. Remove the `treeinfo` from ignorable content, resync, check again. + + :expectedresults: + 1. The sync should succeed. + 2. The treeinfo file should be missing when in ignorable content and present otherwise. + + :customerscenario: true + + :BZ: 2174912, 2135215 + + """ + distro = 'rhel8_bos' + repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch='x86_64', + org_id=function_entitlement_manifest_org.id, + product=constants.REPOS['kickstart'][distro]['product'], + reposet=constants.REPOS['kickstart'][distro]['reposet'], + repo=constants.REPOS['kickstart'][distro]['name'], + releasever=constants.REPOS['kickstart'][distro]['version'], + ) + repo = target_sat.api.Repository(id=repo_id).read() + + repo.mirroring_policy = policy + repo.ignorable_content = ['treeinfo'] + repo = repo.update(['mirroring_policy', 'ignorable_content']) + repo.sync() + with pytest.raises(AssertionError): + target_sat.md5_by_url(f'{repo.full_path}.treeinfo') + + repo.ignorable_content = [] + repo = repo.update(['ignorable_content']) + repo.sync() + assert target_sat.md5_by_url( + f'{repo.full_path}.treeinfo' + ), 'The treeinfo file is missing in the KS repo but it should be there.' + + @pytest.mark.tier3 + @pytest.mark.parametrize('client_repo', ['rhsclient7', 'rhsclient8', 'rhsclient9']) + def test_positive_katello_agent_availability( + self, target_sat, function_sca_manifest_org, client_repo + ): + """Verify katello-agent package remains available in the RH Satellite Client repos + for older Satellite versions (Sat 6.14 is the last one supporting it). + + :id: cd5b4e5b-e4f1-4d00-b171-30cd5b1e7ce8 + + :parametrized: yes + + :steps: + 1. Enable RH Satellite Client repo and sync it. + 2. Read the repo packages. + + :expectedresults: + 1. Katello-agent remains available in the Client repos. + + """ + # Enable RH Satellite Client repo and sync it. 
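+ # (enable_rhrepo_and_fetchid enables the repository set for the org and returns the new repository ID, read back below through target_sat.api.Repository)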
+ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch='x86_64', + org_id=function_sca_manifest_org.id, + product=constants.REPOS[client_repo]['product'], + reposet=constants.REPOS[client_repo]['reposet'], + repo=constants.REPOS[client_repo]['name'], + releasever=constants.REPOS[client_repo]['version'], + ) + repo = target_sat.api.Repository(id=repo_id).read() + repo.sync() + # Make sure katello-agent lives in the repo happily. + repo_pkgs = target_sat.api.Repository(id=repo_id).packages()['results'] + agent_pkgs = [pkg['filename'] for pkg in repo_pkgs if 'katello-agent' in pkg['name']] + assert len(agent_pkgs), 'Katello-agent package is missing from the RH Client repo!' + @pytest.mark.run_in_one_thread class TestRepositorySync: """Tests for ``/katello/api/repositories/:id/sync``.""" @pytest.mark.tier2 - def test_positive_sync_repos_with_lots_files(self): + def test_positive_sync_repos_with_lots_files(self, target_sat): """Attempt to synchronize repository containing a lot of files inside rpms. @@ -1265,17 +1310,16 @@ def test_positive_sync_repos_with_lots_files(self): :BZ: 1404345 - :CaseLevel: Integration - :expectedresults: repository was successfully synchronized """ - org = entities.Organization().create() - product = entities.Product(organization=org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_8.url).create() + org = target_sat.api.Organization().create() + product = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository(product=product, url=settings.repos.yum_8.url).create() response = repo.sync() assert response, f"Repository {repo} failed to sync." @pytest.mark.tier2 + @pytest.mark.build_sanity def test_positive_sync_rh(self, module_entitlement_manifest_org, target_sat): """Sync RedHat Repository. @@ -1283,7 +1327,6 @@ def test_positive_sync_rh(self, module_entitlement_manifest_org, target_sat): :expectedresults: Synced repo should fetch the data successfully. - :CaseLevel: Integration """ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', @@ -1293,7 +1336,7 @@ def test_positive_sync_rh(self, module_entitlement_manifest_org, target_sat): reposet=constants.REPOSET['rhst7'], releasever=None, ) - entities.Repository(id=repo_id).sync() + target_sat.api.Repository(id=repo_id).sync() @pytest.mark.tier2 @pytest.mark.skipif( @@ -1315,8 +1358,6 @@ def test_positive_sync_yum_with_string_based_version(self, repo): :expectedresults: Synced repo should fetch the data successfully and parse versions as string. - :CaseLevel: Integration - :customerscenario: true :BZ: 1741011 @@ -1337,7 +1378,6 @@ def test_positive_sync_rh_app_stream(self): :expectedresults: Synced repo should fetch the data successfully and it should contain the module streams. 
- :CaseLevel: Integration """ pass @@ -1369,19 +1409,19 @@ def test_positive_bulk_cancel_sync(self, target_sat, module_entitlement_manifest releasever=repo['releasever'], ) repo_ids.append(repo_id) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update() sync_ids = [] for repo_id in repo_ids: - sync_task = entities.Repository(id=repo_id).sync(synchronous=False) + sync_task = target_sat.api.Repository(id=repo_id).sync(synchronous=False) sync_ids.append(sync_task['id']) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) # Give some time for sync cancels to calm down time.sleep(30) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) for sync_id in sync_ids: - sync_result = entities.ForemanTask(id=sync_id).poll(canceled=True) + sync_result = target_sat.api.ForemanTask(id=sync_id).poll(must_succeed=False) assert ( 'Task canceled' in sync_result['humanized']['errors'] or 'No content added' in sync_result['humanized']['output'] @@ -1404,7 +1444,7 @@ def test_positive_bulk_cancel_sync(self, target_sat, module_entitlement_manifest indirect=True, ) def test_positive_sync_sha_repo(self, repo, target_sat): - """Sync a 'sha' repo successfully + """Sync a repository with 'sha' checksum type, which actually uses 'sha1' :id: b842a21d-639a-48aa-baf3-9244d8bc1415 :parametrized: yes :BZ: 2024889 - :SubComponent: Candlepin + :SubComponent: Pulp """ sync_result = repo.sync() assert sync_result['result'] == 'success' @@ -1493,15 +1533,61 @@ def test_positive_sync_kickstart_check_os( repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.sync() major, minor = constants.REPOS['kickstart'][distro]['version'].split('.') - os = entities.OperatingSystem().search( + os = target_sat.api.OperatingSystem().search( query={'search': f'name="RedHat" AND major="{major}" AND minor="{minor}"'} ) assert len(os) + @pytest.mark.tier2 + @pytest.mark.parametrize( + 'repo_options', + **datafactory.parametrized( + {'yum': {'content_type': 'yum', 'unprotected': True, 'url': 'http://example.com'}} + ), + indirect=True, + ) + def test_missing_content_id(self, repo, function_entitlement_manifest_org, target_sat): + """Handle several cases of missing content ID correctly + + :id: f507790a-933b-4b3f-ac93-cade6967fbd2 + + :parametrized: yes + + :setup: + 1. Create product and repo, sync repo + + :steps: + 1. Try to update repo URL + 2. Attempt to delete repo + 3.
Refresh manifest file + + :expectedresults: Repo URL can be updated, the repo can be deleted, and manifest refresh works after repo delete + + :BZ: 2032040 + """ + # Wait for async metadata generate task to finish + time.sleep(5) + # Get rid of the URL + repo.url = '' + repo = repo.update(['url']) + assert repo.url is None + # Now change the URL back + repo.url = 'http://example.com' + repo = repo.update(['url']) + assert repo.url == 'http://example.com' + # Now delete the Repo + repo.delete() + with pytest.raises(HTTPError): + repo.read() + output = target_sat.cli.Subscription.refresh_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) + assert 'Candlepin job status: SUCCESS' in output, 'Failed to refresh manifest' + class TestDockerRepository: """Tests specific to using ``Docker`` repositories.""" @@ -1536,37 +1622,6 @@ def test_positive_create(self, repo_options, repo): for k in 'name', 'docker_upstream_name', 'content_type': assert getattr(repo, k) == repo_options[k] - @pytest.mark.tier1 - @pytest.mark.parametrize( - 'repo_options', - **datafactory.parametrized( - { - constants.CONTAINER_UPSTREAM_NAME: { - 'content_type': 'docker', - 'docker_upstream_name': constants.CONTAINER_UPSTREAM_NAME, - 'name': gen_string('alphanumeric', 10), - 'url': constants.CONTAINER_REGISTRY_HUB, - } - } - ), - indirect=True, - ) - def test_positive_synchronize(self, repo): - """Create and sync a Docker-type repository - - :id: 27653663-e5a7-4700-a3c1-f6eab6468adf - - :parametrized: yes - - :expectedresults: A repository is created with a Docker repository and - it is synchronized. - - :CaseImportance: Critical - """ - # TODO: add timeout support to sync(). This repo needs more than the default 300 seconds. - repo.sync() - assert repo.read().content_counts['docker_manifest'] >= 1 - @pytest.mark.tier3 @pytest.mark.parametrize( 'repo_options', @@ -1584,7 +1639,7 @@ def test_positive_synchronize(self, repo): ), indirect=True, ) - def test_positive_cancel_docker_repo_sync(self, repo): + def test_positive_cancel_docker_repo_sync(self, repo, target_sat): """Cancel a large, syncing Docker-type repository :id: 86534979-be49-40ad-8290-05ac71c801b2 @@ -1607,8 +1662,8 @@ def test_positive_cancel_docker_repo_sync(self, repo): sync_task = repo.sync(synchronous=False) # Need to wait for sync to actually start up time.sleep(2) - entities.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) - sync_task = entities.ForemanTask(id=sync_task['id']).poll(canceled=True) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) + sync_task = target_sat.api.ForemanTask(id=sync_task['id']).poll(must_succeed=False) assert 'Task canceled' in sync_task['humanized']['errors'] assert 'No content added' in sync_task['humanized']['output'] @@ -1627,7 +1682,9 @@ def test_positive_cancel_docker_repo_sync(self, repo): ), indirect=True, ) - def test_positive_delete_product_with_synced_repo(self, repo_options_custom_product): + def test_positive_delete_product_with_synced_repo( + self, repo_options_custom_product, target_sat + ): """Create and sync a Docker-type repository, delete the product.
:id: c3d33836-54df-484d-97e1-f9fc9e22d23c @@ -1642,7 +1699,7 @@ def test_positive_delete_product_with_synced_repo(self, repo_options_custom_prod :BZ: 1867287 """ - repo = entities.Repository(**repo_options_custom_product).create() + repo = target_sat.api.Repository(**repo_options_custom_product).create() repo.sync(timeout=600) assert repo.read().content_counts['docker_manifest'] >= 1 assert repo.product.delete() @@ -1697,57 +1754,17 @@ def test_positive_synchronize_private_registry(self, repo): :parametrized: yes - :expectedresults: A repository is created with a private Docker \ + :expectedresults: A repository is created with a private Docker repository and it is synchronized. :customerscenario: true :BZ: 1475121 - :CaseLevel: Integration """ repo.sync() assert repo.read().content_counts['docker_manifest'] >= 1 - @pytest.mark.tier2 - @pytest.mark.parametrize( - 'repo_options', - **datafactory.parametrized( - { - 'private_registry': { - 'content_type': 'docker', - 'docker_upstream_name': settings.docker.private_registry_name, - 'name': gen_string('alpha'), - 'upstream_username': settings.docker.private_registry_username, - 'upstream_password': 'ThisIsaWrongPassword', - 'url': settings.docker.private_registry_url, - } - } - ), - indirect=True, - ) - def test_negative_synchronize_private_registry_wrong_password(self, repo_options, repo): - """Create and try to sync a Docker-type repository from a private - registry providing wrong credentials the sync must fail with - reasonable error message. - - :id: 2857fce2-fed7-49fc-be20-bf2e4726c9f5 - - :parametrized: yes - - :expectedresults: A repository is created with a private Docker \ - repository and sync fails with reasonable error message. - - :customerscenario: true - - :BZ: 1475121, 1580510 - - :CaseLevel: Integration - """ - msg = "401, message=\'Unauthorized\'" - with pytest.raises(TaskFailedError, match=msg): - repo.sync() - @pytest.mark.tier2 @pytest.mark.parametrize( 'repo_options', @@ -1781,7 +1798,6 @@ def test_negative_synchronize_private_registry_wrong_repo(self, repo_options, re :BZ: 1475121, 1580510 - :CaseLevel: Integration """ msg = "404, message=\'Not Found\'" with pytest.raises(TaskFailedError, match=msg): @@ -1821,14 +1837,13 @@ def test_negative_synchronize_private_registry_no_passwd( :BZ: 1475121, 1580510 - :CaseLevel: Integration """ with pytest.raises( HTTPError, match='422 Client Error: Unprocessable Entity for url: ' f'{target_sat.url}:443/katello/api/v2/repositories', ): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier2 @pytest.mark.upgrade @@ -2089,8 +2104,6 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, # # :expectedresults: Synced repo should fetch the data successfully. 
# -# :CaseLevel: Integration -# # :customerscenario: true # # :BZ: 1625783 @@ -2105,7 +2118,7 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, # releasever=None, # basearch=None, # ) -# call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) +# call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) class TestSRPMRepository: @@ -2114,7 +2127,9 @@ class TestSRPMRepository: @pytest.mark.skip_if_open("BZ:2016047") @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): + def test_positive_srpm_upload_publish_promote_cv( + self, module_org, module_lce, repo, module_target_sat + ): """Upload SRPM to repository, add repository to content view and publish, promote content view @@ -2122,25 +2137,31 @@ def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): :expectedresults: srpms can be listed in organization, content view, Lifecycle env """ - entities.ContentUpload(repository=repo).upload( + module_target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = module_target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 1 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 1 + assert ( + len(module_target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 1 + ) assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) == 1 + len( + module_target_sat.api.Srpms().search( + query={'content_view_version_id': cv.version[0].id} + ) + ) + == 1 ) @pytest.mark.upgrade @pytest.mark.tier2 - @pytest.mark.skip('Uses deprecated SRPM repository') @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @@ -2149,7 +2170,7 @@ def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): **datafactory.parametrized({'fake_srpm': {'url': repo_constants.FAKE_YUM_SRPM_REPO}}), indirect=True, ) - def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): + def test_positive_repo_sync_publish_promote_cv(self, module_org, module_lce, repo, target_sat): """Synchronize repository with SRPMs, add repository to content view and publish, promote content view @@ -2161,19 +2182,20 @@ def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): """ repo.sync() - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 3 + assert len(target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 3 assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) >= 3 + len(target_sat.api.Srpms().search(query={'content_view_version_id': cv.version[0].id})) + >= 3 ) - cv.version[0].promote(data={'environment_ids': env.id, 'force': False}) - assert len(entities.Srpms().search(query={'environment_id': env.id})) == 3 + cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) + assert 
len(target_sat.api.Srpms().search(query={'environment_id': module_lce.id})) >= 3 class TestSRPMRepositoryIgnoreContent: @@ -2182,8 +2204,6 @@ class TestSRPMRepositoryIgnoreContent: In particular sync of duplicate SRPMs would fail when using the flag ``ignorable_content``. - :CaseLevel: Integration - :CaseComponent: Pulp :customerscenario: true @@ -2288,12 +2308,12 @@ class TestFileRepository: **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_upload_file_to_file_repo(self, repo): + def test_positive_upload_file_to_file_repo(self, repo, target_sat): """Check arbitrary file can be uploaded to File Repository :id: fdb46481-f0f4-45aa-b075-2a8f6725e51b - :Steps: + :steps: 1. Create a File Repository 2. Upload an arbitrary file to it @@ -2303,32 +2323,14 @@ def test_positive_upload_file_to_file_repo(self, repo): :CaseAutomation: Automated """ - repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) + with open(DataFile.FAKE_FILE_NEW_NAME, 'rb') as handle: + repo.upload_content(files={'content': handle}) assert repo.read().content_counts['file'] == 1 - filesearch = entities.File().search(query={"search": f"name={constants.RPM_TO_UPLOAD}"}) - assert constants.RPM_TO_UPLOAD == filesearch[0].name - - @pytest.mark.stubbed - @pytest.mark.tier1 - def test_positive_file_permissions(self): - """Check file permissions after file upload to File Repository - - :id: 03b4b7dd-0505-4302-ae00-5de33ad420b0 - - :Setup: - 1. Create a File Repository - 2. Upload an arbitrary file to it - - :Steps: Retrieve file permissions from File Repository - - :expectedresults: uploaded file permissions are kept after upload - - :CaseImportance: Critical - - :CaseAutomation: NotAutomated - """ - pass + filesearch = target_sat.api.File().search( + query={"search": f"name={constants.FAKE_FILE_NEW_NAME}"} + ) + assert filesearch[0].name == constants.FAKE_FILE_NEW_NAME @pytest.mark.tier1 @pytest.mark.upgrade @@ -2337,7 +2339,7 @@ def test_positive_file_permissions(self): **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_remove_file(self, repo): + def test_positive_remove_file(self, repo, target_sat): """Check arbitrary file can be removed from File Repository :id: 65068b4c-9018-4baa-b87b-b6e9d7384a5d @@ -2346,7 +2348,7 @@ def test_positive_remove_file(self, repo): 1. Create a File Repository 2. Upload an arbitrary file to it - :Steps: Remove a file from File Repository + :steps: Remove a file from File Repository :expectedresults: file is not listed under File Repository after removal @@ -2358,87 +2360,11 @@ def test_positive_remove_file(self, repo): repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) assert repo.read().content_counts['file'] == 1 - file_detail = entities.File().search(query={'repository_id': repo.id}) + file_detail = target_sat.api.File().search(query={'repository_id': repo.id}) repo.remove_content(data={'ids': [file_detail[0].id], 'content_type': 'file'}) assert repo.read().content_counts['file'] == 0 - @pytest.mark.stubbed - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_remote_directory_sync(self): - """Check an entire remote directory can be synced to File Repository - through http - - :id: 5c29b758-004a-4c71-a860-7087a0e96747 - - :Setup: - 1. Create a directory to be synced with a pulp manifest on its root - 2. Make the directory available through http - - :Steps: - 1. 
Create a File Repository with url pointing to http url - created on setup - 2. Initialize synchronization - - - :expectedresults: entire directory is synced over http - - :CaseAutomation: NotAutomated - """ - pass - - @pytest.mark.stubbed - @pytest.mark.tier1 - def test_positive_local_directory_sync(self): - """Check an entire local directory can be synced to File Repository - - :id: 178145e6-62e1-4cb9-b825-44d3ab41e754 - - :Setup: - 1. Create a directory to be synced with a pulp manifest on its root - locally (on the Satellite/Foreman host) - - :Steps: - 1. Create a File Repository with url pointing to local url - created on setup - 2. Initialize synchronization - - - :expectedresults: entire directory is synced - - :CaseImportance: Critical - - :CaseAutomation: NotAutomated - """ - pass - - @pytest.mark.stubbed - @pytest.mark.tier1 - def test_positive_symlinks_sync(self): - """Check synlinks can be synced to File Repository - - :id: 438a8e21-3502-4995-86db-c67ba0f3c469 - - :Setup: - 1. Create a directory to be synced with a pulp manifest on its root - locally (on the Satellite/Foreman host) - 2. Make sure it contains synlinks - - :Steps: - 1. Create a File Repository with url pointing to local url - created on setup - 2. Initialize synchronization - - :expectedresults: entire directory is synced, including files - referred by symlinks - - :CaseImportance: Critical - - :CaseAutomation: NotAutomated - """ - pass - @pytest.mark.skip_if_not_set('container_repo') class TestTokenAuthContainerRepository: @@ -2452,7 +2378,9 @@ class TestTokenAuthContainerRepository: """ @pytest.mark.tier2 - def test_positive_create_with_long_token(self, module_org, module_product, request): + def test_positive_create_with_long_token( + self, module_org, module_product, request, module_target_sat + ): """Create and sync Docker-type repo from the Red Hat Container registry Using token based auth, with very long tokens (>255 characters). 
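The hunks below also rework teardown: previously the test registered a finalizer inside the sync loop, and because the closure captured the loop variable, every finalizer deleted only the last repository created. The patch instead collects each repository in a shared to_clean list drained by a single finalizer. A minimal sketch of that pattern under the same fixture conventions; the upstream names and registry URL here are placeholders, not values from settings:

import logging

logger = logging.getLogger(__name__)


def test_sync_many_docker_repos(request, module_org, module_product, module_target_sat):
    to_clean = []

    @request.addfinalizer
    def clean_repos():
        # Runs once at teardown and tolerates repos that are already gone.
        for repo in to_clean:
            try:
                repo.delete(synchronous=False)
            except Exception:
                logger.exception(f'Exception cleaning up docker repo:\n{repo}')

    for upstream_name in ('org/repo-a', 'org/repo-b'):  # hypothetical names
        repo = module_target_sat.api.Repository(
            content_type='docker',
            docker_upstream_name=upstream_name,
            url='https://registry.example.com',  # placeholder upstream registry
            organization=module_org,
            product=module_product,
        ).create()
        # Append before syncing so a failed sync still gets cleaned up.
        to_clean.append(repo)
        repo.sync(timeout=900)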
@@ -2473,6 +2401,17 @@ def test_positive_create_with_long_token(self, module_org, module_product, reque container_repo = container_repos[0] except IndexError: pytest.skip('No registries with "long_pass" set to true') + + to_clean = [] + + @request.addfinalizer + def clean_repos(): + for repo in to_clean: + try: + repo.delete(synchronous=False) + except Exception: + logger.exception(f'Exception cleaning up docker repo:\n{repo}') + for docker_repo_name in container_repo.repos_to_sync: repo_options = dict( content_type='docker', @@ -2489,19 +2428,13 @@ def test_positive_create_with_long_token(self, module_org, module_product, reque if not len(repo_options['upstream_password']) > 255: pytest.skip('The "long_pass" registry does not meet length requirement') - repo = entities.Repository(**repo_options).create() - - @request.addfinalizer - def clean_repo(): - try: - repo.delete(synchronous=False) - except Exception: - logger.exception('Exception cleaning up docker repo:') + repo = module_target_sat.api.Repository(**repo_options).create() + to_clean.append(repo) repo = repo.read() for field in 'name', 'docker_upstream_name', 'content_type', 'upstream_username': assert getattr(repo, field) == repo_options[field] - repo.sync(timeout=600) + repo.sync(timeout=900) assert repo.read().content_counts['docker_manifest'] > 1 try: @@ -2511,7 +2444,9 @@ def clean_repo(): @pytest.mark.tier2 @pytest.mark.parametrize('repo_key', container_repo_keys) - def test_positive_tag_whitelist(self, request, repo_key, module_org, module_product): + def test_positive_tag_whitelist( + self, request, repo_key, module_org, module_product, module_target_sat + ): """Create and sync Docker-type repos from multiple supported registries with a tag whitelist :id: 4f8ea85b-4c69-4da6-a8ef-bd467ee35147 @@ -2521,6 +2456,17 @@ def test_positive_tag_whitelist(self, request, repo_key, module_org, module_prod :expectedresults: multiple products and repos are created """ container_repo = getattr(settings.container_repo.registries, repo_key) + + to_clean = [] + + @request.addfinalizer + def clean_repos(): + for repo in to_clean: + try: + repo.delete(synchronous=False) + except Exception: + logger.exception(f'Exception cleaning up docker repo:\n{repo}') + for docker_repo_name in container_repo.repos_to_sync: repo_options = dict( content_type='docker', @@ -2534,14 +2480,8 @@ def test_positive_tag_whitelist(self, request, repo_key, module_org, module_prod repo_options['organization'] = module_org repo_options['product'] = module_product - repo = entities.Repository(**repo_options).create() - - @request.addfinalizer - def clean_repo(): - try: - repo.delete(synchronous=False) - except Exception: - logger.exception('Exception cleaning up docker repo:') + repo = module_target_sat.api.Repository(**repo_options).create() + to_clean.append(repo) for field in 'name', 'docker_upstream_name', 'content_type', 'upstream_username': assert getattr(repo, field) == repo_options[field] diff --git a/tests/foreman/api/test_repository_set.py b/tests/foreman/api/test_repository_set.py index 7dd560c9cb9..8f8c56ea04e 100644 --- a/tests/foreman/api/test_repository_set.py +++ b/tests/foreman/api/test_repository_set.py @@ -7,23 +7,16 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities -from robottelo.constants import PRDS -from robottelo.constants import REPOSET +from robottelo.constants import PRDS, REPOSET 
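For orientation, the product and reposet fixtures below are normally consumed by enabling a repository out of the set and checking the product afterwards. A hedged usage sketch: enable and disable exist on nailgun's RepositorySet, but the exact payload keys shown are assumptions based on common usage in this suite, not taken from this diff:

def test_enable_redhat_repo(reposet, product):
    # Enable one repository from the set, then confirm the product now
    # carries at least one repository.
    payload = {'basearch': 'x86_64', 'releasever': None}
    reposet.enable(data=payload)
    assert len(product.read().repository) > 0
    # Disable it again to leave the manifest org clean for other tests.
    reposet.disable(data=payload)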
pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.tier1] @@ -34,17 +27,17 @@ @pytest.fixture -def product(function_entitlement_manifest_org): +def product(function_entitlement_manifest_org, module_target_sat): """Find and return the product matching PRODUCT_NAME.""" - return entities.Product( + return module_target_sat.api.Product( name=PRODUCT_NAME, organization=function_entitlement_manifest_org ).search()[0] @pytest.fixture -def reposet(product): +def reposet(product, module_target_sat): """Find and return the repository set matching REPOSET_NAME and product.""" - return entities.RepositorySet(name=REPOSET_NAME, product=product).search()[0] + return module_target_sat.api.RepositorySet(name=REPOSET_NAME, product=product).search()[0] @pytest.fixture diff --git a/tests/foreman/api/test_rhc.py b/tests/foreman/api/test_rhc.py new file mode 100644 index 00000000000..391277bfb38 --- /dev/null +++ b/tests/foreman/api/test_rhc.py @@ -0,0 +1,99 @@ +"""Test class for RH Cloud connector - rhc + +:Requirement: Remoteexecution + +:CaseAutomation: Automated + +:CaseComponent: RHCloud + +:Team: Phoenix-subscriptions + +:CaseImportance: High + +""" +from fauxfactory import gen_string +import pytest + +from robottelo.utils.issue_handlers import is_open + + +@pytest.fixture +def fixture_enable_rhc_repos(target_sat): + """Enable repos required for configuring RHC.""" + # subscribe rhc satellite to cdn. + target_sat.register_to_cdn() + if target_sat.os_version.major > 7: + target_sat.enable_repo(target_sat.REPOS['rhel_bos']['id']) + target_sat.enable_repo(target_sat.REPOS['rhel_aps']['id']) + else: + target_sat.enable_repo(target_sat.REPOS['rhscl']['id']) + target_sat.enable_repo(target_sat.REPOS['rhel']['id']) + + +@pytest.mark.e2e +@pytest.mark.tier3 +@pytest.mark.destructive +def test_positive_configure_cloud_connector(target_sat, default_org, fixture_enable_rhc_repos): + """ + Enable RH Cloud Connector through API + + :id: 1338dc6a-12e0-4378-9a51-a33f4679ba30 + + :steps: + + 1. Enable RH Cloud Connector + 2. Check if the task is completed successfully + + :expectedresults: The Cloud Connector has been installed and the service is running + + :CaseImportance: Critical + """ + + # Delete old satellite hostname if BZ#2130173 is open + if is_open('BZ:2130173'): + host = target_sat.api.Host().search(query={'search': f"! {target_sat.hostname}"})[0] + host.delete() + + # Copy foreman-proxy user's key to root@localhost user's authorized_keys + target_sat.add_rex_key(satellite=target_sat) + + # Set Host parameter source_display_name to something random. + # To avoid 'name has already been taken' error when run multiple times + # on a machine with the same hostname. 
+ host = target_sat.api.Host().search(query={'search': target_sat.hostname})[0] + parameters = [{'name': 'source_display_name', 'value': gen_string('alpha')}] + host.host_parameters_attributes = parameters + host.update(['host_parameters_attributes']) + + enable_connector = target_sat.api.RHCloud(organization=default_org).enable_connector() + + template_name = 'Configure Cloud Connector' + invocation_id = ( + target_sat.api.JobInvocation() + .search(query={'search': f'description="{template_name}"'})[0] + .id + ) + task_id = enable_connector['task_id'] + target_sat.wait_for_tasks( + search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'), + search_rate=15, + ) + + job_output = target_sat.cli.JobInvocation.get_output( + {'id': invocation_id, 'host': target_sat.hostname} + ) + # get rhc status + rhc_status = target_sat.execute('rhc status') + # get rhcd log + rhcd_log = target_sat.execute('journalctl --unit=rhcd') + + assert target_sat.api.JobInvocation(id=invocation_id).read().status == 0 + assert "Install yggdrasil-worker-forwarder and rhc" in job_output + assert "Restart rhcd" in job_output + assert 'Exit status: 0' in job_output + + assert rhc_status.status == 0 + assert "Connected to Red Hat Subscription Management" in rhc_status.stdout + assert "The Remote Host Configuration daemon is active" in rhc_status.stdout + + assert "error" not in rhcd_log.stdout diff --git a/tests/foreman/api/test_rhcloud_inventory.py b/tests/foreman/api/test_rhcloud_inventory.py index 98e2f3355cf..40b36f4fcab 100644 --- a/tests/foreman/api/test_rhcloud_inventory.py +++ b/tests/foreman/api/test_rhcloud_inventory.py @@ -1,50 +1,21 @@ """API tests for RH Cloud - Inventory, also known as Insights Inventory Upload -:Requirement: RH Cloud - Inventory +:Requirement: RHCloud :CaseAutomation: Automated -:CaseLevel: System +:CaseComponent: RHCloud -:CaseComponent: RHCloud-Inventory - -:Team: Platform - -:TestType: Functional +:Team: Phoenix-subscriptions :CaseImportance: High -:Upstream: No """ -from datetime import datetime - +from fauxfactory import gen_alphanumeric, gen_string import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string -from wait_for import wait_for from robottelo.config import robottelo_tmp_dir -from robottelo.utils.io import get_local_file_data -from robottelo.utils.io import get_report_data -from robottelo.utils.io import get_report_metadata - -generate_report_task = 'ForemanInventoryUpload::Async::UploadReportJob' - - -def generate_inventory_report(satellite, org): - """Function to generate inventory report.""" - timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') - satellite.api.Organization(id=org.id).rh_cloud_generate_report() - wait_for( - lambda: satellite.api.ForemanTask() - .search(query={'search': f'{generate_report_task} and started_at >= "{timestamp}"'})[0] - .result - == 'success', - timeout=400, - delay=15, - silent_failure=True, - handle_exception=True, - ) +from robottelo.utils.io import get_local_file_data, get_report_data, get_report_metadata def common_assertion(report_path): @@ -66,7 +37,10 @@ def common_assertion(report_path): @pytest.mark.tier3 @pytest.mark.e2e def test_rhcloud_inventory_api_e2e( - inventory_settings, organization_ak_setup, rhcloud_registered_hosts, rhcloud_sat_host + inventory_settings, + rhcloud_manifest_org, + rhcloud_registered_hosts, + module_target_sat, ): """Generate report using rh_cloud plugin api's and verify its basic properties. 
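The e2e test below performs the same generate/download/parse round trip several times; condensed here as a sketch built only from names that appear in this patch (generate_inventory_report, rh_cloud_download_report, and the robottelo.utils.io helpers). The wrapper function and report filename are illustrative:

from robottelo.config import robottelo_tmp_dir
from robottelo.utils.io import get_report_data, get_report_metadata


def fetch_inventory_report(satellite, org):
    """Generate the RH Cloud inventory report for ``org`` and return its
    parsed payload and metadata."""
    report_path = robottelo_tmp_dir.joinpath(f'inventory_{org.id}.tar.xz')
    # Blocks until the upload task finishes (the waiting formerly done by the
    # module-level generate_inventory_report() helper removed in this patch).
    satellite.generate_inventory_report(org)
    satellite.api.Organization(id=org.id).rh_cloud_download_report(
        destination=report_path
    )
    return get_report_data(report_path), get_report_metadata(report_path)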
@@ -84,35 +58,38 @@ def test_rhcloud_inventory_api_e2e( 8. metadata contains source and foreman_rh_cloud_version keys. 9. Assert Hostnames, IP addresses, infrastructure type, and installed packages are present in report. + 10. Assert that system_purpose_sla field is present in the inventory report. :CaseImportance: Critical - :BZ: 1807829, 1926100, 1965234, 1824183, 1879453 + :BZ: 1807829, 1926100, 1965234, 1824183, 1879453, 1845113 :customerscenario: true """ - org, ak = organization_ak_setup + org = rhcloud_manifest_org virtual_host, baremetal_host = rhcloud_registered_hosts local_report_path = robottelo_tmp_dir.joinpath(f'{gen_alphanumeric()}_{org.id}.tar.xz') # Generate report - generate_inventory_report(rhcloud_sat_host, org) + module_target_sat.generate_inventory_report(org) # Download report - rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_download_report( + module_target_sat.api.Organization(id=org.id).rh_cloud_download_report( destination=local_report_path ) common_assertion(local_report_path) - # Assert Hostnames, IP addresses, and installed packages are present in report. json_data = get_report_data(local_report_path) json_meta_data = get_report_metadata(local_report_path) - prefix = 'tfm-' if rhcloud_sat_host.os_version.major < 8 else '' - package_version = rhcloud_sat_host.run( + # Verify that metadata contains source and foreman_rh_cloud_version keys. + prefix = 'tfm-' if module_target_sat.os_version.major < 8 else '' + package_version = module_target_sat.run( f'rpm -qa --qf "%{{VERSION}}" {prefix}rubygem-foreman_rh_cloud' ).stdout.strip() assert json_meta_data['source_metadata']['foreman_rh_cloud_version'] == str(package_version) assert json_meta_data['source'] == 'Satellite' + # Verify Hostnames are present in report. hostnames = [host['fqdn'] for host in json_data['hosts']] assert virtual_host.hostname in hostnames assert baremetal_host.hostname in hostnames + # Verify IP addresses are present in report. ip_addresses = [ host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0] for host in json_data['hosts'] @@ -122,30 +99,36 @@ def test_rhcloud_inventory_api_e2e( assert baremetal_host.ip_addr in ip_addresses assert virtual_host.ip_addr in ipv4_addresses assert baremetal_host.ip_addr in ipv4_addresses - + # Verify infrastructure type. infrastructure_type = [ host['system_profile']['infrastructure_type'] for host in json_data['hosts'] ] - assert 'physical' and 'virtual' in infrastructure_type - + assert 'physical' in infrastructure_type + assert 'virtual' in infrastructure_type + # Verify installed packages are present in report. all_host_profiles = [host['system_profile'] for host in json_data['hosts']] for host_profiles in all_host_profiles: assert 'installed_packages' in host_profiles assert len(host_profiles['installed_packages']) > 1 + # Verify that system_purpose_sla field is present in the inventory report. + for host in json_data['hosts']: + assert host['facts'][0]['facts']['system_purpose_role'] == 'test-role' + assert host['facts'][0]['facts']['system_purpose_sla'] == 'Self-Support' + assert host['facts'][0]['facts']['system_purpose_usage'] == 'test-usage' @pytest.mark.e2e @pytest.mark.tier3 def test_rhcloud_inventory_api_hosts_synchronization( - organization_ak_setup, + rhcloud_manifest_org, rhcloud_registered_hosts, - rhcloud_sat_host, + module_target_sat, ): """Test RH Cloud plugin api to synchronize list of available hosts from cloud. :id: 7be22e1c-906b-4ae5-93dd-5f79f395601c - :Steps: + :steps: 1. 
Prepare machine and upload its data to Insights. 2. Sync inventory status using RH Cloud plugin api. @@ -154,30 +137,20 @@ def test_rhcloud_inventory_api_hosts_synchronization( 5. Assert inventory status for the host. :expectedresults: - 1. Task detail should contain should contain number of hosts + 1. Task detail should contain number of hosts synchronized and disconnected. :BZ: 1970223 :CaseAutomation: Automated """ - org, ak = organization_ak_setup + org = rhcloud_manifest_org virtual_host, baremetal_host = rhcloud_registered_hosts # Generate report - generate_inventory_report(rhcloud_sat_host, org) + module_target_sat.generate_inventory_report(org) # Sync inventory status - inventory_sync = rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_inventory_sync() - wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() - .search(query={'search': f'id = {inventory_sync["task"]["id"]}'})[0] - .result - == 'success', - timeout=400, - delay=15, - silent_failure=True, - handle_exception=True, - ) - task_output = rhcloud_sat_host.api.ForemanTask().search( + inventory_sync = module_target_sat.sync_inventory_status(org) + task_output = module_target_sat.api.ForemanTask().search( query={'search': f'id = {inventory_sync["task"]["id"]}'} ) assert task_output[0].output['host_statuses']['sync'] == 2 @@ -185,78 +158,6 @@ def test_rhcloud_inventory_api_hosts_synchronization( # To Do: Add support in Nailgun to get Insights and Inventory host properties. -@pytest.mark.stubbed -def test_rhcloud_inventory_mtu_field(): - """Verify that the hosts having mtu field value as string in foreman's Nic object - is present in the inventory report. - - :id: df6d5f4f-5ee1-4f34-bf24-b93fbd089322 - - :customerscenario: true - - :Steps: - 1. Register a content host. - 2. If value of mtu field is not a string then use foreman-rake to change it. - 3. Generate inventory report. - 4. Assert that host is listed in the inventory report. - 5. Assert that value of mtu field in generated report is a number. - - :CaseImportance: Medium - - :expectedresults: - 1. Host having string mtu field value is present in the inventory report. - 2. Value of mtu field in generated inventory report is a number. - - :BZ: 1893439 - - :CaseAutomation: ManualOnly - """ - - -@pytest.mark.run_in_one_thread -@pytest.mark.tier2 -def test_system_purpose_sla_field( - inventory_settings, organization_ak_setup, rhcloud_registered_hosts, rhcloud_sat_host -): - """Verify that system_purpose_sla field is present in the inventory report - for the host subscribed using Activation key with service level set in it. - - :id: 3974338c-3a66-41ac-af32-ee76e3c37aef - - :customerscenario: true - - :Steps: - 1. Create an activation key with service level set in it. - 2. Register a content host using the created activation key. - 3. Generate inventory report. - 4. Assert that host is listed in the inventory report. - 5. Assert that system_purpose_sla field is present in the inventory report. - - :CaseImportance: Medium - - :expectedresults: - 1. Host is present in the inventory report. - 2. system_purpose_sla field is present in the inventory report. 
- - :BZ: 1845113 - - :CaseAutomation: Automated - """ - org, ak = organization_ak_setup - virtual_host, baremetal_host = rhcloud_registered_hosts - local_report_path = robottelo_tmp_dir.joinpath(f'{gen_alphanumeric()}_{org.id}.tar.xz') - generate_inventory_report(rhcloud_sat_host, org) - # Download report - rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_download_report( - destination=local_report_path - ) - json_data = get_report_data(local_report_path) - for host in json_data['hosts']: - assert host['facts'][0]['facts']['system_purpose_role'] == 'test-role' - assert host['facts'][0]['facts']['system_purpose_sla'] == 'Self-Support' - assert host['facts'][0]['facts']['system_purpose_usage'] == 'test-usage' - - @pytest.mark.stubbed def test_rhcloud_inventory_auto_upload_setting(): """Verify that Automatic inventory upload setting works as expected. @@ -265,7 +166,7 @@ def test_rhcloud_inventory_auto_upload_setting(): :customerscenario: true - :Steps: + :steps: 1. Register a content host with satellite. 2. Enable "Automatic inventory upload" setting. 3. Verify that satellite automatically generate and upload @@ -280,7 +181,7 @@ def test_rhcloud_inventory_auto_upload_setting(): 2. If "Automatic inventory upload" setting is disable then satellite does not generate and upload inventory report automatically. - :BZ: 1793017 + :BZ: 1793017, 1865879 :CaseAutomation: ManualOnly """ @@ -295,10 +196,10 @@ def test_inventory_upload_with_http_proxy(): :customerscenario: true - :Steps: + :steps: 1. Create a http proxy which is using port 80. - 2. Register a content host with satellite. - 3. Set Default HTTP Proxy setting. + 2. Update general and content proxy in Satellite settings. + 3. Register a content host with satellite. 4. Generate and upload inventory report. 5. Assert that host is listed in the inventory report. 6. Assert that upload process finished successfully. @@ -316,145 +217,73 @@ def test_inventory_upload_with_http_proxy(): @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_include_parameter_tags_setting( - inventory_settings, organization_ak_setup, rhcloud_registered_hosts, rhcloud_sat_host + inventory_settings, + rhcloud_manifest_org, + rhcloud_registered_hosts, + module_target_sat, ): """Verify that include_parameter_tags setting doesn't cause invalid report to be generated. :id: 3136a1e3-f844-416b-8334-75b27fd9e3a1 - :Steps: + :steps: 1. Enable include_parameter_tags setting. 2. Register a content host with satellite. - 3. Generate inventory report. - 4. Assert that generated report contains valid json file. + 3. Create a host parameter with a long text value. + 4. Create a Hostcollection with a name containing double quotes. + 5. Generate inventory report. + 6. Assert that generated report contains valid json file. + 7. Observe the tag generated from the parameter. :expectedresults: 1. Valid json report is created. 2. satellite_parameter values are string. + 3. Parameter tag value is not created beyond the + allowed length. + 4. Tag value is escaped properly. 
- :BZ: 1981869, 1967438 + :BZ: 1981869, 1967438, 2035204, 1874587, 1874619 :customerscenario: true :CaseAutomation: Automated """ - org, ak = organization_ak_setup + org = rhcloud_manifest_org virtual_host, baremetal_host = rhcloud_registered_hosts - local_report_path = robottelo_tmp_dir.joinpath(f'{gen_alphanumeric()}_{org.id}.tar.xz') - rhcloud_sat_host.update_setting('include_parameter_tags', True) - generate_inventory_report(rhcloud_sat_host, org) - # Download report - rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_download_report( - destination=local_report_path - ) - json_data = get_report_data(local_report_path) - common_assertion(local_report_path) - for host in json_data['hosts']: - for tag in host['tags']: - if tag['namespace'] == 'satellite_parameter': - assert type(tag['value']) is str - break - - -@pytest.mark.tier3 -def test_rh_cloud_tag_values( - inventory_settings, organization_ak_setup, rhcloud_sat_host, rhcloud_registered_hosts -): - """Verify that tag values are escaped properly when hostgroup name - contains " (double quote) in it. - - :id: ea7cd7ca-4157-4aac-ad8e-e66b88740ce3 - - :customerscenario: true - - :Steps: - 1. Create Hostcollection with name containing double quotes. - 2. Register a content host with satellite. - 3. Add a content host to hostgroup. - 4. Generate inventory report. - 5. Assert that generated report contains valid json file. - 6. Assert that hostcollection tag value is escaped properly. - - :expectedresults: - 1. Valid json report is created. - 2. Tag value is escaped properly. - - :BZ: 1874587, 1874619 - - :CaseAutomation: Automated - """ - org, ak = organization_ak_setup - + # Create a host parameter with a long text value. + param_name = gen_string('alpha') + param_value = gen_string('alpha', length=260) + module_target_sat.api.CommonParameter(name=param_name, value=param_value).create() + # Create a Hostcollection with a name containing double quotes. host_col_name = gen_string('alpha') host_name = rhcloud_registered_hosts[0].hostname - host = rhcloud_sat_host.api.Host().search(query={'search': host_name})[0] - host_collection = rhcloud_sat_host.api.HostCollection( + host = module_target_sat.api.Host().search(query={'search': host_name})[0] + host_collection = module_target_sat.api.HostCollection( organization=org, name=f'"{host_col_name}"', host=[host] ).create() - assert len(host_collection.host) == 1 + # Generate inventory report local_report_path = robottelo_tmp_dir.joinpath(f'{gen_alphanumeric()}_{org.id}.tar.xz') - # Generate report - generate_inventory_report(rhcloud_sat_host, org) - rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_download_report( - destination=local_report_path - ) - common_assertion(local_report_path) - json_data = get_report_data(local_report_path) - for host in json_data['hosts']: - if host['fqdn'] == host_name: - for tag in host['tags']: - if tag['key'] == 'host_collection': - assert tag['value'] == f'"{host_col_name}"' - break - - -@pytest.mark.run_in_one_thread -@pytest.mark.tier2 -def test_positive_tag_values_max_length( - inventory_settings, - organization_ak_setup, - rhcloud_registered_hosts, - rhcloud_sat_host, - target_sat, -): - """Verify that tags values are truncated properly for the host parameter - with max length. - - :id: dbcc7245-88af-4c35-87b8-92de01030cb5 - - :Steps: - 1. Enable include_parameter_tags setting - 2. Create a host parameter with long text value. - 3. Generate a rh_cloud report. - 4. Observe the tag generated from the parameter. - - :expectedresults: - 1. 
Parameter tag value must not be created after the - allowed length. - - :BZ: 2035204 - - :CaseAutomation: Automated - """ - - param_name = gen_string('alpha') - param_value = gen_string('alpha', length=260) - target_sat.api.CommonParameter(name=param_name, value=param_value).create() - - org, ak = organization_ak_setup - local_report_path = robottelo_tmp_dir.joinpath(f'{gen_alphanumeric()}_{org.id}.tar.xz') - rhcloud_sat_host.update_setting('include_parameter_tags', True) - generate_inventory_report(rhcloud_sat_host, org) + # Enable include_parameter_tags setting + module_target_sat.update_setting('include_parameter_tags', True) + module_target_sat.generate_inventory_report(org) # Download report - rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_download_report( + module_target_sat.api.Organization(id=org.id).rh_cloud_download_report( destination=local_report_path ) json_data = get_report_data(local_report_path) common_assertion(local_report_path) + # Verify that the parameter tag value is not created beyond the allowed length. for host in json_data['hosts']: for tag in host['tags']: if tag['key'] == param_name: assert tag['value'] == "Original value exceeds 250 characters" break + # Verify that hostcollection tag value is escaped properly. for host in json_data['hosts']: if host['fqdn'] == host_name: for tag in host['tags']: if tag['key'] == 'host_collection': assert tag['value'] == f'"{host_col_name}"' break diff --git a/tests/foreman/api/test_rhsm.py b/tests/foreman/api/test_rhsm.py index 9229fb355df..096163a8719 100644 --- a/tests/foreman/api/test_rhsm.py +++ b/tests/foreman/api/test_rhsm.py @@ -8,25 +8,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SubscriptionManagement :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import http -import pytest from nailgun import client +import pytest -from robottelo.config import get_credentials -from robottelo.config import get_url +from robottelo.config import get_credentials, get_url @pytest.mark.tier1 @@ -50,4 +44,4 @@ def test_positive_path(): response = client.get(path, auth=get_credentials(), verify=False) assert response.status_code == http.client.OK assert 'application/json' in response.headers['content-type'] - assert type(response.json()) is list + assert isinstance(response.json(), list) diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index e669dd9d19b..7d5a12bc591 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -8,29 +8,20 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from nailgun import entities from nailgun.config import ServerConfig +import pytest from requests.exceptions import HTTPError -from robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import parametrized +from robottelo.config import settings +from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE +from robottelo.utils.datafactory import gen_string, generate_strings_list, parametrized from robottelo.utils.issue_handlers import is_open @@ -40,10 +31,10 @@ class TestRole: @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', - 
**parametrized(list(zip(generate_strings_list(), generate_strings_list()))), + ('name', 'new_name'), + **parametrized(list(zip(generate_strings_list(), generate_strings_list(), strict=True))), ) - def test_positive_crud(self, name, new_name): + def test_positive_crud(self, name, new_name, target_sat): """Create, update and delete role with name ``name_generator()``. :id: 02c7d04d-a52c-4bc2-9e17-9938ab2ee5b2 @@ -57,7 +48,7 @@ def test_positive_crud(self, name, new_name): :CaseImportance: Critical """ - role = entities.Role(name=name).create() + role = target_sat.api.Role(name=name).create() assert role.name == name role.name = new_name assert role.update(['name']).name == new_name @@ -69,7 +60,7 @@ def test_positive_crud(self, name, new_name): class TestCannedRole: """Implements Canned Roles tests from API""" - def create_org_admin_role(self, name=None, orgs=None, locs=None): + def create_org_admin_role(self, target_sat, name=None, orgs=None, locs=None): """Helper function to create org admin role for particular organizations and locations by cloning 'Organization admin' role. @@ -81,18 +72,20 @@ def create_org_admin_role(self, name=None, orgs=None, locs=None): :return dict: This function returns dict representation of cloned role data returned from 'clone' function """ - name = gen_string('alpha') if not name else name - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + name = name if name else gen_string('alpha') + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': {'name': name, 'organization_ids': orgs or [], 'location_ids': locs or []} } ) if 'role' in org_admin: - return entities.Role(id=org_admin['role']['id']).read() - return entities.Role(id=org_admin['id']).read() + return target_sat.api.Role(id=org_admin['role']['id']).read() + return target_sat.api.Role(id=org_admin['id']).read() - def create_org_admin_user(self, role_taxos, user_taxos): + def create_org_admin_user(self, target_sat, role_taxos, user_taxos): """Helper function to create an Org Admin user by assigning org admin role and assign taxonomies to Role and User @@ -107,12 +100,12 @@ def create_org_admin_user(self, role_taxos, user_taxos): """ # Create Org Admin Role org_admin = self.create_org_admin_role( - orgs=[role_taxos['org'].id], locs=[role_taxos['loc'].id] + target_sat, orgs=[role_taxos['org'].id], locs=[role_taxos['loc'].id] ) # Create Org Admin User user_login = gen_string('alpha') user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_passwd, organization=[user_taxos['org'].id], @@ -122,7 +115,7 @@ def create_org_admin_user(self, role_taxos, user_taxos): user.passwd = user_passwd return user - def create_simple_user(self, filter_taxos, role=None): + def create_simple_user(self, target_sat, filter_taxos, role=None): """Creates simple user and assigns taxonomies :param dict filter_taxos: Filter taxonomiest specified as dictionary containing @@ -132,7 +125,7 @@ def create_simple_user(self, filter_taxos, role=None): passwd attr """ user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=gen_string('alpha'), password=user_passwd, organization=[filter_taxos['org'].id], @@ -142,14 +135,16 @@ def create_simple_user(self, filter_taxos, role=None): user.passwd = 
user_passwd return user - def create_domain(self, orgs, locs): + def create_domain(self, target_sat, orgs, locs): """Creates domain in given orgs and locs :param list orgs: List of Organization ids :param list locs: List of Location ids :return Domain: Returns the ```nailgun.entities.Domain``` object """ - return entities.Domain(name=gen_string('alpha'), organization=orgs, location=locs).create() + return target_sat.api.Domain( + name=gen_string('alpha'), organization=orgs, location=locs + ).create() def user_config(self, user, satellite): """Returns The ```nailgun.confin.ServerConfig``` for given user @@ -157,22 +152,24 @@ def user_config(self, user, satellite): :param user: The nailgun.entities.User object of an user with passwd parameter """ - return ServerConfig(auth=(user.login, user.passwd), url=satellite.url, verify=False) + return ServerConfig( + auth=(user.login, user.passwd), url=satellite.url, verify=settings.server.verify_ca + ) @pytest.fixture - def role_taxonomies(self): + def role_taxonomies(self, target_sat): """Create role taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': target_sat.api.Location().create(), } @pytest.fixture - def filter_taxonomies(self): + def filter_taxonomies(self, target_sat): """Create filter taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': target_sat.api.Location().create(), } @pytest.fixture @@ -184,9 +181,9 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): sat_url=target_sat.url, ldap_user_name=ad_data['ldap_user_name'], ldap_user_passwd=ad_data['ldap_user_passwd'], - authsource=entities.AuthSourceLDAP( + authsource=target_sat.api.AuthSourceLDAP( onthefly_register=True, - account=ad_data['ldap_user_name'], + account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], base_dn=ad_data['base_dn'], groups_base=ad_data['group_base_dn'], @@ -207,10 +204,10 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): query={'search': f'login={ad_data["ldap_user_name"]}'} ): user.delete() - LDAPAuthSource.delete({'name': authsource_name}) + target_sat.cli.LDAPAuthSource.delete({'name': authsource_name}) @pytest.mark.tier1 - def test_positive_create_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_role_with_taxonomies(self, role_taxonomies, target_sat): """create role with taxonomies :id: fa449217-889c-429b-89b5-0b6c018ffd9e @@ -222,7 +219,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], @@ -232,7 +229,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): assert role_taxonomies['loc'].id == role.location[0].id @pytest.mark.tier1 - def test_positive_create_role_without_taxonomies(self): + def test_positive_create_role_without_taxonomies(self, target_sat): """Create role without taxonomies :id: fe65a691-1b04-4bfe-a24b-adb48feb31d1 @@ -244,13 +241,13 @@ def test_positive_create_role_without_taxonomies(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name, organization=[], location=[]).create() + role = 
target_sat.api.Role(name=role_name, organization=[], location=[]).create() assert role.name == role_name assert not role.organization assert not role.location @pytest.mark.tier1 - def test_positive_create_filter_without_override(self, role_taxonomies): + def test_positive_create_filter_without_override(self, role_taxonomies, target_sat): """Create filter in role w/o overriding it :id: 1aadb7ea-ff76-4171-850f-188ba6f87021 @@ -269,27 +266,27 @@ def test_positive_create_filter_without_override(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter(permission=dom_perm, role=role.id).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter(permission=dom_perm, role=role.id).create() assert role.id == filtr.role.id assert role_taxonomies['org'].id == filtr.organization[0].id assert role_taxonomies['loc'].id == filtr.location[0].id assert not filtr.override @pytest.mark.tier1 - def test_positive_create_non_overridable_filter(self): + def test_positive_create_non_overridable_filter(self, target_sat): """Create non overridable filter in role :id: f891e2e1-76f8-4edf-8c96-b41d05483298 :steps: Create a filter to which taxonomies cannot be associated. - e.g Architecture filter + e.g. Architecture filter :expectedresults: @@ -299,21 +296,23 @@ def test_positive_create_non_overridable_filter(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) - filtr = entities.Filter(permission=arch_perm, role=role.id).create() + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) + filtr = target_sat.api.Filter(permission=arch_perm, role=role.id).create() assert role.id == filtr.role.id assert not filtr.override @pytest.mark.tier1 - def test_negative_override_non_overridable_filter(self, filter_taxonomies): + def test_negative_override_non_overridable_filter(self, filter_taxonomies, target_sat): """Override non overridable filter :id: 7793be96-e8eb-451b-a986-51a46a1ab4f9 :steps: Attempt to override a filter to which taxonomies cannot be - associated. e.g Architecture filter + associated. e.g. 
Architecture filter :expectedresults: Filter is not overrided as taxonomies cannot be applied to that filter @@ -321,11 +320,13 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) with pytest.raises(HTTPError): - entities.Filter( + target_sat.api.Filter( permission=arch_perm, role=[role.id], override=True, @@ -335,7 +336,9 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxonomies): + def test_positive_create_overridable_filter( + self, role_taxonomies, filter_taxonomies, target_sat + ): """Create overridable filter in role :id: c7ea9377-9b9e-495e-accd-3576166d504e @@ -355,14 +358,14 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -377,7 +380,7 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono assert role_taxonomies['loc'].id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies): + def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat): """Update role taxonomies which applies to its non-overrided filters :id: 902dcb32-2126-4ff4-b733-3e86749ccd1e @@ -390,29 +393,29 @@ def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomie :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter(permission=dom_perm, role=role.id).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter(permission=dom_perm, role=role.id).create() assert role.id == filtr.role.id role.organization = [filter_taxonomies['org']] role.location = [filter_taxonomies['loc']] role = role.update(['organization', 'location']) # Updated Role - role = entities.Role(id=role.id).read() + role = target_sat.api.Role(id=role.id).read() assert filter_taxonomies['org'].id == role.organization[0].id assert filter_taxonomies['loc'].id == role.location[0].id # Updated Filter - filtr = entities.Filter(id=filtr.id).read() + filtr = target_sat.api.Filter(id=filtr.id).read() assert filter_taxonomies['org'].id == filtr.organization[0].id assert filter_taxonomies['loc'].id == filtr.location[0].id 
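Several of the clone tests that follow hinge on the same override semantics: a filter created without override inherits the role's taxonomies, while override=True lets it keep its own. A compact sketch mirroring the target_sat.api calls used in these tests; the role name is a placeholder:

def make_role_with_filters(target_sat, role_taxos, filter_taxos):
    role = target_sat.api.Role(
        name='demo-role',  # hypothetical
        organization=[role_taxos['org']],
        location=[role_taxos['loc']],
    ).create()
    dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'})
    # No override: taxonomies are inherited from the role.
    inherited = target_sat.api.Filter(permission=dom_perm, role=role.id).create()
    assert inherited.organization[0].id == role_taxos['org'].id
    # Overridden: the filter keeps the taxonomies it was given.
    overridden = target_sat.api.Filter(
        permission=dom_perm,
        role=role.id,
        override=True,
        organization=[filter_taxos['org']],
        location=[filter_taxos['loc']],
    ).create()
    assert overridden.organization[0].id == filter_taxos['org'].id
    return role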
@pytest.mark.tier1 - def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies): + def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat): """Update role taxonomies which doesnt applies to its overrided filters :id: 9f3bf95a-f71a-4063-b51c-12610bc655f2 @@ -426,14 +429,14 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomie :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -442,23 +445,23 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomie ).create() assert role.id == filtr.role.id # Creating new Taxonomies - org_new = entities.Organization().create() - loc_new = entities.Location().create() + org_new = target_sat.api.Organization().create() + loc_new = target_sat.api.Location().create() # Updating Taxonomies role.organization = [org_new] role.location = [loc_new] role = role.update(['organization', 'location']) # Updated Role - role = entities.Role(id=role.id).read() + role = target_sat.api.Role(id=role.id).read() assert org_new.id == role.organization[0].id assert loc_new.id == role.location[0].id # Updated Filter - filtr = entities.Filter(id=filtr.id).read() + filtr = target_sat.api.Filter(id=filtr.id).read() assert org_new.id != filtr.organization[0].id assert loc_new.id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies): + def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies, target_sat): """Unsetting override flag resets filter taxonomies :id: eaa7b921-7c12-45c5-989b-d82aa2b6e3a6 @@ -477,14 +480,14 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -500,7 +503,7 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi assert filter_taxonomies['loc'].id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_create_org_admin_from_clone(self): + def test_positive_create_org_admin_from_clone(self, target_sat): """Create Org Admin role which has access to most of the resources within organization @@ -515,14 +518,16 @@ def test_positive_create_org_admin_from_clone(self): :BZ: 1637436 """ - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = self.create_org_admin_role() - default_filters = entities.Role(id=default_org_admin[0].id).read().filters - orgadmin_filters = entities.Role(id=org_admin.id).read().filters + default_org_admin = 
target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = self.create_org_admin_role(target_sat) + default_filters = target_sat.api.Role(id=default_org_admin[0].id).read().filters + orgadmin_filters = target_sat.api.Role(id=org_admin.id).read().filters assert len(default_filters) == len(orgadmin_filters) @pytest.mark.tier1 - def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies, target_sat): """Taxonomies can be assigned to cloned role :id: 31079015-5ede-439a-a062-e20d1ffd66df @@ -540,9 +545,9 @@ def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): :CaseImportance: Critical """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - org_admin = entities.Role(id=org_admin.id).read() + org_admin = target_sat.api.Role(id=org_admin.id).read() assert role_taxonomies['org'].id == org_admin.organization[0].id assert role_taxonomies['loc'].id == org_admin.location[0].id @@ -566,16 +571,17 @@ def test_negative_access_entities_from_org_admin( :expectedresults: User should not be able to access any resources and permissions in taxonomies selected in Org Admin role - :CaseLevel: System """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=filter_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=filter_taxonomies + ) domain = self.create_domain( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_user( @@ -597,19 +603,20 @@ def test_negative_access_entities_from_user( :expectedresults: User should not be able to access any resources and permissions in its own taxonomies - :CaseLevel: System """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=filter_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=filter_taxonomies + ) domain = self.create_domain( - orgs=[filter_taxonomies['org'].id], locs=[filter_taxonomies['loc'].id] + target_sat, orgs=[filter_taxonomies['org'].id], locs=[filter_taxonomies['loc'].id] ) sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() @pytest.mark.tier2 - def test_positive_override_cloned_role_filter(self, role_taxonomies): + def test_positive_override_cloned_role_filter(self, role_taxonomies, target_sat): """Cloned role filter overrides :id: 8a32ed5f-b93f-4f31-aff4-16602fbe7fab @@ -622,30 +629,29 @@ def test_positive_override_cloned_role_filter(self, role_taxonomies): :expectedresults: Filter in cloned role should be overridden - :CaseLevel: Integration """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() + role = 
target_sat.api.Role(name=role_name).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() cloned_role_name = gen_string('alpha') - cloned_role = entities.Role(id=role.id).clone(data={'name': cloned_role_name}) + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': cloned_role_name}) assert cloned_role_name == cloned_role['name'] - filter_cloned_id = entities.Role(id=cloned_role['id']).read().filters[0].id - filter_cloned = entities.Filter(id=filter_cloned_id).read() + filter_cloned_id = target_sat.api.Role(id=cloned_role['id']).read().filters[0].id + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() filter_cloned.override = True filter_cloned.organization = [role_taxonomies['org']] filter_cloned.location = [role_taxonomies['loc']] filter_cloned.update(['override', 'organization', 'location']) # Updated Filter - filter_cloned = entities.Filter(id=filter_cloned_id).read() + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() assert filter_cloned.override assert role_taxonomies['org'].id == filter_cloned.organization[0].id assert role_taxonomies['loc'].id == filter_cloned.location[0].id @pytest.mark.tier2 def test_positive_emptiness_of_filter_taxonomies_on_role_clone( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """Taxonomies of filters in cloned role are set to None for filters that are overridden in parent role @@ -665,31 +671,30 @@ def test_positive_emptiness_of_filter_taxonomies_on_role_clone( None in cloned role 2. Override flag is set to True in cloned role filter - :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - filter_cloned = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + filter_cloned = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not filter_cloned.organization assert not filter_cloned.location assert filter_cloned.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_with_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """When taxonomies assigned to cloned role, Unlimited and Override flag sets on filter for filter that is overridden in parent role @@ -708,36 +713,35 @@ def test_positive_clone_role_having_overridden_filter_with_taxonomies( :expectedresults: Unlimited and Override flags should be set to True on filter for filter that is overridden in parent role - :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], 
location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone( + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( - self, role_taxonomies + self, role_taxonomies, target_sat ): """When taxonomies assigned to cloned role, Neither unlimited nor override sets on filter for filter that is not overridden in parent @@ -756,29 +760,30 @@ def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( :expectedresults: Both unlimited and override flag should be set to False on filter for filter that is not overridden in parent role - :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_taxonomies): + def test_positive_clone_role_having_unlimited_filter_with_taxonomies( + self, role_taxonomies, target_sat + ): """When taxonomies assigned to cloned role, Neither unlimited nor override sets on filter for filter that is unlimited in parent role @@ -795,30 +800,29 @@ def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_ :expectedresults: Both unlimited and override flags should be set to False on filter for filter that is unlimited in parent role - :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, 
role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_without_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): # noqa """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is overridden in parent role @@ -836,29 +840,30 @@ def test_positive_clone_role_having_overridden_filter_without_taxonomies( :expectedresults: Both unlimited and Override flags should be set to True on filter for filter that is overridden in parent role - :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_non_overided_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, only unlimited but not override flag sets on filter for filter that is overridden in parent role @@ -877,27 +882,27 @@ def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_t 1. Unlimited flag should be set to True 2. 
Override flag should be set to False - :CaseLevel: Integration - :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_unlimited_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is unlimited in parent role @@ -915,22 +920,20 @@ def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxo 1. Unlimited flag should be set to True 2. Override flag should be set to False - :CaseLevel: Integration - :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override @@ -948,20 +951,19 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta 2. Assign an organization A and Location A to the Org Admin role 3. Create two users without assigning roles while creating them 4. Assign Organization A and Location A to both users - 5. Create an user group with above two users + 5. Create a user group with above two users 6. 
Assign Org Admin role to User Group :expectedresults: Both the user should have access to the resources of organization A and Location A - :CaseLevel: System """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) userone_login = gen_string('alpha') userone_pass = gen_string('alphanumeric') - user_one = entities.User( + user_one = target_sat.api.User( login=userone_login, password=userone_pass, organization=[role_taxonomies['org'].id], @@ -970,7 +972,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta assert userone_login == user_one.login usertwo_login = gen_string('alpha') usertwo_pass = gen_string('alphanumeric') - user_two = entities.User( + user_two = target_sat.api.User( login=usertwo_login, password=usertwo_pass, organization=[role_taxonomies['org'].id], @@ -978,31 +980,33 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ).create() assert usertwo_login == user_two.login ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name # Creating Subnets and Domains to verify if user really can access them - subnet = entities.Subnet( + subnet = target_sat.api.Subnet( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], ).create() - domain = entities.Domain( + domain = target_sat.api.Domain( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], ).create() for login, password in ((userone_login, userone_pass), (usertwo_login, usertwo_pass)): - sc = ServerConfig(auth=(login, password), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(login, password), url=target_sat.url, verify=settings.server.verify_ca + ) try: - entities.Domain(sc).search( + target_sat.api.Domain(server_config=sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, } ) - entities.Subnet(sc).search( + target_sat.api.Subnet(server_config=sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1010,8 +1014,8 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ) except HTTPError as err: pytest.fail(str(err)) - assert domain.id in [dom.id for dom in entities.Domain(sc).search()] - assert subnet.id in [sub.id for sub in entities.Subnet(sc).search()] + assert domain.id in [dom.id for dom in target_sat.api.Domain(server_config=sc).search()] + assert subnet.id in [sub.id for sub in target_sat.api.Subnet(server_config=sc).search()] @pytest.mark.tier3 def test_positive_user_group_users_access_contradict_as_org_admins(self): @@ -1037,7 +1041,6 @@ def test_positive_user_group_users_access_contradict_as_org_admins(self): 2. 
User assigned to Organization A and Location A should have access to the resources of organization A and Location A - :CaseLevel: System """ @pytest.mark.tier2 @@ -1060,23 +1063,24 @@ def test_negative_assign_org_admin_to_user_group( :expectedresults: Both the user shouldn't have access to the resources of organization A,B and Location A,B - :CaseLevel: System """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - user_one = self.create_simple_user(filter_taxos=filter_taxonomies) - user_two = self.create_simple_user(filter_taxos=filter_taxonomies) + user_one = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) + user_two = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name - dom = self.create_domain(orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id]) + dom = self.create_domain( + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + ) for user in [user_one, user_two]: sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.Domain(sc, id=dom.id).read() + target_sat.api.Domain(server_config=sc, id=dom.id).read() @pytest.mark.tier2 def test_negative_assign_taxonomies_by_org_admin( @@ -1102,20 +1106,19 @@ def test_negative_assign_taxonomies_by_org_admin( :expectedresults: Org Admin should not be able to assign the organizations to any of its resources - :CaseLevel: Integration """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']] ).create() assert dom_name == dom.name user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1123,15 +1126,17 @@ def test_negative_assign_taxonomies_by_org_admin( location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) # Getting the domain from user1 - dom = entities.Domain(sc, id=dom.id).read() + dom = target_sat.api.Domain(server_config=sc, id=dom.id).read() dom.organization = [filter_taxonomies['org']] with pytest.raises(HTTPError): dom.update(['organization']) @pytest.mark.tier1 - def test_positive_remove_org_admin_role(self, role_taxonomies): + def test_positive_remove_org_admin_role(self, role_taxonomies, target_sat): """Super Admin user can remove Org Admin role :id: 03fac76c-22ac-43cf-9068-b96e255b3c3c @@ -1148,25 +1153,27 @@ def test_positive_remove_org_admin_role(self, role_taxonomies): :CaseImportance: Critical """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) user_login = 
gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User(login=user_login, password=user_pass, role=[org_admin.id]).create() + user = target_sat.api.User( + login=user_login, password=user_pass, role=[org_admin.id] + ).create() assert user_login == user.login try: - entities.Role(id=org_admin.id).delete() + target_sat.api.Role(id=org_admin.id).delete() except HTTPError as err: pytest.fail(str(err)) # Getting updated user - user = entities.User(id=user.id).read() + user = target_sat.api.User(id=user.id).read() assert org_admin.id not in [role.id for role in user.role] @pytest.mark.tier2 def test_positive_taxonomies_control_to_superadmin_with_org_admin( self, role_taxonomies, target_sat ): """Super Admin can access entities in taxonomies assigned to Org Admin :id: 37db0b40-ed35-4e70-83e8-83cff27caae2 @@ -1175,26 +1182,27 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( :steps: 1. Create Org Admin role and assign organization A and Location A 2. Create User and assign above Org Admin role 3. Login with SuperAdmin who created the above Org Admin role and access entities in Organization A and Location A :expectedresults: Super admin should be able to access the entities in taxonomies assigned to Org Admin - :CaseLevel: Integration """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=role_taxonomies + ) sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( - sc, + dom = target_sat.api.Domain( + server_config=sc, name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert dom_name == dom.name try: - entities.Subnet().search( + target_sat.api.Subnet().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1207,7 +1215,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( def test_positive_taxonomies_control_to_superadmin_without_org_admin( self, role_taxonomies, target_sat ): """Super Admin can access entities in taxonomies assigned to Org Admin after deleting Org Admin role/user :id: 446f66a5-16e0-4298-b326-262913502955 @@ -1220,30 +1228,32 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( :steps: 1. Create Org Admin role and assign organization A and Location A 2. Create User and assign above Org Admin role 3. Delete Org Admin role also the User created above 4.
Login with SuperAdmin who created the above Org Admin role and access entities in Organization A and Location A :expectedresults: Super admin should be able to access the entities in taxonomies assigned to Org Admin after deleting Org Admin - :CaseLevel: Integration """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=role_taxonomies + ) sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( - sc, + dom = target_sat.api.Domain( + server_config=sc, name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert dom_name == dom.name - user_role = entities.Role(id=user.role[0].id).read() - entities.Role(id=user_role.id).delete() - entities.User(id=user.id).delete() + user_role = target_sat.api.Role(id=user.role[0].id).read() + target_sat.api.Role(id=user_role.id).delete() + target_sat.api.User(id=user.id).delete() with pytest.raises(HTTPError): user_role.read() + with pytest.raises(HTTPError): user.read() try: - entities.Domain().search( + target_sat.api.Domain().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1255,7 +1265,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( @pytest.mark.tier1 @pytest.mark.upgrade def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): - """Org Admin doesnt have permissions to create new roles + """Org Admin doesn't have permissions to create new roles :id: 806ecc16-0dc7-405b-90d3-0584eced27a3 @@ -1270,11 +1280,11 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): create new role """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1282,11 +1292,13 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) role_name = gen_string('alpha') with pytest.raises(HTTPError): - entities.Role( - sc, + target_sat.api.Role( + server_config=sc, name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], @@ -1308,13 +1320,15 @@ def test_negative_modify_roles_by_org_admin(self, role_taxonomies, target_sat): :expectedresults: Org Admin should not have permissions to update existing roles """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) - test_role = entities.Role().create() + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=role_taxonomies + ) + test_role = target_sat.api.Role().create() sc = self.user_config(user, target_sat) - test_role = entities.Role(sc, id=test_role.id).read() + test_role = target_sat.api.Role(server_config=sc, id=test_role.id).read() + test_role.organization =
[role_taxonomies['org']] + test_role.location = [role_taxonomies['loc']] with pytest.raises(HTTPError): - test_role.organization = [role_taxonomies['org']] - test_role.location = [role_taxonomies['loc']] test_role.update(['organization', 'location']) @pytest.mark.tier2 @@ -1332,14 +1346,13 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s :expectedresults: Org Admin should not have access of Admin user - :CaseLevel: Integration """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1347,9 +1360,11 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) with pytest.raises(HTTPError): - entities.User(sc, id=1).read() + target_sat.api.User(server_config=sc, id=1).read() @pytest.mark.tier2 @pytest.mark.upgrade @@ -1377,14 +1392,13 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): :customerscenario: true - :CaseLevel: Integration """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1392,11 +1406,13 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc_user = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc_user = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( - sc_user, + user = target_sat.api.User( + server_config=sc_user, login=user_login, password=user_pass, role=[org_admin.id], @@ -1407,7 +1423,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): assert org_admin.id == user.role[0].id if not is_open('BZ:1825698'): name = gen_string('alphanumeric') - location = entities.Location(sc_user, name=name).create() + location = target_sat.api.Location(server_config=sc_user, name=name).create() assert location.name == name @pytest.mark.tier2 @@ -1429,13 +1445,14 @@ def test_positive_access_users_inside_org_admin_taxonomies(self, role_taxonomies :expectedresults: Org Admin should be able to access users inside its taxonomies - :CaseLevel: Integration """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) - test_user = self.create_simple_user(filter_taxos=role_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=role_taxonomies + ) + test_user = self.create_simple_user(target_sat, filter_taxos=role_taxonomies) sc = self.user_config(user, target_sat) try: - entities.User(sc, id=test_user.id).read() +
target_sat.api.User(server_config=sc, id=test_user.id).read() except HTTPError as err: pytest.fail(str(err)) @@ -1458,24 +1475,27 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): :expectedresults: after adding the needed permissions, user should be able to create nested locations - :CaseLevel: Integration """ user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) user.role = [org_admin] user = user.update(['role']) - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) name = gen_string('alphanumeric') - location = entities.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() + location = target_sat.api.Location( + server_config=sc, name=name, parent=role_taxonomies['loc'].id + ).create() assert location.name == name @pytest.mark.tier2 @@ -1499,13 +1519,14 @@ def test_negative_access_users_outside_org_admin_taxonomies( :expectedresults: Org Admin should not be able to access users outside its taxonomies - :CaseLevel: Integration """ - user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) - test_user = self.create_simple_user(filter_taxos=filter_taxonomies) + user = self.create_org_admin_user( + target_sat, role_taxos=role_taxonomies, user_taxos=role_taxonomies + ) + test_user = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.User(sc, id=test_user.id).read() + target_sat.api.User(server_config=sc, id=test_user.id).read() @pytest.mark.tier1 def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_sat): @@ -1526,10 +1547,10 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s 1. Org Admin should not have access to create organizations 2. 
Org Admin should have access to create locations """ - org_admin = self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) + org_admin = self.create_org_admin_role(target_sat, orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1537,13 +1558,15 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) with pytest.raises(HTTPError): - entities.Organization(sc, name=gen_string('alpha')).create() + target_sat.api.Organization(server_config=sc, name=gen_string('alpha')).create() if not is_open("BZ:1825698"): try: loc_name = gen_string('alpha') - loc = entities.Location(sc, name=loc_name).create() + loc = target_sat.api.Location(server_config=sc, name=loc_name).create() except HTTPError as err: pytest.fail(str(err)) assert loc_name == loc.name @@ -1553,7 +1576,7 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s def test_positive_access_all_global_entities_by_org_admin( self, role_taxonomies, filter_taxonomies, target_sat ): """Org Admin can access all global entities in any taxonomies regardless of its own assigned taxonomies :id: add5feb3-7a3f-45a1-a633-49f1141b029b :steps: 1. Create Org Admins role by assigning org A and Loc A 2. Create new user and assign Org A,B and Location A,B 3. Assign Org Admin role to User 4. Login with Org Admin user 5. Attempt to create all the global entities in org B and Loc B - e.g Architectures, Operating System + e.g.
Architectures, Operating System :expectedresults: Org Admin should have access to all the global entities in any taxonomies """ - org_admin = self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) + org_admin = self.create_org_admin_role(target_sat, orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1581,25 +1604,29 @@ location=[role_taxonomies['loc'], filter_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) try: for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: - entity(sc).search() + entity(server_config=sc).search() except HTTPError as err: pytest.fail(str(err)) @pytest.mark.tier3 - def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, create_ldap): + def test_negative_access_entities_from_ldap_org_admin( + self, role_taxonomies, create_ldap, target_sat + ): """LDAP User can not access resources in taxonomies assigned to role if its own taxonomies are not same as its role @@ -1616,34 +1643,34 @@ def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, cre :expectedresults: LDAP User should not be able to access resources and permissions in taxonomies selected in Org Admin role - :CaseLevel: System - :CaseAutomation: Automated """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) # Creating Domain resource in same taxonomies as Org Admin role to access later domain = self.create_domain( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(server_config=sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = [target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the domain resource created in org admin role with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_ldap_user( - self,
role_taxonomies, create_ldap, module_location, module_org + self, role_taxonomies, create_ldap, module_location, module_org, target_sat ): """LDAP User can not access resources within its own taxonomies if assigned role does not have permissions for same taxonomies @@ -1661,31 +1688,33 @@ def test_negative_access_entities_from_ldap_user( :expectedresults: LDAP User should not be able to access any resources and permissions in its own taxonomies - :CaseLevel: System - :CaseAutomation: Automated """ org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) # Creating Domain resource in different taxonomies to access later - domain = self.create_domain(orgs=[module_org.id], locs=[module_location.id]) + domain = self.create_domain(target_sat, orgs=[module_org.id], locs=[module_location.id]) sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(server_config=sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = [target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() @pytest.mark.tier3 - def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, create_ldap): + def test_positive_assign_org_admin_to_ldap_user_group( + self, role_taxonomies, create_ldap, target_sat + ): """Users in LDAP usergroup can access to the resources in taxonomies if the taxonomies of Org Admin role are same @@ -1703,23 +1732,23 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre resources in taxonomies if the taxonomies of Org Admin role are same - :CaseLevel: System - :CaseAutomation: Automated """ group_name = gen_string("alpha") password = gen_string("alpha") org_admin = self.create_org_admin_role( + target_sat, orgs=[create_ldap['authsource'].organization[0].id], locs=[create_ldap['authsource'].location[0].id], ) # Creating Domain resource in same taxonomies as Org Admin role to access later domain = self.create_domain( + target_sat, orgs=[create_ldap['authsource'].organization[0].id], locs=[create_ldap['authsource'].location[0].id], ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1727,9 +1756,11 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to the usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1737,13 +1768,15 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre sc = 
ServerConfig( auth=(user.login, password), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) # Accessing the Domain resource - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() @pytest.mark.tier3 - def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_taxonomies): + def test_negative_assign_org_admin_to_ldap_user_group( + self, create_ldap, role_taxonomies, target_sat + ): """Users in LDAP usergroup can not have access to the resources in taxonomies if the taxonomies of Org Admin role is not same @@ -1761,21 +1794,19 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta resources in taxonomies if the taxonomies of Org Admin role is not same - :CaseLevel: System - :CaseAutomation: Automated """ group_name = gen_string("alpha") password = gen_string("alpha") org_admin = self.create_org_admin_role( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) # Creating Domain resource in same taxonomies as Org Admin role to access later domain = self.create_domain( - orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] + target_sat, orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1783,9 +1814,11 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1793,11 +1826,11 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta sc = ServerConfig( auth=(user.login, password), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(server_config=sc, id=domain.id).read() class TestRoleSearchFilter: diff --git a/tests/foreman/api/test_settings.py b/tests/foreman/api/test_settings.py index e9dd50f25ff..4cbb5609bd0 100644 --- a/tests/foreman/api/test_settings.py +++ b/tests/foreman/api/test_settings.py @@ -4,28 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Settings :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random import pytest -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + filtered_datapoint, + generate_strings_list, + parametrized, + valid_data_list, +) @filtered_datapoint @@ -49,7 +45,6 @@ def test_positive_update_login_page_footer_text(setting_update): :parametrized: yes :expectedresults: Parameter is updated successfully - """ login_text_value = 
random.choice(list(valid_data_list().values())) setting_update.value = login_text_value @@ -68,7 +63,6 @@ def test_positive_update_login_page_footer_text_without_value(setting_update): :parametrized: yes :expectedresults: login_text has empty value after update - """ setting_update.value = "" setting_update = setting_update.update({'value'}) @@ -87,7 +81,6 @@ def test_positive_update_login_page_footer_text_with_long_string(setting_update) :parametrized: yes :expectedresults: Parameter is updated - """ login_text_value = random.choice(list(generate_strings_list(1000))) setting_update.value = login_text_value @@ -125,7 +118,6 @@ def test_positive_update_hostname_prefix_without_value(setting_update): :BZ: 1911228 :expectedresults: Error should be raised on setting empty value for discovery_prefix setting - """ setting_update.value = "" with pytest.raises(HTTPError): @@ -186,12 +178,12 @@ def test_negative_discover_host_with_invalid_prefix(): @pytest.mark.tier2 @pytest.mark.parametrize('download_policy', ["immediate", "on_demand"]) @pytest.mark.parametrize('setting_update', ['default_download_policy'], indirect=True) -def test_positive_custom_repo_download_policy(setting_update, download_policy): +def test_positive_custom_repo_download_policy(setting_update, download_policy, target_sat): """Check the set custom repository download policy for newly created custom repository. :id: d5150cce-ba85-4ea0-a8d1-6a54d0d29571 - :Steps: + :steps: 1. Create a product, Organization 2. Update the Default Custom Repository download policy in the setting. 3. Create a custom repo under the created organization. @@ -204,14 +196,12 @@ def test_positive_custom_repo_download_policy(setting_update, download_policy): repository. :CaseImportance: Medium - - :CaseLevel: Acceptance """ - org = entities.Organization().create() - prod = entities.Product(organization=org).create() + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() setting_update.value = download_policy setting_update.update({'value'}) - repo = entities.Repository(product=prod, content_type='yum', organization=org).create() + repo = target_sat.api.Repository(product=prod, content_type='yum', organization=org).create() assert repo.download_policy == download_policy repo.delete() prod.delete() diff --git a/tests/foreman/api/test_smartproxy.py b/tests/foreman/api/test_smartproxy.py deleted file mode 100644 index a18634c45d2..00000000000 --- a/tests/foreman/api/test_smartproxy.py +++ /dev/null @@ -1,377 +0,0 @@ -"""Tests for the ``smart_proxies`` paths. - -:Requirement: Smartproxy - -:CaseAutomation: Automated - -:CaseLevel: Component - -:CaseComponent: Capsule - -:Team: Endeavour - -:TestType: Functional - -:CaseImportance: Critical - -:Upstream: No -""" -import pytest -from fauxfactory import gen_string -from fauxfactory import gen_url -from requests import HTTPError - -from robottelo.config import user_nailgun_config -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.issue_handlers import is_open - - -pytestmark = [pytest.mark.run_in_one_thread] - - -@pytest.fixture(scope='module') -def module_proxy_attrs(module_target_sat): - """Find a ``SmartProxy``. - - Every Satellite has a built-in smart proxy, so searching for an - existing smart proxy should always succeed. 
- """ - smart_proxy = module_target_sat.api.SmartProxy().search( - query={'search': f'url = {module_target_sat.url}:9090'} - ) - # Check that proxy is found and unpack it from the list - assert len(smart_proxy) > 0, "No smart proxy is found" - smart_proxy = smart_proxy[0] - return set(smart_proxy.update_json([]).keys()) - - -def _create_smart_proxy(request, target_sat, **kwargs): - """Create a Smart Proxy and add the finalizer""" - proxy = target_sat.api.SmartProxy(**kwargs).create() - - @request.addfinalizer - def _cleanup(): - target_sat.api.SmartProxy(id=proxy.id).delete() - - return proxy - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_negative_create_with_url(target_sat): - """Proxy creation with random URL - - :id: e48a6260-97e0-4234-a69c-77bbbcde85d6 - - :expectedresults: Proxy is not created - - :CaseLevel: Component - - """ - # Create a random proxy - with pytest.raises(HTTPError) as context: - target_sat.api.SmartProxy(url=gen_url(scheme='https')).create() - assert 'Unable to communicate' in context.value.response.text - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -@pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_name(request, target_sat, name): - """Proxy creation with valid name - - :id: 0ffe0dc5-675e-45f4-b7e1-a14d3dd81f6e - - :expectedresults: Proxy is created - - :CaseLevel: Component - - :Parametrized: Yes - - :BZ: 2084661 - """ - if is_open('BZ:2084661') and 'html' in request.node.name: - pytest.skip() - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy = _create_smart_proxy(request, target_sat, name=name, url=url) - assert proxy.name == name - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -@pytest.mark.upgrade -def test_positive_delete(target_sat): - """Proxy deletion - - :id: 872bf12e-736d-43d1-87cf-2923966b59d0 - - :expectedresults: Proxy is deleted - - :CaseLevel: Component - - :BZ: 1398695 - """ - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy = target_sat.api.SmartProxy(url=url).create() - proxy.delete() - with pytest.raises(HTTPError): - proxy.read() - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_positive_update_name(request, target_sat): - """Proxy name update - - :id: f279640e-d7e9-48a3-aed8-7bf406e9d6f2 - - :expectedresults: Proxy has the name updated - - :CaseLevel: Component - - """ - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy = _create_smart_proxy(request, target_sat, url=url) - for new_name in valid_data_list(): - proxy.name = new_name - proxy = proxy.update(['name']) - assert proxy.name == new_name - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_positive_update_url(request, target_sat): - """Proxy url update - - :id: 0305fd54-4e0c-4dd9-a537-d342c3dc867e - - :expectedresults: Proxy has the url updated - - :CaseLevel: Component - - """ - # Create fake capsule - port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, port) as url: - proxy = _create_smart_proxy(request, target_sat, url=url) - # Open another tunnel to update url - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy.url = url - proxy = proxy.update(['url']) - assert proxy.url == url - - 
-@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_positive_update_organization(request, target_sat): - """Proxy name update with the home proxy - - :id: 62631275-7a92-4d34-a949-c56e0c4063f1 - - :expectedresults: Proxy has the name updated - - :CaseLevel: Component - - """ - organizations = [target_sat.api.Organization().create() for _ in range(2)] - newport = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, newport) as url: - proxy = _create_smart_proxy(request, target_sat, url=url) - proxy.organization = organizations - proxy = proxy.update(['organization']) - assert {org.id for org in proxy.organization} == {org.id for org in organizations} - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_positive_update_location(request, target_sat): - """Proxy name update with the home proxy - - :id: e08eaaa9-7c11-4cda-bbe7-6d1f7c732569 - - :expectedresults: Proxy has the name updated - - :CaseLevel: Component - - """ - locations = [target_sat.api.Location().create() for _ in range(2)] - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy = _create_smart_proxy(request, target_sat, url=url) - proxy.location = locations - proxy = proxy.update(['location']) - assert {loc.id for loc in proxy.location} == {loc.id for loc in locations} - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier2 -@pytest.mark.upgrade -def test_positive_refresh_features(request, target_sat): - """Refresh smart proxy features, search for proxy by id - - :id: d0237546-702e-4d1a-9212-8391295174da - - :expectedresults: Proxy features are refreshed - - :CaseLevel: Integration - - """ - # Since we want to run multiple commands against our fake capsule, we - # need the tunnel kept open in order not to allow different concurrent - # test to claim it. Thus we want to manage the tunnel manually. 
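For reference, a minimal sketch of the fake-capsule tunnel pattern the deleted tests above rely on; `available_capsule_port` and `default_url_on_new_port` are the Satellite helpers used throughout this file, the calls mirror the deleted test bodies, and the snippet is an illustration rather than part of the patch.

# The "fake capsule" is a tunnel from a spare port back to the Satellite's own
# port 9090; a SmartProxy registered against the tunnel URL is only reachable
# while the `with` block keeps the tunnel open.
new_port = target_sat.available_capsule_port
with target_sat.default_url_on_new_port(9090, new_port) as url:
    proxy = target_sat.api.SmartProxy(url=url).create()
    # Any follow-up call against the proxy must happen inside the block,
    # which is why the tunnel is managed manually here.
    proxy.refresh()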
- - # get an available port for our fake capsule - new_port = target_sat.available_capsule_port - with target_sat.default_url_on_new_port(9090, new_port) as url: - proxy = _create_smart_proxy(request, target_sat, url=url) - proxy.refresh() - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier2 -def test_positive_import_puppet_classes( - request, - session_puppet_enabled_sat, - puppet_proxy_port_range, - module_puppet_org, - module_puppet_loc, -): - """Import puppet classes from proxy for admin and non-admin user - - :id: 385efd1b-6146-47bf-babf-0127ce5955ed - - :expectedresults: Puppet classes are imported from proxy - - :CaseLevel: Integration - - :BZ: 1398695, 2142555 - - :customerscenario: true - """ - puppet_sat = session_puppet_enabled_sat - update_msg = ( - 'Successfully updated environment and puppetclasses from the on-disk puppet installation' - ) - no_update_msg = 'No changes to your environments detected' - # Create role, add permissions and create non-admin user - user_login = gen_string('alpha') - user_password = gen_string('alpha') - role = puppet_sat.api.Role().create() - puppet_sat.api_factory.create_role_permissions( - role, - { - 'ForemanPuppet::Puppetclass': [ - 'view_puppetclasses', - 'create_puppetclasses', - 'import_puppetclasses', - ] - }, - ) - user = puppet_sat.api.User( - role=[role], - admin=True, - login=user_login, - password=user_password, - organization=[module_puppet_org], - location=[module_puppet_loc], - ).create() - request.addfinalizer(user.delete) - request.addfinalizer(role.delete) - - new_port = puppet_sat.available_capsule_port - with puppet_sat.default_url_on_new_port(9090, new_port) as url: - proxy = puppet_sat.api.SmartProxy(url=url).create() - - result = proxy.import_puppetclasses() - assert result['message'] in [update_msg, no_update_msg] - # Import puppetclasses with environment - result = proxy.import_puppetclasses(environment='production') - assert result['message'] in [update_msg, no_update_msg] - - # Non-Admin user with access to import_puppetclasses - user_cfg = user_nailgun_config(user_login, user_password) - user_cfg.url = f'https://{puppet_sat.hostname}' - user_proxy = puppet_sat.api.SmartProxy(server_config=user_cfg, id=proxy.id).read() - - result = user_proxy.import_puppetclasses() - assert result['message'] in [update_msg, no_update_msg] - # Import puppetclasses with environment - result = user_proxy.import_puppetclasses(environment='production') - assert result['message'] in [update_msg, no_update_msg] - - request.addfinalizer(puppet_sat.api.SmartProxy(id=proxy.id).delete) - - -"""Tests to see if the server returns the attributes it should. - -Satellite should return a full description of an entity each time an entity -is created, read or updated. These tests verify that certain attributes -really are returned. The ``one_to_*_names`` functions know what names -Satellite may assign to fields. -""" - - -@pytest.mark.tier1 -def test_positive_update_loc(module_proxy_attrs): - """Update a smart proxy. Inspect the server's response. - - :id: 42d6b749-c047-4fd2-90ee-ffab7be558f9 - - :expectedresults: The response contains some value for the ``location`` - field. - - :BZ: 1262037 - - :CaseImportance: High - - :CaseLevel: Component - - """ - names = {'location', 'location_ids', 'locations'} - assert len(names & module_proxy_attrs) >= 1, f'None of {names} are in {module_proxy_attrs}' - - -@pytest.mark.tier1 -def test_positive_update_org(module_proxy_attrs): - """Update a smart proxy. Inspect the server's response. 
- - :id: fbde9f87-33db-4b95-a5f7-71a618460c84 - - :expectedresults: The response contains some value for the - ``organization`` field. - - :BZ: 1262037 - - :CaseImportance: High - - :CaseLevel: Component - - """ - names = {'organization', 'organization_ids', 'organizations'} - assert len(names & module_proxy_attrs) >= 1, f'None of {names} are in {module_proxy_attrs}' - - -@pytest.mark.skip_if_not_set('fake_capsules') -@pytest.mark.tier1 -def test_positive_search_nondefault_proxy(request, target_sat): - """Search non-default proxy with id!=1 - - :id: caf51662-6b4e-11ed-baba-2b9d7b368002 - - :expectedresults: Non-default proxy can be searched - - :BZ: 2077824 - - :customerscenario: true - """ - with target_sat.default_url_on_new_port(9090, target_sat.available_capsule_port) as url: - proxy = _create_smart_proxy(request, target_sat, name=gen_string('alpha'), url=url) - capsules = target_sat.api.Capsule().search(query={'search': 'id != 1'}) - assert len(capsules) == 1 - assert capsules[0].url == proxy.url - assert capsules[0].name == proxy.name diff --git a/tests/foreman/api/test_subnet.py b/tests/foreman/api/test_subnet.py index 10390260db5..99873b537f5 100644 --- a/tests/foreman/api/test_subnet.py +++ b/tests/foreman/api/test_subnet.py @@ -9,32 +9,28 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Networking :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import re import pytest -from nailgun import entities from requests.exceptions import HTTPError -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized +from robottelo.utils.datafactory import ( + gen_string, + generate_strings_list, + invalid_values_list, + parametrized, +) @pytest.mark.tier1 -def test_positive_create_with_parameter(): +def test_positive_create_with_parameter(target_sat): """Subnet can be created along with parameters :id: ec581cb5-8c48-4b9c-b536-302c0b7ec30f @@ -45,14 +41,14 @@ def test_positive_create_with_parameter(): :expectedresults: The Subnet is created with parameter """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() assert subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(generate_strings_list())) -def test_positive_add_parameter(name): +def test_positive_add_parameter(name, target_sat): """Parameters can be created in subnet :id: c1dae6f4-45b1-45db-8529-d7918e41a99b @@ -68,15 +64,15 @@ def test_positive_add_parameter(name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - subnet_param = entities.Parameter(subnet=subnet.id, name=name, value=value).create() + subnet_param = target_sat.api.Parameter(subnet=subnet.id, name=name, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 -def test_positive_add_parameter_with_values_and_separator(): +def test_positive_add_parameter_with_values_and_separator(target_sat): """Subnet parameters can be created with values separated by comma :id: 
b3de6f96-7c39-4c44-b91c-a6d141f5dd6a @@ -92,10 +88,10 @@ def test_positive_add_parameter_with_values_and_separator(): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() name = gen_string('alpha') values = ', '.join(generate_strings_list()) - subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=values).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=values).create() assert subnet_param.name == name assert subnet_param.value == values @@ -104,7 +100,7 @@ def test_positive_add_parameter_with_values_and_separator(): @pytest.mark.parametrize( 'separator', **parametrized({'comma': ',', 'slash': '/', 'dash': '-', 'pipe': '|'}) ) -def test_positive_create_with_parameter_and_valid_separator(separator): +def test_positive_create_with_parameter_and_valid_separator(separator, target_sat): """Subnet parameters can be created with name with valid separators :id: d1e2d75a-a1e8-4767-93f1-0bb1b75e10a0 @@ -122,16 +118,16 @@ def test_positive_create_with_parameter_and_valid_separator(separator): :CaseImportance: Low """ name = f'{separator}'.join(generate_strings_list()) - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=value).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_create_with_parameter_and_invalid_separator(name): +def test_negative_create_with_parameter_and_invalid_separator(name, target_sat): """Subnet parameters can not be created with name with invalid separators @@ -153,13 +149,13 @@ def test_negative_create_with_parameter_and_invalid_separator(name): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() with pytest.raises(HTTPError): - entities.Parameter(name=name, subnet=subnet.id).create() + target_sat.api.Parameter(name=name, subnet=subnet.id).create() @pytest.mark.tier1 -def test_negative_create_with_duplicated_parameters(): +def test_negative_create_with_duplicated_parameters(target_sat): """Attempt to create multiple parameters with same key name for the same subnet @@ -178,10 +174,10 @@ def test_negative_create_with_duplicated_parameters(): :CaseImportance: Low """ - subnet = entities.Subnet().create() - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() with pytest.raises(HTTPError) as context: - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() assert re.search("Name has already been taken", context.value.response.text) @@ -205,8 +201,6 @@ def test_positive_inherit_subnet_parmeters_in_host(): 2. The parameters from subnet should be displayed in host enc output - :CaseLevel: System - :CaseImportance: Medium :BZ: 1470014 @@ -233,8 +227,6 @@ def test_positive_subnet_parameters_override_from_host(): 2. The new value should be assigned to parameter 3. 
The parameter and value should be accessible as host parameters - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1470014 @@ -242,7 +234,7 @@ def test_positive_subnet_parameters_override_from_host(): @pytest.mark.tier3 -def test_positive_subnet_parameters_override_impact_on_subnet(): +def test_positive_subnet_parameters_override_impact_on_subnet(target_sat): """Override subnet parameter from host impact on subnet parameter :id: 6fe963ed-93a3-496e-bfd9-599bf91a61f3 @@ -257,22 +249,20 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): :expectedresults: The override value of subnet parameter from host should not change actual value in subnet parameter - :CaseLevel: System - :CaseImportance: Medium """ # Create subnet with valid parameters parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], subnet_parameters_attributes=parameter ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] # Create host with above subnet - host = entities.Host(location=loc, organization=org, subnet=org_subnet).create() + host = target_sat.api.Host(location=loc, organization=org, subnet=org_subnet).create() assert host.subnet.read().name == org_subnet.name parameter_new_value = [ { @@ -291,7 +281,7 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): @pytest.mark.tier1 -def test_positive_update_parameter(): +def test_positive_update_parameter(target_sat): """Subnet parameter can be updated :id: 8c389c3f-60ef-4856-b8fc-c5b066c67a2f @@ -307,7 +297,7 @@ def test_positive_update_parameter(): :CaseImportance: Medium """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() update_parameter = [{'name': gen_string('utf8'), 'value': gen_string('utf8')}] subnet.subnet_parameters_attributes = update_parameter up_subnet = subnet.update(['subnet_parameters_attributes']) @@ -317,7 +307,7 @@ def test_positive_update_parameter(): @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_update_parameter(new_name): +def test_negative_update_parameter(new_name, target_sat): """Subnet parameter can not be updated with invalid names :id: fcdbad13-ad96-4152-8e20-e023d61a2853 @@ -337,18 +327,17 @@ def test_negative_update_parameter(new_name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter( + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter( name=gen_string('utf8'), subnet=subnet.id, value=gen_string('utf8') ).create() + sub_param.name = new_name with pytest.raises(HTTPError): - sub_param.name = new_name sub_param.update(['name']) -@pytest.mark.stubbed @pytest.mark.tier2 -def test_positive_update_subnet_parameter_host_impact(): +def test_positive_update_subnet_parameter_host_impact(target_sat): """Update in parameter name and value from subnet component updates the parameter in host inheriting that subnet @@ -363,19 
+352,34 @@ def test_positive_update_subnet_parameter_host_impact(): :expectedresults: 1. The inherited subnet parameter in host should have - updated name and value - 2. The inherited subnet parameter in host enc should have - updated name and value - - :CaseLevel: Integration + updated name and value. :BZ: 1470014 """ + parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( + location=[loc], organization=[org], subnet_parameters_attributes=parameter + ).create() + assert parameter[0]['name'] == org_subnet.subnet_parameters_attributes[0]['name'] + assert parameter[0]['value'] == org_subnet.subnet_parameters_attributes[0]['value'] + host = target_sat.api.Host(location=loc, organization=org, subnet=org_subnet).create() + parameter_new_value = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] + org_subnet.subnet_parameters_attributes = parameter_new_value + org_subnet.update(['subnet_parameters_attributes']) + assert ( + host.subnet.read().subnet_parameters_attributes[0]['name'] == parameter_new_value[0]['name'] + ) + assert ( + host.subnet.read().subnet_parameters_attributes[0]['value'] + == parameter_new_value[0]['value'] + ) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_subnet_parameter(): +def test_positive_delete_subnet_parameter(target_sat): """Subnet parameter can be deleted :id: 972b66ec-d506-4fcb-9786-c62f2f79ac1a @@ -387,8 +391,8 @@ def test_positive_delete_subnet_parameter(): :expectedresults: The parameter should be deleted from subnet """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter(subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter(subnet=subnet.id).create() sub_param.delete() with pytest.raises(HTTPError): sub_param.read() @@ -414,8 +418,6 @@ def test_positive_delete_subnet_parameter_host_impact(): 1. The parameter should be deleted from host 2. The parameter should be deleted from host enc - :CaseLevel: Integration - :BZ: 1470014 """ @@ -443,14 +445,12 @@ def test_positive_delete_subnet_overridden_parameter_host_impact(): host parameter now 2. 
The parameter should not be deleted from host enc as well - :CaseLevel: Integration - :BZ: 1470014 """ @pytest.mark.tier1 -def test_positive_list_parameters(): +def test_positive_list_parameters(target_sat): """Satellite lists all the subnet parameters :id: ce86d531-bf6b-45a9-81e3-67e1b3398f76 @@ -465,9 +465,9 @@ def test_positive_list_parameters(): parameters """ parameter = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], ipam='DHCP', @@ -476,10 +476,10 @@ def test_positive_list_parameters(): ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter['value'] - sub_param = entities.Parameter( + sub_param = target_sat.api.Parameter( name=gen_string('alpha'), subnet=org_subnet.id, value=gen_string('alpha') ).create() - org_subnet = entities.Subnet(id=org_subnet.id).read() + org_subnet = target_sat.api.Subnet(id=org_subnet.id).read() params_list = { param['name']: param['value'] for param in org_subnet.subnet_parameters_attributes @@ -511,8 +511,6 @@ def test_positive_subnet_parameter_priority(): 2. Host enc should display the parameter with value inherited from higher priority component(HostGroup in this case) - :CaseLevel: System - :CaseImportance: Low :BZ: 1470014 @@ -542,8 +540,6 @@ def test_negative_component_overrides_subnet_parameter(): 2. Host enc should not display the parameter with value inherited from lower priority component(domain in this case) - :CaseLevel: System - :CaseImportance: Low :BZ: 1470014 diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 8b68a2a6b45..282dac528b0 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -8,31 +8,21 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: SubscriptionManagement :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string -from nailgun import entities from nailgun.config import ServerConfig from nailgun.entity_mixins import TaskFailedError +import pytest from requests.exceptions import HTTPError -from robottelo.cli.subscription import Subscription -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET - +from robottelo.config import settings +from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME, PRDS, REPOS, REPOSET pytestmark = [pytest.mark.run_in_one_thread] @@ -47,31 +37,32 @@ def rh_repo(module_sca_manifest_org, module_target_sat): reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @pytest.fixture(scope='module') -def custom_repo(rh_repo, module_sca_manifest_org): - custom_repo = entities.Repository( - product=entities.Product(organization=module_sca_manifest_org).create(), +def custom_repo(rh_repo, module_sca_manifest_org, module_target_sat): + custom_repo = module_target_sat.api.Repository( + 
product=module_target_sat.api.Product(organization=module_sca_manifest_org).create(), ).create() custom_repo.sync() return custom_repo @pytest.fixture(scope='module') -def module_ak(module_sca_manifest_org, rh_repo, custom_repo): +def module_ak(module_sca_manifest_org, rh_repo, custom_repo, module_target_sat): """rh_repo and custom_repo are included here to ensure their execution before the AK""" - module_ak = entities.ActivationKey( + return module_target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment( + id=module_sca_manifest_org.library.id + ), auto_attach=True, ).create() - return module_ak @pytest.mark.tier1 @@ -85,12 +76,12 @@ def test_positive_create(module_entitlement_manifest, module_target_sat): :CaseImportance: Critical """ - org = entities.Organization().create() + org = module_target_sat.api.Organization().create() module_target_sat.upload_manifest(org.id, module_entitlement_manifest.content) @pytest.mark.tier1 -def test_positive_refresh(function_entitlement_manifest_org, request): +def test_positive_refresh(function_entitlement_manifest_org, request, target_sat): """Upload a manifest and refresh it afterwards. :id: cd195db6-e81b-42cb-a28d-ec0eb8a53341 @@ -100,7 +91,7 @@ def test_positive_refresh(function_entitlement_manifest_org, request): :CaseImportance: Critical """ org = function_entitlement_manifest_org - sub = entities.Subscription(organization=org) + sub = target_sat.api.Subscription(organization=org) request.addfinalizer(lambda: sub.delete_manifest(data={'organization_id': org.id})) sub.refresh_manifest(data={'organization_id': org.id}) assert sub.search() @@ -123,9 +114,9 @@ def test_positive_create_after_refresh( :CaseImportance: Critical """ - org_sub = entities.Subscription(organization=function_entitlement_manifest_org) - new_org = entities.Organization().create() - new_org_sub = entities.Subscription(organization=new_org) + org_sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) + new_org = target_sat.api.Organization().create() + new_org_sub = target_sat.api.Subscription(organization=new_org) try: org_sub.refresh_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert org_sub.search() @@ -136,7 +127,7 @@ def test_positive_create_after_refresh( @pytest.mark.tier1 -def test_positive_delete(function_entitlement_manifest_org): +def test_positive_delete(function_entitlement_manifest_org, target_sat): """Delete an Uploaded manifest. :id: 4c21c7c9-2b26-4a65-a304-b978d5ba34fc @@ -145,7 +136,7 @@ def test_positive_delete(function_entitlement_manifest_org): :CaseImportance: Critical """ - sub = entities.Subscription(organization=function_entitlement_manifest_org) + sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) assert sub.search() sub.delete_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert len(sub.search()) == 0 @@ -160,12 +151,12 @@ def test_negative_upload(function_entitlement_manifest, target_sat): :expectedresults: The manifest is not uploaded to the second organization. 
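The dominant refactor in these hunks swaps module-level `nailgun.entities` calls for the `target_sat.api` / `module_target_sat.api` namespace, binding every entity to the Satellite fixture's server configuration instead of nailgun's process-global one. A minimal sketch of the mechanism, assuming the wrapper simply partially applies a `ServerConfig` (robottelo's real `Satellite` class is more involved, so treat this as illustrative only):

    from nailgun import entities
    from nailgun.config import ServerConfig

    class ApiNamespace:
        """Nailgun entity classes pre-bound to one server's configuration."""

        def __init__(self, server_config: ServerConfig):
            self._cfg = server_config

        def __getattr__(self, name):
            entity_cls = getattr(entities, name)
            # server_config is the first argument of every nailgun Entity,
            # so binding it here frees tests from the global configuration.
            return lambda *args, **kwargs: entity_cls(self._cfg, *args, **kwargs)

With two Satellites in one session, `sat_a.api.Organization().create()` and `sat_b.api.Organization().create()` then target different servers, which the old `entities.Organization().create()` form could not express.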
""" - orgs = [entities.Organization().create() for _ in range(2)] + orgs = [target_sat.api.Organization().create() for _ in range(2)] with function_entitlement_manifest as manifest: target_sat.upload_manifest(orgs[0].id, manifest.content) with pytest.raises(TaskFailedError): target_sat.upload_manifest(orgs[1].id, manifest.content) - assert len(entities.Subscription(organization=orgs[1]).search()) == 0 + assert len(target_sat.api.Subscription(organization=orgs[1]).search()) == 0 @pytest.mark.tier2 @@ -195,7 +186,7 @@ def test_positive_delete_manifest_as_another_user( sc1 = ServerConfig( auth=(user1.login, user1_password), url=target_sat.url, - verify=False, + verify=settings.server.verify_ca, ) user2_password = gen_string('alphanumeric') user2 = target_sat.api.User( @@ -207,18 +198,18 @@ def test_positive_delete_manifest_as_another_user( sc2 = ServerConfig( auth=(user2.login, user2_password), url=target_sat.url, - verify=False, + verify=settings.server.verify_ca, ) # use the first admin to upload a manifest with function_entitlement_manifest as manifest: - entities.Subscription(sc1, organization=function_org).upload( + target_sat.api.Subscription(server_config=sc1, organization=function_org).upload( data={'organization_id': function_org.id}, files={'content': manifest.content} ) # try to search and delete the manifest with another admin - entities.Subscription(sc2, organization=function_org).delete_manifest( + target_sat.api.Subscription(server_config=sc2, organization=function_org).delete_manifest( data={'organization_id': function_org.id} ) - assert len(Subscription.list({'organization-id': function_org.id})) == 0 + assert len(target_sat.cli.Subscription.list({'organization-id': function_org.id})) == 0 @pytest.mark.tier2 @@ -241,7 +232,7 @@ def test_positive_subscription_status_disabled( rhel_contenthost.install_katello_ca(target_sat) rhel_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel_contenthost.subscribed - host_content = entities.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content + host_content = target_sat.api.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content assert 'Simple Content Access' in str(host_content) @@ -269,9 +260,12 @@ def test_sca_end_to_end( rhel7_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel7_contenthost.subscribed # Check to see if Organization is in SCA Mode - assert entities.Organization(id=module_sca_manifest_org.id).read().simple_content_access is True + assert ( + target_sat.api.Organization(id=module_sca_manifest_org.id).read().simple_content_access + is True + ) # Verify that you cannot attach a subscription to an activation key in SCA Mode - subscription = entities.Subscription(organization=module_sca_manifest_org).search( + subscription = target_sat.api.Subscription(organization=module_sca_manifest_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] with pytest.raises(HTTPError) as ak_context: @@ -279,12 +273,12 @@ def test_sca_end_to_end( assert 'Simple Content Access' in ak_context.value.response.text # Verify that you cannot attach a subscription to an Host in SCA Mode with pytest.raises(HTTPError) as host_context: - entities.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( + target_sat.api.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( data={'subscriptions': [{'id': subscription.id, 'quantity': 1}]} ) assert 'Simple Content Access' in host_context.value.response.text # 
Create a content view with repos and check to see that the client has access - content_view = entities.ContentView(organization=module_sca_manifest_org).create() + content_view = target_sat.api.ContentView(organization=module_sca_manifest_org).create() content_view.repository = [rh_repo, custom_repo] content_view.update(['repository']) content_view.publish() @@ -330,34 +324,34 @@ def test_positive_candlepin_events_processed_by_stomp( :CaseImportance: High """ - repo = entities.Repository( - product=entities.Product(organization=function_org).create() + repo = target_sat.api.Repository( + product=target_sat.api.Product(organization=function_org).create() ).create() repo.sync() - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=function_org.default_content_view, max_hosts=100, organization=function_org, - environment=entities.LifecycleEnvironment(id=function_org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=function_org.library.id), auto_attach=True, ).create() rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost(function_org.name, ak.name) - host = entities.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) + host = target_sat.api.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) host_id = host[0].id - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 2 with function_entitlement_manifest as manifest: target_sat.upload_manifest(function_org.id, manifest.content) - subscription = entities.Subscription(organization=function_org).search( + subscription = target_sat.api.Subscription(organization=function_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] - entities.HostSubscription(host=host_id).add_subscriptions( + target_sat.api.HostSubscription(host=host_id).add_subscriptions( data={'subscriptions': [{'id': subscription.cp_id, 'quantity': 1}]} ) - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 0 - response = entities.Ping().search_json()['services']['candlepin_events'] + response = target_sat.api.Ping().search_json()['services']['candlepin_events'] assert response['status'] == 'ok' assert '0 Failed' in response['message'] @@ -389,11 +383,11 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte :CaseImportance: High """ - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=module_sca_manifest_org.library.id), auto_attach=True, ).create() # registering the content host with no content enabled/synced in the org @@ -414,7 +408,7 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() # re-registering the host should test whether Candlepin gracefully handles # registration of a host with an expired SCA cert @@ -448,9 +442,7 @@ def test_positive_os_restriction_on_repos(): """ -def test_positive_async_endpoint_for_manifest_refresh( - target_sat, 
function_entitlement_manifest_org -): +def test_positive_async_endpoint_for_manifest_refresh(target_sat, module_entitlement_manifest_org): """Verify that manifest refresh is using an async endpoint. Previously this was a single, synchronous endpoint. The endpoint to retrieve manifests is now split into two: an async endpoint to start "exporting" the manifest, and a second endpoint to download the @@ -469,12 +461,12 @@ def test_positive_async_endpoint_for_manifest_refresh( :BZ: 2066323 """ - sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) + sub = target_sat.api.Subscription(organization=module_entitlement_manifest_org) # set log level to 'debug' and restart services target_sat.cli.Admin.logging({'all': True, 'level-debug': True}) target_sat.cli.Service.restart() # refresh manifest and assert new log message to confirm async endpoint - sub.refresh_manifest(data={'organization_id': function_entitlement_manifest_org.id}) + sub.refresh_manifest(data={'organization_id': module_entitlement_manifest_org.id}) results = target_sat.execute( 'grep "Sending GET request to upstream Candlepin" /var/log/foreman/production.log' ) diff --git a/tests/foreman/api/test_syncplan.py b/tests/foreman/api/test_syncplan.py index 30d16893bbb..3ecbfac8714 100644 --- a/tests/foreman/api/test_syncplan.py +++ b/tests/foreman/api/test_syncplan.py @@ -8,42 +8,31 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SyncPlans :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta from time import sleep -import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_string +from fauxfactory import gen_choice, gen_string from nailgun import client -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.config import get_credentials -from robottelo.config import get_url -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.constants import SYNC_INTERVAL +from robottelo.config import get_credentials, get_url +from robottelo.constants import PRDS, REPOS, REPOSET, SYNC_INTERVAL from robottelo.logging import logger -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_cron_expressions -from robottelo.utils.datafactory import valid_data_list - +from robottelo.utils.datafactory import ( + filtered_datapoint, + invalid_values_list, + parametrized, + valid_cron_expressions, + valid_data_list, +) sync_date_deltas = { # Today @@ -107,7 +96,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.tier1 -def test_positive_get_routes(): +def test_positive_get_routes(target_sat): """Issue an HTTP GET response to both available routes. 
:id: 9e40ea7f-71ea-4ced-94ba-cde03620c654 @@ -118,8 +107,8 @@ def test_positive_get_routes(): :CaseImportance: Critical """ - org = entities.Organization().create() - entities.SyncPlan(organization=org).create() + org = target_sat.api.Organization().create() + target_sat.api.SyncPlan(organization=org).create() response1 = client.get( f'{get_url()}/katello/api/v2/sync_plans', auth=get_credentials(), @@ -136,7 +125,6 @@ def test_positive_get_routes(): assert response1.json()['results'] == response2.json()['results'] -@pytest.mark.build_sanity @pytest.mark.parametrize("enabled", [False, True]) @pytest.mark.tier1 def test_positive_create_enabled_disabled(module_org, enabled, request, target_sat): @@ -151,7 +139,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan = sync_plan.read() assert sync_plan.enabled == enabled @@ -159,7 +147,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a sync plan with a random name. :id: c1263134-0d7c-425a-82fd-df5274e1f9ba @@ -170,14 +158,16 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, name=name, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan( + enabled=False, name=name, organization=module_org + ).create() sync_plan = sync_plan.read() assert sync_plan.name == name @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, description): +def test_positive_create_with_description(module_org, description, module_target_sat): """Create a sync plan with a random description. :id: 3e5745e8-838d-44a5-ad61-7e56829ad47c @@ -189,7 +179,7 @@ def test_positive_create_with_description(module_org, description): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=description, organization=module_org ).create() sync_plan = sync_plan.read() @@ -198,7 +188,7 @@ def test_positive_create_with_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_create_with_interval(module_org, interval): +def test_positive_create_with_interval(module_org, interval, module_target_sat): """Create a sync plan with a random interval. 
:id: d160ed1c-b698-42dc-be0b-67ac693c7840 @@ -209,7 +199,7 @@ def test_positive_create_with_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -221,7 +211,7 @@ def test_positive_create_with_interval(module_org, interval): @pytest.mark.parametrize('sync_delta', **parametrized(sync_date_deltas)) @pytest.mark.tier1 -def test_positive_create_with_sync_date(module_org, sync_delta): +def test_positive_create_with_sync_date(module_org, sync_delta, target_sat): """Create a sync plan and update its sync date. :id: bdb6e0a9-0d3b-4811-83e2-2140b7bb62e3 @@ -233,16 +223,16 @@ def test_positive_create_with_sync_date(module_org, sync_delta): :CaseImportance: Critical """ sync_date = datetime.now() + timedelta(seconds=sync_delta) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( enabled=False, organization=module_org, sync_date=sync_date ).create() sync_plan = sync_plan.read() - assert sync_date.strftime('%Y-%m-%d %H:%M:%S UTC') == sync_plan.sync_date + assert sync_date.strftime('%Y-%m-%d %H:%M:%S +0000') == sync_plan.sync_date @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Create a sync plan with an invalid name. :id: a3a0f844-2f81-4f87-9f68-c25506c29ce2 @@ -255,12 +245,12 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(name=name, organization=module_org).create() + module_target_sat.api.SyncPlan(name=name, organization=module_org).create() @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_interval(module_org, interval): +def test_negative_create_with_invalid_interval(module_org, interval, module_target_sat): """Create a sync plan with invalid interval specified. :id: f5844526-9f58-4be3-8a96-3849a465fc02 @@ -273,11 +263,11 @@ def test_negative_create_with_invalid_interval(module_org, interval): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(interval=interval, organization=module_org).create() + module_target_sat.api.SyncPlan(interval=interval, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_empty_interval(module_org): +def test_negative_create_with_empty_interval(module_org, module_target_sat): """Create a sync plan with no interval specified. 
:id: b4686463-69c8-4538-b040-6fb5246a7b00 @@ -287,7 +277,7 @@ def test_negative_create_with_empty_interval(module_org): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(organization=module_org) + sync_plan = module_target_sat.api.SyncPlan(organization=module_org) sync_plan.create_missing() del sync_plan.interval with pytest.raises(HTTPError): @@ -307,7 +297,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=not enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=not enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan.enabled = enabled sync_plan.update(['enabled']) @@ -317,7 +307,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, name): +def test_positive_update_name(module_org, name, module_target_sat): """Create a sync plan and update its name. :id: dbfadf4f-50af-4aa8-8d7d-43988dc4528f @@ -329,7 +319,7 @@ def test_positive_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name sync_plan.update(['name']) sync_plan = sync_plan.read() @@ -338,7 +328,7 @@ def test_positive_update_name(module_org, name): @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_org, description): +def test_positive_update_description(module_org, description, module_target_sat): """Create a sync plan and update its description. :id: 4769fe9c-9eec-40c8-b015-1e3d7e570bec @@ -348,7 +338,7 @@ def test_positive_update_description(module_org, description): :expectedresults: A sync plan is created and its description can be updated with the specified description. """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org ).create() sync_plan.description = description @@ -359,7 +349,7 @@ def test_positive_update_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval(module_org, interval): +def test_positive_update_interval(module_org, interval, module_target_sat): """Create a sync plan and update its interval. :id: cf2eddf8-b4db-430e-a9b0-83c626b45068 @@ -371,7 +361,7 @@ def test_positive_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -391,7 +381,7 @@ def test_positive_update_interval(module_org, interval): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval_custom_cron(module_org, interval): +def test_positive_update_interval_custom_cron(module_org, interval, module_target_sat): """Create a sync plan and update its interval to custom cron. 
:id: 26c58319-cae0-4b0c-b388-2a1fe3f22344 @@ -404,7 +394,7 @@ def test_positive_update_interval_custom_cron(module_org, interval): :CaseImportance: Critical """ if interval != SYNC_INTERVAL['custom']: - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, @@ -420,7 +410,7 @@ def test_positive_update_interval_custom_cron(module_org, interval): @pytest.mark.parametrize('sync_delta', **parametrized(sync_date_deltas)) @pytest.mark.tier1 -def test_positive_update_sync_date(module_org, sync_delta): +def test_positive_update_sync_date(module_org, sync_delta, target_sat): """Updated sync plan's sync date. :id: fad472c7-01b4-453b-ae33-0845c9e0dfd4 @@ -432,18 +422,18 @@ def test_positive_update_sync_date(module_org, sync_delta): :CaseImportance: Critical """ sync_date = datetime.now() + timedelta(seconds=sync_delta) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( enabled=False, organization=module_org, sync_date=datetime.now() + timedelta(days=10) ).create() sync_plan.sync_date = sync_date sync_plan.update(['sync_date']) sync_plan = sync_plan.read() - assert sync_date.strftime('%Y-%m-%d %H:%M:%S UTC') == sync_plan.sync_date + assert sync_date.strftime('%Y-%m-%d %H:%M:%S +0000') == sync_plan.sync_date @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, name): +def test_negative_update_name(module_org, name, module_target_sat): """Try to update a sync plan with an invalid name. :id: ae502053-9d3c-4cad-aee4-821f846ceae5 @@ -455,7 +445,7 @@ def test_negative_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name with pytest.raises(HTTPError): sync_plan.update(['name']) @@ -463,7 +453,7 @@ def test_negative_update_name(module_org, name): @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_interval(module_org, interval): +def test_negative_update_interval(module_org, interval, module_target_sat): """Try to update a sync plan with invalid interval. :id: 8c981174-6f55-49c0-8baa-40e5c3fc598c @@ -475,14 +465,14 @@ def test_negative_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.interval = interval with pytest.raises(HTTPError): sync_plan.update(['interval']) @pytest.mark.tier2 -def test_positive_add_product(module_org): +def test_positive_add_product(module_org, target_sat): """Create a sync plan and add one product to it. :id: 036dea02-f73d-4fc1-9c41-5515b6659c79 @@ -490,12 +480,11 @@ def test_positive_add_product(module_org): :expectedresults: A sync plan can be created and one product can be added to it. 
- :CaseLevel: Integration - :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - product = entities.Product(organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() + target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) sync_plan = sync_plan.read() assert len(sync_plan.product) == 1 @@ -503,18 +492,17 @@ def test_positive_add_product(module_org): @pytest.mark.tier2 -def test_positive_add_products(module_org): +def test_positive_add_products(module_org, target_sat): """Create a sync plan and add two products to it. :id: 2a80ecad-2245-46d8-bbc6-0b802e68d50c :expectedresults: A sync plan can be created and two products can be added to it. - - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] + [target_sat.api.Repository(product=product).create() for product in products] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) sync_plan = sync_plan.read() assert len(sync_plan.product) == 2 @@ -522,7 +510,7 @@ def test_positive_add_products(module_org): @pytest.mark.tier2 -def test_positive_remove_product(module_org): +def test_positive_remove_product(module_org, target_sat): """Create a sync plan with two products and then remove one product from it. @@ -531,12 +519,11 @@ def test_positive_remove_product(module_org): :expectedresults: A sync plan can be created and one product can be removed from it. - :CaseLevel: Integration - :BZ: 1199150 """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] + [target_sat.api.Repository(product=product).create() for product in products] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [products[0].id]}) @@ -547,7 +534,7 @@ def test_positive_remove_product(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_products(module_org): +def test_positive_remove_products(module_org, target_sat): """Create a sync plan with two products and then remove both products from it. @@ -555,11 +542,10 @@ def test_positive_remove_products(module_org): :expectedresults: A sync plan can be created and both products can be removed from it. 
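A pattern repeated across the sync-plan hunks above and below: every `Product` now gets a `Repository` created under it before `add_products` is called, presumably so the attached products are actually syncable. The multi-product variants do this with a list comprehension evaluated purely for its side effects; a plain loop expresses the same setup more idiomatically (a style sketch only, not part of the diff):

    # Equivalent to: [target_sat.api.Repository(product=p).create() for p in products]
    products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)]
    for product in products:
        # Give each product a repository before attaching it to the plan.
        target_sat.api.Repository(product=product).create()
    sync_plan.add_products(data={'product_ids': [product.id for product in products]})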
- - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] + [target_sat.api.Repository(product=product).create() for product in products] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [product.id for product in products]}) @@ -575,13 +561,12 @@ def test_positive_repeatedly_add_remove(module_org, request, target_sat): :expectedresults: A task is returned which can be used to monitor the additions and removals. - :CaseLevel: Integration - :BZ: 1199150 """ - sync_plan = entities.SyncPlan(organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() + target_sat.api.Repository(product=product).create() for _ in range(5): sync_plan.add_products(data={'product_ids': [product.id]}) assert len(sync_plan.read().product) == 1 @@ -598,16 +583,15 @@ def test_positive_add_remove_products_custom_cron(module_org, request, target_sa :expectedresults: A sync plan can be created and both products can be removed from it. - - :CaseLevel: Integration """ cron_expression = gen_choice(valid_cron_expressions()) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, interval='custom cron', cron_expression=cron_expression ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - products = [entities.Product(organization=module_org).create() for _ in range(2)] + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] + [target_sat.api.Repository(product=product).create() for product in products] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [product.id for product in products]}) @@ -624,17 +608,15 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, :expectedresults: Product was not synchronized :BZ: 1279539 - - :CaseLevel: System """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=2) validate_repo_content(repo, ['erratum', 'rpm', 'package_group'], after_sync=False) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=datetime.utcnow().replace(second=0) ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -656,15 +638,13 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, :expectedresults: Product is synchronized successfully. 
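The recurrence tests in this stretch lean on simple scheduling arithmetic: the next run of a plan is `sync_date` plus the interval, so placing `sync_date` slightly less than one interval in the past makes the plan fire a couple of minutes after the test starts. A worked sketch with the values used below (the exact `sync_date` expression is an assumption; only the `interval` and `delay` constants are visible in the hunk):

    from datetime import datetime, timedelta

    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60      # how soon the next recurrence should fire
    # next_run = sync_date + interval = (now - interval + delay) + interval = now + delay
    sync_date = datetime.utcnow() - timedelta(seconds=interval - delay)

The daily and weekly tests further down shift the same computation by `timedelta(days=1)` and `timedelta(weeks=1)` respectively.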
:BZ: 1279539 - - :CaseLevel: System """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='hourly', @@ -702,13 +682,11 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques :expectedresults: Product is synchronized successfully. - :CaseLevel: System - :BZ: 1655595, 1695733 """ delay = 2 * 60 # delay for sync date in seconds - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=1) @@ -717,7 +695,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -753,15 +731,15 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque :expectedresults: Products are synchronized successfully. - :CaseLevel: System - :BZ: 1695733 """ # Test with multiple products and multiple repos needs more delay. 
delay = 8 * 60 # delay for sync date in seconds - products = [entities.Product(organization=module_org).create() for _ in range(2)] + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] repos = [ - entities.Repository(product=product).create() for product in products for _ in range(2) + target_sat.api.Repository(product=product).create() + for product in products + for _ in range(2) ] # Verify products have not been synced yet logger.info( @@ -775,7 +753,7 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -818,8 +796,6 @@ def test_positive_synchronize_rh_product_past_sync_date( :customerscenario: true :BZ: 1279539, 1879537 - - :CaseLevel: System """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 @@ -832,9 +808,9 @@ def test_positive_synchronize_rh_product_past_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'], organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() - sync_plan = entities.SyncPlan( + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', @@ -865,7 +841,7 @@ def test_positive_synchronize_rh_product_past_sync_date( # Add disassociate RH product from sync plan check for BZ#1879537 assert len(sync_plan.read().product) == 1 # Disable the reposet - reposet = entities.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] + reposet = target_sat.api.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] reposet.disable(data={'basearch': 'x86_64', 'releasever': None, 'product_id': product.id}) # Assert that the Sync Plan now has no product associated with it assert len(sync_plan.read().product) == 0 @@ -883,8 +859,6 @@ def test_positive_synchronize_rh_product_future_sync_date( :id: 6697a00f-2181-4c2b-88eb-2333268d780b :expectedresults: Product is synchronized successfully. 
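The past-sync-date RH product hunk above also carries the BZ#1879537 regression check: disabling the product's repository set must detach the Red Hat product from the sync plan. Condensed, with constants from `robottelo.constants` as imported in this file:

    # After the plan has synced, the RH product is still attached...
    assert len(sync_plan.read().product) == 1
    # ...until its repository set is disabled, which should detach it.
    reposet = target_sat.api.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0]
    reposet.disable(data={'basearch': 'x86_64', 'releasever': None, 'product_id': product.id})
    assert len(sync_plan.read().product) == 0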
- - :CaseLevel: System """ delay = 2 * 60 # delay for sync date in seconds org = function_entitlement_manifest_org @@ -896,12 +870,12 @@ def test_positive_synchronize_rh_product_future_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'], organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -941,15 +915,13 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques :id: d60e33a0-f75c-498e-9e6f-0a2025295a9d :expectedresults: Product is synchronized successfully. - - :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(days=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='daily', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -986,15 +958,13 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque :expectedresults: Product is synchronized successfully. :BZ: 1396647 - - :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(weeks=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='weekly', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -1021,18 +991,17 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque @pytest.mark.tier2 -def test_positive_delete_one_product(module_org): +def test_positive_delete_one_product(module_org, target_sat): """Create a sync plan with one product and delete it. :id: e565c464-33e2-4bca-8eca-15d5a7d4b155 :expectedresults: A sync plan is created with one product and sync plan can be deleted. 
- - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - product = entities.Product(organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() + target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) sync_plan.delete() with pytest.raises(HTTPError): @@ -1040,18 +1009,17 @@ def test_positive_delete_one_product(module_org): @pytest.mark.tier2 -def test_positive_delete_products(module_org): +def test_positive_delete_products(module_org, target_sat): """Create a sync plan with two products and delete them. :id: f21bd57f-369e-4acd-a492-5532349a3804 :expectedresults: A sync plan is created with one product and sync plan can be deleted. - - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = target_sat.api.SyncPlan(organization=module_org).create() + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] + [target_sat.api.Repository(product=product).create() for product in products] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) sync_plan.delete() with pytest.raises(HTTPError): @@ -1060,19 +1028,17 @@ def test_positive_delete_products(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product(module_org): +def test_positive_delete_synced_product(module_org, module_target_sat): """Create a sync plan with one synced product and delete it. :id: 195d8fec-1fa0-42ab-84a5-32dd81a285ca :expectedresults: A sync plan is created with one synced product and sync plan can be deleted. - - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() sync_plan.delete() @@ -1082,7 +1048,7 @@ def test_positive_delete_synced_product(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product_custom_cron(module_org): +def test_positive_delete_synced_product_custom_cron(module_org, module_target_sat): """Create a sync plan with custom cron with one synced product and delete it. @@ -1090,16 +1056,14 @@ def test_positive_delete_synced_product_custom_cron(module_org): :expectedresults: A sync plan is created with one synced product and sync plan can be deleted. 
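The custom cron tests just below pair `interval='custom cron'` with a `cron_expression`, the one interval that needs this companion field (the `if interval == SYNC_INTERVAL['custom']` branches earlier in the file exist for the same reason). A minimal creation sketch using the data factory helpers this module already imports:

    from fauxfactory import gen_choice
    from robottelo.utils.datafactory import valid_cron_expressions

    sync_plan = module_target_sat.api.SyncPlan(
        organization=module_org,
        interval='custom cron',
        # A 'custom cron' plan is not valid without a cron expression.
        cron_expression=gen_choice(valid_cron_expressions()),
    ).create()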
- - :CaseLevel: Integration """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( organization=module_org, interval='custom cron', cron_expression=gen_choice(valid_cron_expressions()), ).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() product = product.read() diff --git a/tests/foreman/api/test_template_combination.py b/tests/foreman/api/test_template_combination.py index c9f8c1360ad..7e9d5f11be8 100644 --- a/tests/foreman/api/test_template_combination.py +++ b/tests/foreman/api/test_template_combination.py @@ -4,97 +4,55 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ProvisioningTemplates :Team: Rocket -:TestType: Functional - -:Upstream: No """ import pytest -from nailgun import entities from requests.exceptions import HTTPError -@pytest.fixture(scope='module') -def module_hostgroup_template(module_hostgroup): - """Create hostgroup to be used on TemplateCombination creation""" - yield {'hostgroup': module_hostgroup} - # Delete hostgroup used on TemplateCombination creation - module_hostgroup.delete() +@pytest.mark.tier1 +@pytest.mark.upgrade +def test_positive_end_to_end_template_combination(request, module_target_sat, module_hostgroup): + """Assert API template combination get/delete method works. + :id: 3a5cb370-c5f6-11e6-bb2f-68f72889dc7g + + :Setup: save a template combination + + :expectedresults: TemplateCombination can be created, retrieved and deleted through API + + :CaseImportance: Critical -@pytest.fixture(scope='function') -def function_template_combination(module_hostgroup_template): - """Create ProvisioningTemplate and TemplateConfiguration for each test - and at the end delete ProvisioningTemplate used on tests + :BZ: 1369737 """ - template = entities.ProvisioningTemplate( + template = module_target_sat.api.ProvisioningTemplate( snippet=False, template_combinations=[ { - 'hostgroup_id': module_hostgroup_template['hostgroup'].id, + 'hostgroup_id': module_hostgroup.id, } ], ) template = template.create() template_combination_dct = template.template_combinations[0] - template_combination = entities.TemplateCombination( + template_combination = module_target_sat.api.TemplateCombination( id=template_combination_dct['id'], provisioning_template=template, - hostgroup=module_hostgroup_template['hostgroup'], + hostgroup=module_hostgroup, ) - yield {'template': template, 'template_combination': template_combination} - # Clean combination if it is not already deleted - try: - template_combination.delete() - except HTTPError: - pass - template.delete() - - -@pytest.mark.tier1 -def test_positive_get_combination(function_template_combination, module_hostgroup_template): - """Assert API template combination get method works. 
- - :id: 2447674e-c37e-11e6-93cb-68f72889dc7f - - :Setup: save a template combination - - :expectedresults: TemplateCombination can be retrieved through API + # GET + combination = template_combination.read() + assert template.id == combination.provisioning_template.id + assert module_hostgroup.id == combination.hostgroup.id - :CaseImportance: Critical - - :BZ: 1369737 - """ - combination = function_template_combination['template_combination'].read() - assert isinstance(combination, entities.TemplateCombination) - assert function_template_combination['template'].id == combination.provisioning_template.id - assert module_hostgroup_template['hostgroup'].id == combination.hostgroup.id - - -@pytest.mark.tier1 -@pytest.mark.upgrade -def test_positive_delete_combination(function_template_combination): - """Assert API template combination delete method works. - - :id: 3a5cb370-c5f6-11e6-bb2f-68f72889dc7f - - :Setup: save a template combination - - :expectedresults: TemplateCombination can be deleted through API - - :CaseImportance: Critical - - :BZ: 1369737 - """ - combination = function_template_combination['template_combination'].read() - assert isinstance(combination, entities.TemplateCombination) - assert 1 == len(function_template_combination['template'].read().template_combinations) + # DELETE + assert len(template.read().template_combinations) == 1 combination.delete() with pytest.raises(HTTPError): combination.read() - assert 0 == len(function_template_combination['template'].read().template_combinations) + assert len(template.read().template_combinations) == 0 + template.delete() + module_hostgroup.delete() diff --git a/tests/foreman/api/test_templatesync.py b/tests/foreman/api/test_templatesync.py index de0ba3b4175..bda401ef373 100644 --- a/tests/foreman/api/test_templatesync.py +++ b/tests/foreman/api/test_templatesync.py @@ -4,41 +4,33 @@ :CaseAutomation: Automated -:CaseLevel: Integration - :CaseComponent: TemplatesPlugin :Team: Endeavour -:TestType: Functional - -:Upstream: No """ import base64 import json import time +from fauxfactory import gen_string import pytest import requests -from fauxfactory import gen_string -from nailgun import entities from robottelo.config import settings -from robottelo.constants import FOREMAN_TEMPLATE_IMPORT_API_URL -from robottelo.constants import FOREMAN_TEMPLATE_IMPORT_URL -from robottelo.constants import FOREMAN_TEMPLATE_ROOT_DIR -from robottelo.constants import FOREMAN_TEMPLATE_TEST_TEMPLATE +from robottelo.constants import ( + FOREMAN_TEMPLATE_IMPORT_API_URL, + FOREMAN_TEMPLATE_IMPORT_URL, + FOREMAN_TEMPLATE_ROOT_DIR, + FOREMAN_TEMPLATE_TEST_TEMPLATE, +) from robottelo.logging import logger - git = settings.git class TestTemplateSyncTestCase: - """Implements TemplateSync tests from API - - :CaseLevel: Acceptance - """ + """Implements TemplateSync tests from API""" @pytest.fixture(scope='module', autouse=True) def setUpClass(self, module_target_sat): @@ -66,13 +58,15 @@ def setUpClass(self, module_target_sat): ) @pytest.mark.tier2 - def test_positive_import_filtered_templates_from_git(self, module_org, module_location): + def test_positive_import_filtered_templates_from_git( + self, module_org, module_location, module_target_sat + ): """Assure only templates with a given filter regex are pulled from git repo. :id: 628a95d6-7a4e-4e56-ad7b-d9fecd34f765 - :Steps: + :steps: 1. 
Using nailgun or direct API call import only the templates matching with regex e.g: `^atomic.*` refer to: `/apidoc/v2/template/import.html` @@ -90,7 +84,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo :CaseImportance: High """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -104,7 +98,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(True) assert imported_count == 8 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -113,7 +107,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptemplates) == 5 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -122,7 +116,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptables) == 1 - jtemplates = entities.JobTemplate().search( + jtemplates = module_target_sat.api.JobTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -131,7 +125,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(jtemplates) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -142,13 +136,13 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo assert len(rtemplates) == 1 @pytest.mark.tier2 - def test_import_filtered_templates_from_git_with_negate(self, module_org): + def test_import_filtered_templates_from_git_with_negate(self, module_org, module_target_sat): """Assure templates with a given filter regex are NOT pulled from git repo. :id: a6857454-249b-4a2e-9b53-b5d7b4eb34e3 - :Steps: + :steps: 1. 
Using nailgun or direct API call import the templates NOT matching with regex e.g: `^freebsd.*` refer to: `/apidoc/v2/template/import.html` using the @@ -161,7 +155,7 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -175,15 +169,15 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(False) assert not_imported_count == 9 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptemplates) == 6 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptables) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(rtemplates) == 1 @@ -192,7 +186,7 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): def test_import_template_with_puppet(self, parametrized_puppet_sat): """Importing puppet templates with enabled/disabled puppet module - :Steps: + :steps: 1. Have enabled(disabled) puppet module 2. Import template containing puppet 3. Check if template was imported @@ -241,7 +235,7 @@ def test_positive_import_and_associate( :id: 04a14a56-bd71-412b-b2da-4b8c3991c401 - :Steps: + :steps: 1. Create new taxonomies, lets say org X and loc Y. 2. From X and Y taxonomies scope, Import template1 as associate 'never', where the template contains the metadata anything other than X and Y taxonomies. 
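The import tests above share one pattern: POST the repo details to the template-import endpoint, then tally the per-template `imported` flags in the response; the `negate` knob inverts the regex filter so matching templates are skipped instead of selected. A minimal sketch of that pattern, assuming a Satellite fixture exposing the nailgun-backed `api` namespace (robottelo's `target_sat`-style fixtures) and the `FOREMAN_TEMPLATE_IMPORT_URL` constant imported above; the helper name is illustrative:

from fauxfactory import gen_string

def import_by_filter(target_sat, org, repo_url, regex, negate=False):
    """Import templates whose names match (or, with negate=True, do not
    match) the given regex, and tally the per-template results."""
    result = target_sat.api.Template().imports(
        data={
            'repo': repo_url,
            'branch': 'automation',
            'filter': regex,
            'negate': negate,  # True inverts the filter
            'organization_ids': [org.id],
            'prefix': gen_string('alpha'),
        }
    )
    flags = [t['imported'] for t in result['message']['templates']]
    return flags.count(True), flags.count(False)  # (imported, skipped)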
@@ -266,7 +260,7 @@ def test_positive_import_and_associate( prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir # Associate Never - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -276,7 +270,7 @@ def test_positive_import_and_associate( } ) # - Template 1 imported in X and Y taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -287,7 +281,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 1 not imported in metadata taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -301,7 +295,7 @@ def test_positive_import_and_associate( f'cp {dir_path}/example_template.erb {dir_path}/another_template.erb && ' f'sed -ie "s/name: .*/name: another_template/" {dir_path}/another_template.erb' ) - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -311,7 +305,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -322,7 +316,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 should be imported in importing taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -333,7 +327,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # Associate Always - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -343,7 +337,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -354,7 +348,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -366,12 +360,12 @@ def test_positive_import_and_associate( assert len(ptemplate[0].read().organization) == 1 @pytest.mark.tier2 - def test_positive_import_from_subdirectory(self, module_org): + def test_positive_import_from_subdirectory(self, module_org, module_target_sat): """Assure templates are imported from specific repositories subdirectory :id: 8ea11a1a-165e-4834-9387-7accb4c94e77 - :Steps: + :steps: 1. 
Using nailgun or direct API call import templates specifying a git subdirectory e.g: `-d {'dirname': 'test_sub_dir'}` in POST body @@ -383,7 +377,7 @@ def test_positive_import_from_subdirectory(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -409,7 +403,7 @@ def test_positive_export_filtered_templates_to_localdir( :id: b7c98b75-4dd1-4b6a-b424-35b0f48c25db - :Steps: + :steps: 1. Using nailgun or direct API call export only the templates matching with regex e.g: `robottelo` refer to: `/apidoc/v2/template/export.html` @@ -422,7 +416,7 @@ def test_positive_export_filtered_templates_to_localdir( :CaseImportance: Low """ dir_name, dir_path = create_import_export_local_dir - exported_temps = entities.Template().exports( + exported_temps = target_sat.api.Template().exports( data={ 'repo': FOREMAN_TEMPLATE_ROOT_DIR, 'dirname': dir_name, @@ -445,7 +439,7 @@ def test_positive_export_filtered_templates_negate( :id: 2f8ad8f3-f02b-4b2d-85af-423a228976f3 - :Steps: + :steps: 1. Using nailgun or direct API call export templates matching that does not matches regex e.g: `robottelo` using `negate` option. @@ -458,7 +452,7 @@ def test_positive_export_filtered_templates_negate( """ # Export some filtered templates to local dir _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -480,7 +474,7 @@ def test_positive_export_and_import_with_metadata( :id: ba8a34ce-c2c6-4889-8729-59714c0a4b19 - :Steps: + :steps: 1. Create a template in local directory and specify Org/Loc. 2. Use import to pull this specific template (using filter). 3. 
Using nailgun or direct API call @@ -497,7 +491,7 @@ def test_positive_export_and_import_with_metadata( ex_template = 'example_template.erb' prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'location_ids': [module_location.id], @@ -507,7 +501,7 @@ def test_positive_export_and_import_with_metadata( ) export_file = f'{prefix.lower()}{ex_template}' # Export same template to local dir with refreshed metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'refresh', 'repo': dir_path, @@ -521,7 +515,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 0 # Export same template to local dir with keeping metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'keep', 'repo': dir_path, @@ -535,7 +529,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 1 # Export same template to local dir with removed metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'remove', 'repo': dir_path, @@ -552,13 +546,13 @@ def test_positive_export_and_import_with_metadata( # Take Templates out of Tech Preview Feature Tests @pytest.mark.tier3 @pytest.mark.parametrize('verbose', [True, False]) - def test_positive_import_json_output_verbose(self, module_org, verbose): + def test_positive_import_json_output_verbose(self, module_org, verbose, module_target_sat): """Assert all the required fields displayed in import output when verbose is True and False :id: 74b0a701-341f-4062-9769-e5cb1a1c4792 - :Steps: + :steps: 1. Using nailgun or direct API call Impot a template with verbose `True` and `False` option @@ -574,7 +568,7 @@ def test_positive_import_json_output_verbose(self, module_org, verbose): :CaseImportance: Low """ prefix = gen_string('alpha') - templates = entities.Template().imports( + templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -610,7 +604,7 @@ def test_positive_import_json_output_changed_key_true( :id: 4b866144-822c-4786-9188-53bc7e2dd44a - :Steps: + :steps: 1. Using nailgun or direct API call Create a template and import it from a source 2. Update the template data in source location @@ -627,26 +621,26 @@ def test_positive_import_json_output_changed_key_true( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) target_sat.execute(f'echo " Updating Template data." >> {dir_path}/example_template.erb') - post_template = entities.Template().imports( + post_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(post_template['message']['templates'][0]['changed']) @pytest.mark.tier2 def test_positive_import_json_output_changed_key_false( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `changed` key returns `False` when template data gets updated :id: 64456c0c-c2c6-4a1c-a16e-54ca4a8b66d3 - :Steps: + :steps: 1. 
Using nailgun or direct API call Create a template and import it from a source 2. Don't update the template data in source location 3. Use import to pull this specific template (using filter) again @@ -662,11 +656,11 @@ def test_positive_import_json_output_changed_key_false( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) - post_template = entities.Template().imports( + post_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert not bool(post_template['message']['templates'][0]['changed']) @@ -679,7 +673,7 @@ def test_positive_import_json_output_name_key( :id: a5639368-3d23-4a37-974a-889e2ec0916e - :Steps: + :steps: 1. Using nailgun or direct API call Create a template with some name and import it from a source @@ -696,22 +690,22 @@ def test_positive_import_json_output_name_key( target_sat.execute( f'sed -ie "s/name: .*/name: {template_name}/" {dir_path}/example_template.erb' ) - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) - assert 'name' in template['message']['templates'][0].keys() + assert 'name' in template['message']['templates'][0] assert template_name == template['message']['templates'][0]['name'] @pytest.mark.tier2 def test_positive_import_json_output_imported_key( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `imported` key returns `True` on successful import :id: 5bc11163-e8f3-4744-8a76-5c16e6e46e86 - :Steps: + :steps: 1. Using nailgun or direct API call Create a template and import it from a source @@ -724,19 +718,21 @@ """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(template['message']['templates'][0]['imported']) @pytest.mark.tier2 - def test_positive_import_json_output_file_key(self, create_import_export_local_dir, module_org): + def test_positive_import_json_output_file_key( + self, create_import_export_local_dir, module_org, module_target_sat + ): """Assert template imports output `file` key returns the correct file name from where the template is imported :id: da0b094c-6dc8-4526-b115-8e08bfb05fbb - :Steps: + :steps: 1.
Using nailgun or direct API call Create a template with some name and import it from a source @@ -749,10 +745,10 @@ def test_positive_import_json_output_file_key(self, create_import_export_local_d :CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) - assert 'example_template.erb' == template['message']['templates'][0]['file'] + assert template['message']['templates'][0]['file'] == 'example_template.erb' @pytest.mark.tier2 def test_positive_import_json_output_corrupted_metadata( @@ -763,7 +759,7 @@ def test_positive_import_json_output_corrupted_metadata( :id: 6bd5bc6b-a7a2-4529-9df6-47a670cd86d8 - :Steps: + :steps: 1. Create a template with wrong syntax in metadata 2. Using nailgun or direct API call Import above template @@ -779,25 +775,25 @@ def test_positive_import_json_output_corrupted_metadata( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/<%#/$#$#@%^$^@@RT$$/" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) assert ( - 'Failed to parse metadata' == template['message']['templates'][0]['additional_errors'] + template['message']['templates'][0]['additional_errors'] == 'Failed to parse metadata' ) @pytest.mark.skip_if_open('BZ:1787355') @pytest.mark.tier2 def test_positive_import_json_output_filtered_skip_message( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output returns template import skipped info for templates whose name doesnt match the filter :id: db68b5de-7647-4568-b79c-2aec3292328a - :Steps: + :steps: 1. Using nailgun or direct API call Create template with name not matching filter @@ -811,7 +807,7 @@ def test_positive_import_json_output_filtered_skip_message( :CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -820,8 +816,8 @@ def test_positive_import_json_output_filtered_skip_message( ) assert not bool(template['message']['templates'][0]['imported']) assert ( - "Skipping, 'name' filtered out based on 'filter' and 'negate' settings" - == template['message']['templates'][0]['additional_info'] + template['message']['templates'][0]['additional_info'] + == "Skipping, 'name' filtered out based on 'filter' and 'negate' settings" ) @pytest.mark.tier2 @@ -833,7 +829,7 @@ def test_positive_import_json_output_no_name_error( :id: 259a8a3a-8749-442d-a2bc-51e9af89ce8c - :Steps: + :steps: 1. Create a template without name in metadata 2. 
Using nailgun or direct API call Import above template @@ -849,13 +845,13 @@ def test_positive_import_json_output_no_name_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/name: .*/name: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) assert ( - "No 'name' found in metadata" - == template['message']['templates'][0]['additional_errors'] + template['message']['templates'][0]['additional_errors'] + == "No 'name' found in metadata" ) @pytest.mark.tier2 @@ -867,7 +863,7 @@ def test_positive_import_json_output_no_model_error( :id: d3f1ffe4-58d7-45a8-b278-74e081dc5062 - :Steps: + :steps: 1. Create a template without model keyword in metadata 2. Using nailgun or direct API call Import above template @@ -883,13 +879,13 @@ def test_positive_import_json_output_no_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "/model: .*/d" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) assert ( - "No 'model' found in metadata" - == template['message']['templates'][0]['additional_errors'] + template['message']['templates'][0]['additional_errors'] + == "No 'model' found in metadata" ) @pytest.mark.tier2 @@ -901,7 +897,7 @@ def test_positive_import_json_output_blank_model_error( :id: 5007b12d-1cf6-49e6-8e54-a189d1a209de - :Steps: + :steps: 1. Create a template with blank model name in metadata 2. Using nailgun or direct API call Import above template @@ -917,13 +913,13 @@ def test_positive_import_json_output_blank_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/model: .*/model: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) assert ( - "Template type was not found, are you missing a plugin?" - == template['message']['templates'][0]['additional_errors'] + template['message']['templates'][0]['additional_errors'] + == "Template type was not found, are you missing a plugin?" ) @pytest.mark.tier2 @@ -934,7 +930,7 @@ def test_positive_export_json_output( :id: 141b893d-72a3-47c2-bb03-004c757bcfc9 - :Steps: + :steps: 1. 
Using nailgun or direct API call Export all the templates @@ -947,7 +943,7 @@ def test_positive_export_json_output( :CaseImportance: Low """ prefix = gen_string('alpha') - imported_templates = entities.Template().imports( + imported_templates = target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -962,14 +958,14 @@ def test_positive_export_json_output( assert imported_count == 17 # Total Count # Export some filtered templates to local dir _, dir_path = create_import_export_local_dir - exported_templates = entities.Template().exports( + exported_templates = target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': prefix} ) exported_count = [ template['exported'] for template in exported_templates['message']['templates'] ].count(True) assert exported_count == 17 - assert 'name' in exported_templates['message']['templates'][0].keys() + assert 'name' in exported_templates['message']['templates'][0] assert ( target_sat.execute( f'[ -d {dir_path}/job_templates ] && ' @@ -986,7 +982,7 @@ def test_positive_import_log_to_production(self, module_org, target_sat): :id: 19ed0e6a-ee77-4e28-86c9-49db1adec479 - :Steps: + :steps: 1. Using nailgun or direct API call Import template from a source @@ -995,11 +991,9 @@ def test_positive_import_log_to_production(self, module_org, target_sat): :Requirement: Take Templates out of tech preview - :CaseLevel: System - :CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1024,7 +1018,7 @@ def test_positive_export_log_to_production( :id: 8ae370b1-84e8-436e-a7d7-99cd0b8f45b1 - :Steps: + :steps: 1. Using nailgun or direct API call Export template to destination @@ -1033,11 +1027,9 @@ def test_positive_export_log_to_production( :Requirement: Take Templates out of tech preview - :CaseLevel: System - :CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1046,7 +1038,7 @@ def test_positive_export_log_to_production( } ) _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': 'empty'} ) time.sleep(5) @@ -1075,13 +1067,13 @@ def test_positive_export_log_to_production( ids=['non_empty_repo', 'empty_repo'], ) def test_positive_export_all_templates_to_repo( - self, module_org, git_repository, git_branch, url + self, module_org, git_repository, git_branch, url, module_target_sat ): """Assure all templates are exported if no filter is specified. :id: 0bf6fe77-01a3-4843-86d6-22db5b8adf3b - :Steps: + :steps: 1. Using nailgun export all templates to repository (ensure filters are empty) :expectedresults: @@ -1093,7 +1085,7 @@ def test_positive_export_all_templates_to_repo( :CaseImportance: Low """ - output = entities.Template().exports( + output = module_target_sat.api.Template().exports( data={ 'repo': f'{url}/{git.username}/{git_repository["name"]}', 'branch': git_branch, @@ -1117,12 +1109,12 @@ def test_positive_export_all_templates_to_repo( assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_positive_import_all_templates_from_repo(self, module_org): + def test_positive_import_all_templates_from_repo(self, module_org, module_target_sat): """Assure all templates are imported if no filter is specified. 
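The export counterpart mirrors the import flow: each template in the response carries an `exported` flag, and the files land in per-kind subdirectories (e.g. `job_templates`, as checked above) under the export root. A condensed sketch of the export-and-verify flow, assuming the same Satellite fixture and a directory already created on the server:

def export_and_count(target_sat, org, dir_path, name_filter):
    """Export matching templates to dir_path and return how many succeeded."""
    exported = target_sat.api.Template().exports(
        data={'repo': dir_path, 'organization_ids': [org.id], 'filter': name_filter}
    )
    count = [t['exported'] for t in exported['message']['templates']].count(True)
    # Spot-check the on-disk layout over SSH; execute() returns an object
    # whose `status` attribute is the shell exit code (0 == success).
    result = target_sat.execute(f'find {dir_path} -name "*.erb" | wc -l')
    assert result.status == 0
    return count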
:id: 95ac9543-d989-44f4-b4d9-18f20a0b58b9 - :Steps: + :steps: 1. Using nailgun import all templates from repository (ensure filters are empty) :expectedresults: @@ -1130,7 +1122,7 @@ def test_positive_import_all_templates_from_repo(self, module_org): :CaseImportance: Low """ - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1149,12 +1141,12 @@ def test_positive_import_all_templates_from_repo(self, module_org): assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_negative_import_locked_template(self, module_org): + def test_negative_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are not pulled from repository. :id: 88e21cad-448e-45e0-add2-94493a1319c5 - :Steps: + :steps: 1. Using nailgun try to import a locked template :expectedresults: @@ -1163,7 +1155,7 @@ def test_negative_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1175,7 +1167,7 @@ def test_negative_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # try to import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1192,18 +1184,18 @@ def test_negative_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template @pytest.mark.tier2 - def test_positive_import_locked_template(self, module_org): + def test_positive_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are pulled from repository while using force parameter. :id: 936c91cc-1947-45b0-8bf0-79ba4be87b97 - :Steps: + :steps: 1. 
Using nailgun try to import a locked template with force parameter :expectedresults: @@ -1212,7 +1204,7 @@ def test_positive_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1224,7 +1216,7 @@ def test_positive_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # force import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1243,7 +1235,7 @@ def test_positive_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index 05a6a649b92..75dd19e6f55 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -8,46 +8,40 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import json import re -import pytest -from nailgun import entities from nailgun.config import ServerConfig +import pytest from requests.exceptions import HTTPError from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE +from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE, DataFile from robottelo.utils import gen_ssh_keypairs -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import invalid_emails_list -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import invalid_usernames_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_emails_list -from robottelo.utils.datafactory import valid_usernames_list +from robottelo.utils.datafactory import ( + gen_string, + generate_strings_list, + invalid_emails_list, + invalid_names_list, + invalid_usernames_list, + parametrized, + valid_data_list, + valid_emails_list, + valid_usernames_list, +) @pytest.fixture(scope='module') -def create_user(): +def create_user(module_target_sat): """Create a user""" - return entities.User().create() + return module_target_sat.api.User().create() class TestUser: @@ -55,7 +49,7 @@ class TestUser: @pytest.mark.tier1 @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_with_username(self, username): + def test_positive_create_with_username(self, username, target_sat): """Create User for all variations of Username :id: a9827cda-7f6d-4785-86ff-3b6969c9c00a @@ -66,14 +60,14 @@ def test_positive_create_with_username(self, username): :CaseImportance: Critical """ - user = entities.User(login=username).create() + user = target_sat.api.User(login=username).create() assert user.login == username @pytest.mark.tier1 @pytest.mark.parametrize( 'firstname', 
**parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_firstname(self, firstname): + def test_positive_create_with_firstname(self, firstname, target_sat): """Create User for all variations of First Name :id: 036bb958-227c-420c-8f2b-c607136f12e0 @@ -86,14 +80,14 @@ def test_positive_create_with_firstname(self, firstname): """ if len(str.encode(firstname)) > 50: firstname = firstname[:20] - user = entities.User(firstname=firstname).create() + user = target_sat.api.User(firstname=firstname).create() assert user.firstname == firstname @pytest.mark.tier1 @pytest.mark.parametrize( 'lastname', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_lastname(self, lastname): + def test_positive_create_with_lastname(self, lastname, target_sat): """Create User for all variations of Last Name :id: 95d3b571-77e7-42a1-9c48-21f242e8cdc2 @@ -106,12 +100,12 @@ def test_positive_create_with_lastname(self, lastname): """ if len(str.encode(lastname)) > 50: lastname = lastname[:20] - user = entities.User(lastname=lastname).create() + user = target_sat.api.User(lastname=lastname).create() assert user.lastname == lastname @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_create_with_email(self, mail): + def test_positive_create_with_email(self, mail, target_sat): """Create User for all variations of Email :id: e68caf51-44ba-4d32-b79b-9ab9b67b9590 @@ -122,12 +116,12 @@ def test_positive_create_with_email(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = target_sat.api.User(mail=mail).create() assert user.mail == mail @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description): + def test_positive_create_with_description(self, description, target_sat): """Create User for all variations of Description :id: 1463d71c-b77d-4223-84fa-8370f77b3edf @@ -138,14 +132,14 @@ def test_positive_create_with_description(self, description): :CaseImportance: Critical """ - user = entities.User(description=description).create() + user = target_sat.api.User(description=description).create() assert user.description == description @pytest.mark.tier1 @pytest.mark.parametrize( 'password', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_password(self, password): + def test_positive_create_with_password(self, password, target_sat): """Create User for all variations of Password :id: 53d0a419-0730-4f7d-9170-d855adfc5070 @@ -156,13 +150,13 @@ def test_positive_create_with_password(self, password): :CaseImportance: Critical """ - user = entities.User(password=password).create() + user = target_sat.api.User(password=password).create() assert user is not None @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_delete(self, mail): + def test_positive_delete(self, mail, target_sat): """Create random users and then delete it. 
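The delete test relies on nailgun's standard contract: `read()` on a deleted entity raises `requests.exceptions.HTTPError` once the server answers 404. A self-contained sketch of the round trip, assuming the `target_sat` fixture from robottelo's conftest:

import pytest
from requests.exceptions import HTTPError

def test_user_delete_roundtrip(target_sat):
    user = target_sat.api.User(mail='user@example.com').create()
    user.delete()
    # The record is gone on the server, so a re-read must fail
    with pytest.raises(HTTPError):
        user.read()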
:id: df6059e7-85c5-42fa-99b5-b7f1ef809f52 @@ -173,7 +167,7 @@ def test_positive_delete(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = target_sat.api.User(mail=mail).create() user.delete() with pytest.raises(HTTPError): user.read() @@ -208,8 +202,8 @@ def test_negative_update_username(self, create_user, login): :CaseImportance: Critical """ + create_user.login = login with pytest.raises(HTTPError): - create_user.login = login create_user.update(['login']) @pytest.mark.tier1 @@ -284,8 +278,8 @@ def test_negative_update_email(self, create_user, mail): :CaseImportance: Critical """ + create_user.mail = mail with pytest.raises(HTTPError): - create_user.mail = mail create_user.update(['mail']) @pytest.mark.tier1 @@ -307,7 +301,7 @@ def test_positive_update_description(self, create_user, description): @pytest.mark.tier1 @pytest.mark.parametrize('admin_enable', [True, False]) - def test_positive_update_admin(self, admin_enable): + def test_positive_update_admin(self, admin_enable, target_sat): """Update a user and provide the ``admin`` attribute. :id: b5fedf65-37f5-43ca-806a-ac9a7979b19d @@ -318,13 +312,13 @@ def test_positive_update_admin(self, admin_enable): :CaseImportance: Critical """ - user = entities.User(admin=admin_enable).create() + user = target_sat.api.User(admin=admin_enable).create() user.admin = not admin_enable assert user.update().admin == (not admin_enable) @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(invalid_emails_list())) - def test_negative_create_with_invalid_email(self, mail): + def test_negative_create_with_invalid_email(self, mail, target_sat): """Create User with invalid Email Address :id: ebbd1f5f-e71f-41f4-a956-ce0071b0a21c @@ -336,11 +330,11 @@ def test_negative_create_with_invalid_email(self, mail): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(mail=mail).create() + target_sat.api.User(mail=mail).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_usernames_list())) - def test_negative_create_with_invalid_username(self, invalid_name): + def test_negative_create_with_invalid_username(self, invalid_name, target_sat): """Create User with invalid Username :id: aaf157a9-0375-4405-ad87-b13970e0609b @@ -352,11 +346,11 @@ def test_negative_create_with_invalid_username(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(login=invalid_name).create() + target_sat.api.User(login=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_firstname(self, invalid_name): + def test_negative_create_with_invalid_firstname(self, invalid_name, target_sat): """Create User with invalid Firstname :id: cb1ca8a9-38b1-4d58-ae32-915b47b91657 @@ -368,11 +362,11 @@ def test_negative_create_with_invalid_firstname(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(firstname=invalid_name).create() + target_sat.api.User(firstname=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_lastname(self, invalid_name): + def test_negative_create_with_invalid_lastname(self, invalid_name, target_sat): """Create User with invalid Lastname :id: 59546d26-2b6b-400b-990f-0b5d1c35004e @@ -384,10 +378,10 @@ def test_negative_create_with_invalid_lastname(self, invalid_name): 
:CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(lastname=invalid_name).create() + target_sat.api.User(lastname=invalid_name).create() @pytest.mark.tier1 - def test_negative_create_with_blank_authorized_by(self): + def test_negative_create_with_blank_authorized_by(self, target_sat): """Create User with blank authorized by :id: 1fe2d1e3-728c-4d89-97ae-3890e904f413 @@ -397,20 +391,58 @@ def test_negative_create_with_blank_authorized_by(self): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(auth_source='').create() + target_sat.api.User(auth_source='').create() + + @pytest.mark.tier1 + def test_positive_table_preferences(self, module_target_sat): + """Create a user, create their Table Preferences, read it + + :id: 10fda3f1-4fee-461b-a413-a4d1fa098a94 + + :expectedresults: Table Preferences can be accessed + + :CaseImportance: Medium + + :customerscenario: true + + :BZ: 1757394 + """ + existing_roles = module_target_sat.api.Role().search() + password = gen_string('alpha') + user = module_target_sat.api.User(role=existing_roles, password=password).create() + name = "hosts" + columns = ["power_status", "name", "comment"] + sc = ServerConfig( + auth=(user.login, password), url=module_target_sat.url, verify=settings.server.verify_ca + ) + module_target_sat.api.TablePreferences( + server_config=sc, user=user, name=name, columns=columns + ).create() + table_preferences = module_target_sat.api.TablePreferences( + server_config=sc, user=user + ).search() + assert len(table_preferences) == 1 + tp = table_preferences[0] + assert hasattr(tp, 'name') + assert hasattr(tp, 'columns') + assert tp.name == 'hosts' + assert len(tp.columns) == len(columns) + for column in columns: + assert column in tp.columns class TestUserRole: """Test associations between users and roles.""" @pytest.fixture(scope='class') - def make_roles(self): + def make_roles(self, class_target_sat): """Create two roles.""" - return [entities.Role().create() for _ in range(2)] + return [class_target_sat.api.Role().create() for _ in range(2)] @pytest.mark.tier1 + @pytest.mark.build_sanity @pytest.mark.parametrize('number_of_roles', range(1, 3)) - def test_positive_create_with_role(self, make_roles, number_of_roles): + def test_positive_create_with_role(self, make_roles, number_of_roles, class_target_sat): """Create a user with the ``role`` attribute. 
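The table-preferences test introduced above also documents the recipe for calling the API as a specific user rather than the default admin: build a `ServerConfig` with that user's credentials and pass it to each entity via `server_config`. A trimmed sketch under the same fixture assumptions, with `verify_ca` standing in for `settings.server.verify_ca`:

from fauxfactory import gen_string
from nailgun.config import ServerConfig

def create_table_preference(target_sat, verify_ca=True):
    password = gen_string('alpha')
    user = target_sat.api.User(role=target_sat.api.Role().search(), password=password).create()
    # All subsequent calls authenticate as the new user, not as admin
    sc = ServerConfig(auth=(user.login, password), url=target_sat.url, verify=verify_ca)
    target_sat.api.TablePreferences(
        server_config=sc, user=user, name='hosts', columns=['name', 'comment']
    ).create()
    return target_sat.api.TablePreferences(server_config=sc, user=user).search()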
:id: 32daacf1-eed4-49b1-81e1-ab0a5b0113f2 @@ -424,7 +456,7 @@ def test_positive_create_with_role(self, make_roles, number_of_roles): :CaseImportance: Critical """ chosen_roles = make_roles[:number_of_roles] - user = entities.User(role=chosen_roles).create() + user = class_target_sat.api.User(role=chosen_roles).create() assert len(user.role) == number_of_roles assert {role.id for role in user.role} == {role.id for role in chosen_roles} @@ -454,14 +486,14 @@ class TestSshKeyInUser: """Implements the SSH Key in User Tests""" @pytest.fixture(scope='class') - def create_user(self): + def create_user(self, class_target_sat): """Create an user and import different keys from data json file""" - user = entities.User().create() + user = class_target_sat.api.User().create() data_keys = json.loads(DataFile.SSH_KEYS_JSON.read_bytes()) return dict(user=user, data_keys=data_keys) @pytest.mark.tier1 - def test_positive_CRD_ssh_key(self): + def test_positive_CRD_ssh_key(self, class_target_sat): """SSH Key can be added to User :id: d00905f6-3a70-4e2f-a5ae-fcac18274bb7 @@ -477,18 +509,18 @@ def test_positive_CRD_ssh_key(self): :CaseImportance: Critical """ - user = entities.User().create() + user = class_target_sat.api.User().create() ssh_name = gen_string('alpha') ssh_key = gen_ssh_keypairs()[1] - user_sshkey = entities.SSHKey(user=user, name=ssh_name, key=ssh_key).create() + user_sshkey = class_target_sat.api.SSHKey(user=user, name=ssh_name, key=ssh_key).create() assert ssh_name == user_sshkey.name assert ssh_key == user_sshkey.key user_sshkey.delete() - result = entities.SSHKey(user=user).search() + result = class_target_sat.api.SSHKey(user=user).search() assert len(result) == 0 @pytest.mark.tier1 - def test_negative_create_ssh_key(self, create_user): + def test_negative_create_ssh_key(self, create_user, target_sat): """Invalid ssh key can not be added in User Template :id: e924ff03-8b2c-4ab9-a054-ea491413e143 @@ -508,7 +540,7 @@ def test_negative_create_ssh_key(self, create_user): """ invalid_sshkey = gen_string('alpha', length=256) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_sshkey ).create() assert re.search('Key is not a valid public ssh key', context.value.response.text) @@ -517,7 +549,7 @@ def test_negative_create_ssh_key(self, create_user): assert re.search('Length could not be calculated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_invalid_length_ssh_key(self, create_user): + def test_negative_create_invalid_length_ssh_key(self, create_user, target_sat): """Attempt to add SSH key that has invalid length :id: 899f0c46-c7fe-4610-80f1-1add4a9cbc26 @@ -534,14 +566,14 @@ def test_negative_create_invalid_length_ssh_key(self, create_user): """ invalid_length_key = create_user['data_keys']['ssh_keys']['invalid_ssh_key'] with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_length_key ).create() assert re.search('Length could not be calculated', context.value.response.text) assert not re.search('Fingerprint could not be generated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_ssh_key_with_invalid_name(self, create_user): + def test_negative_create_ssh_key_with_invalid_name(self, create_user, target_sat): """Attempt to add SSH key that has invalid name length :id: e1e17839-a392-45bb-bb1e-28d3cd9dba1c @@ -557,14 +589,14 @@ def 
test_negative_create_ssh_key_with_invalid_name(self, create_user): """ invalid_ssh_key_name = gen_string('alpha', length=300) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=invalid_ssh_key_name, key=gen_ssh_keypairs()[1] ).create() assert re.search("Name is too long", context.value.response.text) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_multiple_ssh_key_types(self, create_user): + def test_positive_create_multiple_ssh_key_types(self, create_user, class_target_sat): """Multiple types of ssh keys can be added to user :id: d1ffa908-dc86-40c8-b6f0-20650cc67046 @@ -581,15 +613,15 @@ def test_positive_create_multiple_ssh_key_types(self, create_user): dsa = create_user['data_keys']['ssh_keys']['dsa'] ecdsa = create_user['data_keys']['ssh_keys']['ecdsa'] ed = create_user['data_keys']['ssh_keys']['ed'] - user = entities.User().create() + user = class_target_sat.api.User().create() for key in [rsa, dsa, ecdsa, ed]: - entities.SSHKey(user=user, name=gen_string('alpha'), key=key).create() - user_sshkeys = entities.SSHKey(user=user).search() + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=key).create() + user_sshkeys = class_target_sat.api.SSHKey(user=user).search() assert len(user_sshkeys) == 4 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_ssh_key_in_host_enc(self, target_sat): + def test_positive_ssh_key_in_host_enc(self, class_target_sat): """SSH key appears in host ENC output :id: 4b70a950-e777-4b2d-a83d-29279715fe6d @@ -603,15 +635,16 @@ def test_positive_ssh_key_in_host_enc(self, target_sat): :expectedresults: SSH key should be added to host ENC output - :CaseLevel: Integration """ - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - user = entities.User(organization=[org], location=[loc]).create() + org = class_target_sat.api.Organization().create() + loc = class_target_sat.api.Location(organization=[org]).create() + user = class_target_sat.api.User(organization=[org], location=[loc]).create() ssh_key = gen_ssh_keypairs()[1] - entities.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() - host = entities.Host(owner=user, owner_type='User', organization=org, location=loc).create() - sshkey_updated_for_host = f'{ssh_key} {user.login}@{target_sat.hostname}' + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() + host = class_target_sat.api.Host( + owner=user, owner_type='User', organization=org, location=loc + ).create() + sshkey_updated_for_host = f'{ssh_key} {user.login}@{class_target_sat.hostname}' host_enc_key = host.enc()['data']['parameters']['ssh_authorized_keys'] assert sshkey_updated_for_host == host_enc_key[0] @@ -634,7 +667,7 @@ def create_ldap(self, ad_data, module_target_sat): ldap_user_passwd=ad_data['ldap_user_passwd'], authsource=module_target_sat.api.AuthSourceLDAP( onthefly_register=True, - account=ad_data['ldap_user_name'], + account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], base_dn=ad_data['base_dn'], groups_base=ad_data['group_base_dn'], @@ -661,7 +694,7 @@ def create_ldap(self, ad_data, module_target_sat): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_in_ldap_mode(self, username, create_ldap): + def test_positive_create_in_ldap_mode(self, username, create_ldap, target_sat): """Create User in ldap 
mode :id: 6f8616b1-5380-40d2-8678-7c4434050cfb @@ -670,16 +703,15 @@ def test_positive_create_in_ldap_mode(self, username, create_ldap): :expectedresults: User is created without specifying the password - :CaseLevel: Integration """ - user = entities.User( + user = target_sat.api.User( login=username, auth_source=create_ldap['authsource'], password='' ).create() assert user.login == username @pytest.mark.tier3 - def test_positive_ad_basic_no_roles(self, create_ldap): - """Login with LDAP Auth- AD for user with no roles/rights + def test_positive_ad_basic_no_roles(self, create_ldap, target_sat): + """Login with LDAP Auth AD for user with no roles/rights :id: 3910c6eb-6eff-4ab7-a50d-ba40f5c24c08 @@ -687,21 +719,20 @@ def test_positive_ad_basic_no_roles(self, create_ldap): :steps: Login to server with an AD user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access target_sat.api. - :CaseLevel: System """ sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(server_config=sc).search() @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_access_entities_from_ldap_org_admin(self, create_ldap): + def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_target_sat): """LDAP User can access resources within its taxonomies if assigned role has permission for same taxonomies @@ -709,7 +740,7 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap): :steps: - 1. Create Org Admin and assign taxonomies to it + 1. Create Org Admin role and assign taxonomies to it 2. Create LDAP user with same taxonomies as role above 3. Assign Org Admin role to user above 4. Login with LDAP user and attempt to access resources @@ -717,11 +748,22 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap): :expectedresults: LDAP User should be able to access all the resources and permissions in taxonomies selected in Org Admin role - :CaseLevel: System """ + # Workaround issue where, in an upgrade template, there is already + # some auth source present with this user. That auth source instance + # doesn't really run and attempting to login as that user + # leads to ISE 500 (which is itself a bug, the error should be handled, it is + # reported as BZ2240205). 
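For the 'no roles' case above, the assertion boils down to: authenticate with the LDAP user's credentials and expect any entity read to be rejected with an HTTP error. A minimal sketch, assuming a `create_ldap` fixture shaped like the one defined earlier in this file:

import pytest
from nailgun.config import ServerConfig
from requests.exceptions import HTTPError

def assert_no_api_access(target_sat, create_ldap, verify_ca=True):
    sc = ServerConfig(
        auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']),
        url=create_ldap['sat_url'],
        verify=verify_ca,
    )
    # The user can authenticate but holds no role, so reads are forbidden
    with pytest.raises(HTTPError):
        target_sat.api.Architecture(server_config=sc).search()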
+ for user in module_target_sat.api.User().search( + query={'search': f'login={create_ldap["ldap_user_name"]}'} + ): + user.delete() + role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = module_target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = module_target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -733,27 +775,27 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap): sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search( + module_target_sat.api.Architecture(server_config=sc).search() + user = module_target_sat.api.User().search( query={'search': 'login={}'.format(create_ldap['ldap_user_name'])} )[0] - user.role = [entities.Role(id=org_admin['id']).read()] + user.role = [module_target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + module_target_sat.api.Architecture, + module_target_sat.api.Audit, + module_target_sat.api.Bookmark, + module_target_sat.api.CommonParameter, + module_target_sat.api.LibvirtComputeResource, + module_target_sat.api.OVirtComputeResource, + module_target_sat.api.VMWareComputeResource, + module_target_sat.api.Errata, + module_target_sat.api.OperatingSystem, ]: - entity(sc).search() + entity(server_config=sc).search() @pytest.mark.run_in_one_thread @@ -799,7 +841,7 @@ def create_ldap(self, class_target_sat): user.delete() @pytest.mark.tier3 - def test_positive_ipa_basic_no_roles(self, create_ldap): + def test_positive_ipa_basic_no_roles(self, create_ldap, target_sat): """Login with LDAP Auth- FreeIPA for user with no roles/rights :id: 901a241d-aa76-4562-ab1a-a752e6fb7ed5 @@ -808,21 +850,20 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): :steps: Login to server with an FreeIPA user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access target_sat.api. - :CaseLevel: System """ sc = ServerConfig( auth=(create_ldap['username'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(server_config=sc).search() @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): + def test_positive_access_entities_from_ipa_org_admin(self, create_ldap, target_sat): """LDAP FreeIPA User can access resources within its taxonomies if assigned role has permission for same taxonomies @@ -830,7 +871,7 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): :steps: - 1. Create Org Admin and assign taxonomies to it + 1. Create Org Admin role and assign taxonomies to it 2. Create FreeIPA user with same taxonomies as role above 3. Assign Org Admin role to user above 4. 
Login with FreeIPA user and attempt to access resources @@ -838,11 +879,12 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): :expectedresults: FreeIPA User should be able to access all the resources and permissions in taxonomies selected in Org Admin role - :CaseLevel: System """ role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -854,27 +896,27 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): sc = ServerConfig( auth=(create_ldap['username'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': 'login={}'.format(create_ldap['username'])})[ - 0 - ] - user.role = [entities.Role(id=org_admin['id']).read()] + target_sat.api.Architecture(server_config=sc).search() + user = target_sat.api.User().search( + query={'search': 'login={}'.format(create_ldap['username'])} + )[0] + user.role = [target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: - entity(sc).search() + entity(server_config=sc).search() class TestPersonalAccessToken: @@ -896,8 +938,6 @@ def test_personal_access_token_admin(self): 1. Should show output of the api endpoint 2. When revoked, authentication error - :CaseLevel: System - :CaseImportance: High """ @@ -920,10 +960,7 @@ def test_positive_personal_access_token_user_with_role(self): 2. When an incorrect role and end point is used, missing permission should be displayed. 
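Both Org Admin scenarios above clone the built-in 'Organization admin' role and scope the clone to the user's taxonomies. `clone()` returns a plain dict rather than an entity, hence the `Role(id=...).read()` step afterwards. A compact sketch, assuming `target_sat` plus existing `org` and `loc` entities; the exact scoping keys are elided in the hunks above, so the ones below are an assumption:

from fauxfactory import gen_string

def clone_org_admin(target_sat, org, loc):
    base = target_sat.api.Role().search(query={'search': 'name="Organization admin"'})[0]
    cloned = target_sat.api.Role(id=base.id).clone(
        data={
            'role': {
                'name': gen_string('alpha'),
                'organization_ids': [org.id],  # assumed key, elided above
                'location_ids': [loc.id],      # assumed key, elided above
            }
        }
    )
    # clone() hands back a dict payload; re-fetch to get a usable entity
    return target_sat.api.Role(id=cloned['id']).read()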
- :CaseLevel: System - :CaseImportance: High - """ @pytest.mark.tier2 @@ -940,8 +977,6 @@ def test_expired_personal_access_token(self): :expectedresults: Authentication error - :CaseLevel: System - :CaseImportance: Medium """ diff --git a/tests/foreman/api/test_usergroup.py b/tests/foreman/api/test_usergroup.py index cb9e6477576..95d1547934f 100644 --- a/tests/foreman/api/test_usergroup.py +++ b/tests/foreman/api/test_usergroup.py @@ -7,41 +7,37 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from random import randint -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest from requests.exceptions import HTTPError -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_usernames_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, + valid_usernames_list, +) class TestUserGroup: """Tests for the ``usergroups`` path.""" @pytest.fixture - def user_group(self): - return entities.UserGroup().create() + def user_group(self, target_sat): + return target_sat.api.UserGroup().create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, target_sat, name): """Create new user group using different valid names :id: 3a2255d9-f48d-4f22-a4b9-132361bd9224 @@ -52,12 +48,12 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - user_group = entities.UserGroup(name=name).create() + user_group = target_sat.api.UserGroup(name=name).create() assert user_group.name == name @pytest.mark.tier1 @pytest.mark.parametrize('login', **parametrized(valid_usernames_list())) - def test_positive_create_with_user(self, login): + def test_positive_create_with_user(self, target_sat, login): """Create new user group using valid user attached to that group. :id: ab127e09-31d2-4c5b-ae6c-726e4b11a21e @@ -68,13 +64,13 @@ def test_positive_create_with_user(self, login): :CaseImportance: Critical """ - user = entities.User(login=login).create() - user_group = entities.UserGroup(user=[user]).create() + user = target_sat.api.User(login=login).create() + user_group = target_sat.api.UserGroup(user=[user]).create() assert len(user_group.user) == 1 assert user_group.user[0].read().login == login @pytest.mark.tier1 - def test_positive_create_with_users(self): + def test_positive_create_with_users(self, target_sat): """Create new user group using multiple users attached to that group. 
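A sketch of the membership check these user-group tests repeat, assuming `target_sat`; members come back as lazy references, so each needs a `read()` to resolve its login:

from random import randint

def group_with_users(target_sat):
    users = [target_sat.api.User().create() for _ in range(randint(3, 5))]
    group = target_sat.api.UserGroup(user=users).create()
    assert sorted(u.login for u in users) == sorted(m.read().login for m in group.user)
    return group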
:id: b8dbbacd-b5cb-49b1-985d-96df21440652 @@ -84,15 +80,15 @@ def test_positive_create_with_users(self): :CaseImportance: Critical """ - users = [entities.User().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(user=users).create() + users = [target_sat.api.User().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(user=users).create() assert sorted(user.login for user in users) == sorted( user.read().login for user in user_group.user ) @pytest.mark.tier1 @pytest.mark.parametrize('role_name', **parametrized(valid_data_list())) - def test_positive_create_with_role(self, role_name): + def test_positive_create_with_role(self, target_sat, role_name): """Create new user group using valid role attached to that group. :id: c4fac71a-9dda-4e5f-a5df-be362d3cbd52 @@ -103,13 +99,13 @@ def test_positive_create_with_role(self, role_name): :CaseImportance: Critical """ - role = entities.Role(name=role_name).create() - user_group = entities.UserGroup(role=[role]).create() + role = target_sat.api.Role(name=role_name).create() + user_group = target_sat.api.UserGroup(role=[role]).create() assert len(user_group.role) == 1 assert user_group.role[0].read().name == role_name @pytest.mark.tier1 - def test_positive_create_with_roles(self): + def test_positive_create_with_roles(self, target_sat): """Create new user group using multiple roles attached to that group. :id: 5838fcfd-e256-49cf-aef8-b2bf215b3586 @@ -119,15 +115,15 @@ def test_positive_create_with_roles(self): :CaseImportance: Critical """ - roles = [entities.Role().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(role=roles).create() + roles = [target_sat.api.Role().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(role=roles).create() assert sorted(role.name for role in roles) == sorted( role.read().name for role in user_group.role ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_usergroup(self, name): + def test_positive_create_with_usergroup(self, target_sat, name): """Create new user group using another user group attached to the initial group. @@ -139,13 +135,13 @@ def test_positive_create_with_usergroup(self, name): :CaseImportance: Critical """ - sub_user_group = entities.UserGroup(name=name).create() - user_group = entities.UserGroup(usergroup=[sub_user_group]).create() + sub_user_group = target_sat.api.UserGroup(name=name).create() + user_group = target_sat.api.UserGroup(usergroup=[sub_user_group]).create() assert len(user_group.usergroup) == 1 assert user_group.usergroup[0].read().name == name @pytest.mark.tier2 - def test_positive_create_with_usergroups(self): + def test_positive_create_with_usergroups(self, target_sat): """Create new user group using multiple user groups attached to that initial group. 
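The nested-group variant is the same shape one level up, under the same assumptions:

def group_with_subgroups(target_sat, count=3):
    subs = [target_sat.api.UserGroup().create() for _ in range(count)]
    parent = target_sat.api.UserGroup(usergroup=subs).create()
    # Sub-group names are resolved the same way, via read() on each reference
    assert sorted(s.name for s in subs) == sorted(g.read().name for g in parent.usergroup)
    return parent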
@@ -154,17 +150,16 @@ def test_positive_create_with_usergroups(self): :expectedresults: User group is created successfully and contains all expected user groups - :CaseLevel: Integration """ - sub_user_groups = [entities.UserGroup().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(usergroup=sub_user_groups).create() + sub_user_groups = [target_sat.api.UserGroup().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(usergroup=sub_user_groups).create() assert sorted(usergroup.name for usergroup in sub_user_groups) == sorted( usergroup.read().name for usergroup in user_group.usergroup ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, target_sat, name): """Attempt to create user group with invalid name. :id: 1a3384dc-5d52-442c-87c8-e38048a61dfa @@ -176,10 +171,10 @@ def test_negative_create_with_name(self, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.UserGroup(name=name).create() + target_sat.api.UserGroup(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_same_name(self): + def test_negative_create_with_same_name(self, target_sat): """Attempt to create user group with a name of already existent entity. :id: aba0925a-d5ec-4e90-86c6-404b9b6f0179 @@ -188,9 +183,9 @@ def test_negative_create_with_same_name(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() with pytest.raises(HTTPError): - entities.UserGroup(name=user_group.name).create() + target_sat.api.UserGroup(name=user_group.name).create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @@ -210,7 +205,7 @@ def test_positive_update(self, user_group, new_name): assert new_name == user_group.name @pytest.mark.tier1 - def test_positive_update_with_new_user(self): + def test_positive_update_with_new_user(self, target_sat): """Add new user to user group :id: e11b57c3-5f86-4963-9cc6-e10e2f02468b @@ -219,30 +214,29 @@ def test_positive_update_with_new_user(self): :CaseImportance: Critical """ - user = entities.User().create() - user_group = entities.UserGroup().create() + user = target_sat.api.User().create() + user_group = target_sat.api.UserGroup().create() user_group.user = [user] user_group = user_group.update(['user']) assert user.login == user_group.user[0].read().login @pytest.mark.tier2 - def test_positive_update_with_existing_user(self): + def test_positive_update_with_existing_user(self, target_sat): """Update user that is assigned to user group with another one :id: 71b78f64-867d-4bf5-9b1e-02698a17fb38 :expectedresults: User group is updated successfully.
- :CaseLevel: Integration """ - users = [entities.User().create() for _ in range(2)] - user_group = entities.UserGroup(user=[users[0]]).create() + users = [target_sat.api.User().create() for _ in range(2)] + user_group = target_sat.api.UserGroup(user=[users[0]]).create() user_group.user[0] = users[1] user_group = user_group.update(['user']) assert users[1].login == user_group.user[0].read().login @pytest.mark.tier1 - def test_positive_update_with_new_role(self): + def test_positive_update_with_new_role(self, target_sat): """Add new role to user group :id: 8e0872c1-ae88-4971-a6fc-cd60127d6663 @@ -251,15 +245,15 @@ def test_positive_update_with_new_role(self): :CaseImportance: Critical """ - new_role = entities.Role().create() - user_group = entities.UserGroup().create() + new_role = target_sat.api.Role().create() + user_group = target_sat.api.UserGroup().create() user_group.role = [new_role] user_group = user_group.update(['role']) assert new_role.name == user_group.role[0].read().name @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_update_with_new_usergroup(self): + def test_positive_update_with_new_usergroup(self, target_sat): """Add new user group to existing one :id: 3cb29d07-5789-4f94-9fd9-a7e494b3c110 @@ -268,8 +262,8 @@ def test_positive_update_with_new_usergroup(self): :CaseImportance: Critical """ - new_usergroup = entities.UserGroup().create() - user_group = entities.UserGroup().create() + new_usergroup = target_sat.api.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.usergroup = [new_usergroup] user_group = user_group.update(['usergroup']) assert new_usergroup.name == user_group.usergroup[0].read().name @@ -293,7 +287,7 @@ def test_negative_update(self, user_group, new_name): assert user_group.read().name != new_name @pytest.mark.tier1 - def test_negative_update_with_same_name(self): + def test_negative_update_with_same_name(self, target_sat): """Attempt to update user group with a name of already existent entity. 
:id: 14888998-9282-4d81-9e99-234d19706783 @@ -303,15 +297,15 @@ def test_negative_update_with_same_name(self): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.UserGroup(name=name).create() - new_user_group = entities.UserGroup().create() + target_sat.api.UserGroup(name=name).create() + new_user_group = target_sat.api.UserGroup().create() new_user_group.name = name with pytest.raises(HTTPError): new_user_group.update(['name']) assert new_user_group.read().name != name @pytest.mark.tier1 - def test_positive_delete(self): + def test_positive_delete(self, target_sat): """Create user group with valid name and then delete it :id: c5cfcc4a-9177-47bb-8f19-7a8930eb7ca3 @@ -320,7 +314,7 @@ def test_positive_delete(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.delete() with pytest.raises(HTTPError): user_group.read() diff --git a/tests/foreman/api/test_webhook.py b/tests/foreman/api/test_webhook.py index c9a2bd5c60b..bb6c3637c2b 100644 --- a/tests/foreman/api/test_webhook.py +++ b/tests/foreman/api/test_webhook.py @@ -4,29 +4,21 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: HooksandWebhooks :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import re import pytest -from nailgun import entities from requests.exceptions import HTTPError -from wait_for import TimedOutError -from wait_for import wait_for +from wait_for import TimedOutError, wait_for from robottelo.config import settings -from robottelo.constants import WEBHOOK_EVENTS -from robottelo.constants import WEBHOOK_METHODS +from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS from robottelo.logging import logger from robottelo.utils.datafactory import parametrized @@ -64,13 +56,13 @@ def assert_event_triggered(channel, event): try: log = _wait_for_log(channel, pattern) assert pattern in log - except TimedOutError: - raise AssertionError(f'Timed out waiting for {pattern} from VM') + except TimedOutError as err: + raise AssertionError(f'Timed out waiting for {pattern} from VM') from err class TestWebhook: @pytest.mark.tier2 - def test_negative_invalid_event(self): + def test_negative_invalid_event(self, target_sat): """Test negative webhook creation with an invalid event :id: 60cd456a-9943-45cb-a72e-23a83a691499 @@ -80,11 +72,11 @@ def test_negative_invalid_event(self): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.Webhooks(event='invalid_event').create() + target_sat.api.Webhooks(event='invalid_event').create() @pytest.mark.tier2 - @pytest.mark.parametrize('event', **parametrized(WEBHOOK_EVENTS)) - def test_positive_valid_event(self, event): + @pytest.mark.parametrize('event', WEBHOOK_EVENTS) + def test_positive_valid_event(self, event, target_sat): """Test positive webhook creation with a valid event :id: 9b505f1b-7ee1-4362-b44c-f3107d043a05 @@ -93,11 +85,11 @@ def test_positive_valid_event(self, event): :CaseImportance: High """ - hook = entities.Webhooks(event=event).create() + hook = target_sat.api.Webhooks(event=event).create() assert event in hook.event @pytest.mark.tier2 - def test_negative_invalid_method(self): + def test_negative_invalid_method(self, target_sat): """Test negative webhook creation with an invalid HTTP method :id: 573be312-7bf3-4d9e-aca1-e5cac810d04b @@ -107,11 +99,11 @@ def test_negative_invalid_method(self): :CaseImportance: High """ with pytest.raises(HTTPError): - 
entities.Webhooks(http_method='NONE').create() + target_sat.api.Webhooks(http_method='NONE').create() @pytest.mark.tier2 @pytest.mark.parametrize('method', **parametrized(WEBHOOK_METHODS)) - def test_positive_valid_method(self, method): + def test_positive_valid_method(self, method, target_sat): """Test positive webhook creation with a valid HTTP method :id: cf8f276a-d21e-44d0-92f2-657232240c7e @@ -120,12 +112,12 @@ def test_positive_valid_method(self, method): :CaseImportance: High """ - hook = entities.Webhooks(http_method=method).create() + hook = target_sat.api.Webhooks(http_method=method).create() assert hook.http_method == method @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_end_to_end(self): + def test_positive_end_to_end(self, target_sat): """Create a new webhook. :id: 7593a04e-cf7e-414c-9e7e-3fe2936cc32a @@ -134,7 +126,7 @@ def test_positive_end_to_end(self): :CaseImportance: Critical """ - hook = entities.Webhooks().create() + hook = target_sat.api.Webhooks().create() assert hook hook.name = "testing" hook.http_method = "GET" @@ -151,7 +143,9 @@ def test_positive_end_to_end(self): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_event_triggered(self, module_org, target_sat): + @pytest.mark.e2e + @pytest.mark.parametrize('setting_update', ['safemode_render=False'], indirect=True) + def test_positive_event_triggered(self, module_org, target_sat, setting_update): """Create a webhook and trigger the event associated with it. @@ -162,13 +156,16 @@ def test_positive_event_triggered(self, module_org, target_sat): :CaseImportance: Critical """ - hook = entities.Webhooks( - event='actions.katello.repository.sync_succeeded', http_method='GET' + hook = target_sat.api.Webhooks( + event='actions.katello.repository.sync_succeeded', + http_method='GET', + target_url=settings.repos.yum_0.url, ).create() - repo = entities.Repository( + repo = target_sat.api.Repository( organization=module_org, content_type='yum', url=settings.repos.yum_0.url ).create() with target_sat.session.shell() as shell: shell.send('foreman-tail') repo.sync() assert_event_triggered(shell, hook.event) + target_sat.wait_for_tasks(f'Deliver webhook {hook.name}') diff --git a/tests/foreman/cli/test_abrt.py b/tests/foreman/cli/test_abrt.py index ecba0b067b7..0543679b733 100644 --- a/tests/foreman/cli/test_abrt.py +++ b/tests/foreman/cli/test_abrt.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Infrastructure :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -30,7 +25,7 @@ def test_positive_create_report(self): :Setup: abrt - :Steps: start a sleep process in background, kill it send the report + :steps: start a sleep process in background, kill it, and send the report using smart-proxy-abrt-send :expectedresults: An abrt report with ccpp.* extension created under @@ -39,7 +34,6 @@ :CaseAutomation: NotAutomated :CaseImportance: Critical - """ @@ -51,7 +45,7 @@ def test_positive_create_reports(self): :Setup: abrt - :Steps: + :steps: 1. Create multiple reports of abrt 2. 
Keep track of counts @@ -59,7 +53,6 @@ :expectedresults: Count is updated in proper manner :CaseAutomation: NotAutomated - """ @@ -71,12 +64,11 @@ def test_positive_update_timer(self): :Setup: abrt - :Steps: edit the timer for /etc/cron.d/rubygem-smart_proxy_abrt + :steps: edit the timer for /etc/cron.d/rubygem-smart_proxy_abrt :expectedresults: the timer file is edited :CaseAutomation: NotAutomated - """ @@ -88,12 +80,11 @@ def test_positive_identify_hostname(self): :Setup: abrt - :Steps: UI => Settings => Abrt tab => edit hostnames + :steps: UI => Settings => Abrt tab => edit hostnames :expectedresults: Assertion of hostnames is possible :CaseAutomation: NotAutomated - """ @@ -105,10 +96,9 @@ def test_positive_search_report(self): :Setup: abrt - :Steps: access /var/tmp/abrt/ccpp-* files + :steps: access /var/tmp/abrt/ccpp-* files :expectedresults: Assertion of parameters :CaseAutomation: NotAutomated - """ diff --git a/tests/foreman/cli/test_acs.py b/tests/foreman/cli/test_acs.py index 941b9d066e4..f7ad33cf204 100644 --- a/tests/foreman/cli/test_acs.py +++ b/tests/foreman/cli/test_acs.py @@ -4,24 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: AlternateContentSources :Team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_alphanumeric +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.constants.repos import PULP_FIXTURE_ROOT -from robottelo.constants.repos import PULP_SUBPATHS_COMBINED +from robottelo.constants.repos import PULP_FIXTURE_ROOT, PULP_SUBPATHS_COMBINED +from robottelo.exceptions import CLIReturnCodeError ACS_UPDATED = 'Alternate Content Source updated.' ACS_DELETED = 'Alternate Content Source deleted.' @@ -59,7 +53,6 @@ def test_positive_CRUD_all_types( 3. ACS can be updated and read with new name. 4. ACS can be refreshed. 5. ACS can be deleted. - """ if 'rhui' in request.node.name and 'file' in request.node.name: pytest.skip('unsupported parametrize combination') @@ -142,7 +135,6 @@ def test_negative_check_name_validation(module_target_sat, acs_type): :expectedresults: 1. Should fail with validation error and proper message. - """ with pytest.raises(CLIReturnCodeError) as context: module_target_sat.cli.ACS.create({'alternate-content-source-type': acs_type}) @@ -168,7 +160,6 @@ def test_negative_check_custom_rhui_validations(module_target_sat, acs_type, mod :expectedresults: 1. Should fail as base-url and verify-ssl are required. 2. Should fail as product-ids is forbidden. - """ # Create with required missing with pytest.raises(CLIReturnCodeError) as context: @@ -218,7 +209,6 @@ def test_negative_check_simplified_validations( :expectedresults: 1. Should fail and list that all the forbidden parameters must be blank. 
- """ # Create with forbidden present params = { diff --git a/tests/foreman/cli/test_activationkey.py b/tests/foreman/cli/test_activationkey.py index 329a57c3558..cbcfaea5700 100644 --- a/tests/foreman/cli/test_activationkey.py +++ b/tests/foreman/cli/test_activationkey.py @@ -4,59 +4,39 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ActivationKeys :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import re from random import choice +import re -import pytest from broker import Broker -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string +from fauxfactory import gen_alphanumeric, gen_string +import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView from robottelo.cli.defaults import Defaults -from robottelo.cli.factory import add_role_permissions -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_activation_key -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_host_collection -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.factory import setup_org_for_a_custom_repo -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.repository import Repository -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) from robottelo.utils.issue_handlers import is_open @pytest.fixture(scope='module') -def get_default_env(module_org): +def get_default_env(module_org, module_target_sat): """Get default lifecycle environment""" - return LifecycleEnvironment.info({'organization-id': module_org.id, 'name': 'Library'}) + return module_target_sat.cli.LifecycleEnvironment.info( + {'organization-id': module_org.id, 'name': 'Library'} + ) @pytest.mark.tier1 @@ -81,7 +61,7 @@ def test_positive_create_with_name(module_target_sat, module_entitlement_manifes @pytest.mark.tier1 @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) -def test_positive_create_with_description(desc, module_org): +def test_positive_create_with_description(desc, module_org, module_target_sat): """Create Activation key for all variations of Description :id: 5a5ca7f9-1449-4365-ac8a-978605620bf2 @@ -92,12 +72,14 @@ def test_positive_create_with_description(desc, module_org): :parametrized: yes """ - new_ak = make_activation_key({'organization-id': module_org.id, 'description': desc}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'description': desc} + ) assert new_ak['description'] == desc @pytest.mark.tier1 -def 
test_positive_create_with_default_lce_by_id(module_org, get_default_env): +def test_positive_create_with_default_lce_by_id(module_org, get_default_env, target_sat): """Create Activation key with associated default environment :id: 9171adb2-c9ac-4cda-978f-776826668aa3 @@ -107,14 +89,14 @@ def test_positive_create_with_default_lce_by_id(module_org, get_default_env): :CaseImportance: Critical """ lce = get_default_env - new_ak_env = make_activation_key( + new_ak_env = target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': lce['id']} ) assert new_ak_env['lifecycle-environment'] == lce['name'] @pytest.mark.tier1 -def test_positive_create_with_non_default_lce(module_org): +def test_positive_create_with_non_default_lce(module_org, module_target_sat): """Create Activation key with associated custom environment :id: ad4d4611-3fb5-4449-ae47-305f9931350e @@ -124,15 +106,17 @@ def test_positive_create_with_non_default_lce(module_org): :CaseImportance: Critical """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_ak_env = make_activation_key( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + new_ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': env['id']} ) assert new_ak_env['lifecycle-environment'] == env['name'] @pytest.mark.tier1 -def test_positive_create_with_default_lce_by_name(module_org, get_default_env): +def test_positive_create_with_default_lce_by_name(module_org, get_default_env, module_target_sat): """Create Activation key with associated environment by name :id: 7410f7c4-e8b5-4080-b6d2-65dbcedffe8a @@ -142,7 +126,7 @@ def test_positive_create_with_default_lce_by_name(module_org, get_default_env): :CaseImportance: Critical """ lce = get_default_env - new_ak_env = make_activation_key( + new_ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment': lce['name']} ) assert new_ak_env['lifecycle-environment'] == lce['name'] @@ -150,7 +134,7 @@ def test_positive_create_with_default_lce_by_name(module_org, get_default_env): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_cv(name, module_org, get_default_env): +def test_positive_create_with_cv(name, module_org, get_default_env, module_target_sat): """Create Activation key for all variations of Content Views :id: ec7b1af5-c3f4-40c3-b1df-c69c02a3b9a7 @@ -158,15 +142,16 @@ def test_positive_create_with_cv(name, module_org, get_default_env): :expectedresults: Activation key is created and has proper content view assigned - :CaseLevel: Integration - :parametrized: yes """ - new_cv = make_content_view({'name': name, 'organization-id': module_org.id}) - new_ak_cv = make_activation_key( + new_cv = module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_ak_cv = module_target_sat.cli_factory.make_activation_key( { 'content-view': new_cv['name'], - 'environment': get_default_env['name'], + 'lifecycle-environment': get_default_env['name'], 'organization-id': module_org.id, } ) @@ -174,7 +159,7 @@ def test_positive_create_with_cv(name, module_org, get_default_env): @pytest.mark.tier1 -def test_positive_create_with_usage_limit_default(module_org): +def 
test_positive_create_with_usage_limit_default(module_org, module_target_sat): """Create Activation key with default Usage limit (Unlimited) :id: cba13c72-9845-486d-beff-e0fb55bb762c @@ -183,12 +168,12 @@ def test_positive_create_with_usage_limit_default(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) assert new_ak['host-limit'] == 'Unlimited' @pytest.mark.tier1 -def test_positive_create_with_usage_limit_finite(module_org): +def test_positive_create_with_usage_limit_finite(module_org, module_target_sat): """Create Activation key with finite Usage limit :id: 529a0f9e-977f-4e9d-a1af-88bb98c28a6a @@ -197,13 +182,15 @@ def test_positive_create_with_usage_limit_finite(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id, 'max-hosts': '10'}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': '10'} + ) assert new_ak['host-limit'] == '10' @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_create_content_and_check_enabled(module_org): +def test_positive_create_content_and_check_enabled(module_org, module_target_sat): """Create activation key and add content to it. Check enabled state. :id: abfc6c6e-acd1-4761-b309-7e68e1d17172 @@ -212,13 +199,11 @@ def test_positive_create_content_and_check_enabled(module_org): successfully :BZ: 1361993 - - :CaseLevel: Integration """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['default-enabled?'] == 'true' @@ -226,7 +211,7 @@ def test_positive_create_content_and_check_enabled(module_org): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_invalid_name(name, module_org): +def test_negative_create_with_invalid_name(name, module_org, module_target_sat): """Create Activation key with invalid Name :id: d9b7e3a9-1d24-4e47-bd4a-dce75772d829 @@ -239,7 +224,9 @@ def test_negative_create_with_invalid_name(name, module_org): :parametrized: yes """ with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'name': name}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) if name in ['', ' ', '\t']: assert 'Name must contain at least 1 character' in str(raise_ctx) if len(name) > 255: @@ -251,7 +238,7 @@ def test_negative_create_with_invalid_name(name, module_org): 'limit', **parametrized([value for value in invalid_values_list() if not value.isdigit()] + [0.5]), ) -def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): +def test_negative_create_with_usage_limit_with_not_integers(module_org, limit, module_target_sat): """Create Activation key with non integers Usage Limit :id: 247ebc2e-c80f-488b-aeaf-6bf5eba55375 @@ -267,17 +254,20 @@ def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): # invalid_values = [value for value in invalid_values_list() if not value.isdigit()] # 
invalid_values.append(0.5) with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'max-hosts': limit}) - if type(limit) is int: - if limit < 1: - assert 'Max hosts cannot be less than one' in str(raise_ctx) - if type(limit) is str: + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': limit} + ) + if isinstance(limit, int) and limit < 1: + assert 'Max hosts cannot be less than one' in str(raise_ctx) + if isinstance(limit, str): assert 'Numeric value is required.' in str(raise_ctx) @pytest.mark.tier3 -@pytest.mark.parametrize('invalid_values', ('-1', '-500', 0)) -def test_negative_create_with_usage_limit_with_invalid_integers(module_org, invalid_values): +@pytest.mark.parametrize('invalid_values', ['-1', '-500', 0]) +def test_negative_create_with_usage_limit_with_invalid_integers( + module_org, invalid_values, module_target_sat +): """Create Activation key with invalid integers Usage Limit :id: 9089f756-fda8-4e28-855c-cf8273f7c6cd @@ -290,13 +280,15 @@ def test_negative_create_with_usage_limit_with_invalid_integers(module_org, inva :parametrized: yes """ with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'max-hosts': invalid_values}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': invalid_values} + ) assert 'Failed to create ActivationKey with data:' in str(raise_ctx) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete_by_name(name, module_org): +def test_positive_delete_by_name(name, module_org, module_target_sat): """Create Activation key and delete it for all variations of Activation key name @@ -308,14 +300,18 @@ def test_positive_delete_by_name(name, module_org): :parametrized: yes """ - new_ak = make_activation_key({'name': name, 'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'name': name, 'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 -def test_positive_delete_by_org_name(module_org): +def test_positive_delete_by_org_name(module_org, module_target_sat): """Create Activation key and delete it using organization name for which that key was created @@ -325,14 +321,16 @@ def test_positive_delete_by_org_name(module_org): :CaseImportance: High """ - new_ak = make_activation_key({'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 -def test_positive_delete_by_org_label(module_org): +def test_positive_delete_by_org_label(module_org, module_target_sat): """Create Activation key and delete it using organization label for which that key was created @@ -342,53 +340,53 @@ def 
test_positive_delete_by_org_label(module_org): :CaseImportance: High """ - new_ak = make_activation_key({'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-label': module_org.label}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-label': module_org.label} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_with_cv(module_org): +def test_positive_delete_with_cv(module_org, module_target_sat): """Create activation key with content view assigned to it and delete it using activation key id :id: bba323fa-0362-4a9b-97af-560d446cbb6c :expectedresults: Activation key is deleted - - :CaseLevel: Integration """ - new_cv = make_content_view({'organization-id': module_org.id}) - new_ak = make_activation_key({'organization-id': module_org.id, 'content-view': new_cv['name']}) - ActivationKey.delete({'id': new_ak['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view': new_cv['name']} + ) + module_target_sat.cli.ActivationKey.delete({'id': new_ak['id']}) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier2 -def test_positive_delete_with_lce(module_org, get_default_env): +def test_positive_delete_with_lce(module_org, get_default_env, module_target_sat): """Create activation key with lifecycle environment assigned to it and delete it using activation key id :id: e1830e52-5b1a-4ac4-8d0a-df6efb218a8b :expectedresults: Activation key is deleted - - :CaseLevel: Integration """ - new_ak = make_activation_key( + new_ak = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment': get_default_env['name']} ) - ActivationKey.delete({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.delete({'id': new_ak['id']}) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_update_name_by_id(module_org, name): +def test_positive_update_name_by_id(module_org, name, module_target_sat): """Update Activation Key Name in Activation key searching by ID :id: bc304894-fd9b-4622-96e3-57c2257e26ca @@ -399,16 +397,18 @@ def test_positive_update_name_by_id(module_org, name): :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( {'id': activation_key['id'], 'new-name': name, 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert updated_ak['name'] == name @pytest.mark.tier1 -def test_positive_update_name_by_name(module_org): +def test_positive_update_name_by_name(module_org, module_target_sat): """Update Activation 
Key Name in an Activation key searching by name @@ -419,17 +419,19 @@ def test_positive_update_name_by_name(module_org): :CaseImportance: Critical """ new_name = gen_string('alpha') - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( {'name': activation_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert updated_ak['name'] == new_name @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_update_description(description, module_org): +def test_positive_update_description(description, module_org, module_target_sat): """Update Description in an Activation key :id: 60a4e860-d99c-431e-b70b-9b0fa90d839b @@ -440,37 +442,41 @@ def test_positive_update_description(description, module_org): :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( { 'description': description, 'name': activation_key['name'], 'organization-id': module_org.id, } ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert updated_ak['description'] == description @pytest.mark.tier2 -def test_positive_update_lce(module_org, get_default_env): +def test_positive_update_lce(module_org, get_default_env, module_target_sat): """Update Environment in an Activation key :id: 55aaee60-b8c8-49f0-995a-6c526b9b653b :expectedresults: Activation key is updated - - :CaseLevel: Integration """ - ak_env = make_activation_key( + ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': get_default_env['id']} ) - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - ActivationKey.update( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + module_target_sat.cli.ActivationKey.update( { 'id': ak_env['id'], 'lifecycle-environment-id': env['id'], @@ -478,32 +484,32 @@ def test_positive_update_lce(module_org, get_default_env): 'organization-id': module_org.id, } ) - updated_ak = ActivationKey.info({'id': ak_env['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': ak_env['id']}) assert updated_ak['lifecycle-environment'] == env['name'] @pytest.mark.tier2 -def test_positive_update_cv(module_org): +def test_positive_update_cv(module_org, 
module_target_sat): """Update Content View in an Activation key :id: aa94997d-fc9b-4532-aeeb-9f27b9834914 :expectedresults: Activation key is updated - - :CaseLevel: Integration """ - cv = make_content_view({'organization-id': module_org.id}) - ak_cv = make_activation_key({'organization-id': module_org.id, 'content-view-id': cv['id']}) - new_cv = make_content_view({'organization-id': module_org.id}) - ActivationKey.update( + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + ak_cv = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view-id': cv['id']} + ) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.update( {'content-view': new_cv['name'], 'name': ak_cv['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': ak_cv['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': ak_cv['id']}) assert updated_ak['content-view'] == new_cv['name'] @pytest.mark.tier1 -def test_positive_update_usage_limit_to_finite_number(module_org): +def test_positive_update_usage_limit_to_finite_number(module_org, module_target_sat): """Update Usage limit from Unlimited to a finite number :id: a55bb8dc-c7d8-4a6a-ac0f-1d5a377da543 @@ -512,17 +518,17 @@ def test_positive_update_usage_limit_to_finite_number(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) assert new_ak['host-limit'] == 'Unlimited' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'max-hosts': '2147483647', 'name': new_ak['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['host-limit'] == '2147483647' @pytest.mark.tier1 -def test_positive_update_usage_limit_to_unlimited(module_org): +def test_positive_update_usage_limit_to_unlimited(module_org, module_target_sat): """Update Usage limit from definite number to Unlimited :id: 0b83657b-41d1-4fb2-9c23-c36011322b83 @@ -531,18 +537,20 @@ def test_positive_update_usage_limit_to_unlimited(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id, 'max-hosts': '10'}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': '10'} + ) assert new_ak['host-limit'] == '10' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'unlimited-hosts': True, 'name': new_ak['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['host-limit'] == 'Unlimited' @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_update_name(module_org, name): +def test_negative_update_name(module_org, name, module_target_sat): """Try to update Activation Key using invalid value for its name :id: b75e7c38-fde2-4110-ba65-4157319fc159 @@ -554,16 +562,16 @@ def test_negative_update_name(module_org, name): :parametrized: yes """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with 
pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'id': new_ak['id'], 'new-name': name, 'organization-id': module_org.id} ) assert 'Could not update the activation key:' in raise_ctx.value.message @pytest.mark.tier2 -def test_negative_update_usage_limit(module_org): +def test_negative_update_usage_limit(module_org, module_target_sat): """Try to update Activation Key using invalid value for its usage limit attribute @@ -574,9 +582,9 @@ def test_negative_update_usage_limit(module_org): :CaseImportance: Low """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'max-hosts': int('9' * 20), 'id': new_ak['id'], 'organization-id': module_org.id} ) assert 'Validation failed: Max hosts must be less than 2147483648' in raise_ctx.value.message @@ -590,7 +598,7 @@ def test_positive_usage_limit(module_org, target_sat): :id: 00ded856-e939-4140-ac84-91b6a8643623 - :Steps: + :steps: 1. Create Activation key 2. Update Usage Limit to a finite number @@ -602,15 +610,15 @@ def test_positive_usage_limit(module_org, target_sat): shown :CaseImportance: Critical - - :CaseLevel: System """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - new_ak = make_activation_key( + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + new_cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + new_ak = target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': env['id'], 'content-view': new_cv['name'], @@ -632,7 +640,7 @@ def test_positive_usage_limit(module_org, target_sat): @pytest.mark.tier2 @pytest.mark.parametrize('host_col_name', **parametrized(valid_data_list())) -def test_positive_update_host_collection(module_org, host_col_name): +def test_positive_update_host_collection(module_org, host_col_name, module_target_sat): """Test that host collections can be associated to Activation Keys @@ -643,30 +651,30 @@ def test_positive_update_host_collection(module_org, host_col_name): :expectedresults: Host collections are successfully associated to Activation key - :CaseLevel: Integration - :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col_name = make_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col_name = module_target_sat.cli_factory.make_host_collection( {'name': host_col_name, 'organization-id': module_org.id} )['name'] # Assert that name matches data passed assert new_host_col_name == host_col_name - ActivationKey.add_host_collection( + module_target_sat.cli.ActivationKey.add_host_collection( { 'host-collection': new_host_col_name, 'name': activation_key['name'], 
'organization-id': module_org.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['host-collections'][0]['name'] == host_col_name @pytest.mark.run_in_one_thread @pytest.mark.tier2 -def test_positive_update_host_collection_with_default_org(module_org): +def test_positive_update_host_collection_with_default_org(module_org, module_target_sat): """Test that host collection can be associated to Activation Keys with specified default organization setting in config @@ -679,12 +687,14 @@ """ Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id}) try: - activation_key = make_activation_key({'organization-id': module_org.id}) - host_col = make_host_collection() - ActivationKey.add_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + host_col = module_target_sat.cli_factory.make_host_collection() + module_target_sat.cli.ActivationKey.add_host_collection( {'host-collection': host_col['name'], 'name': activation_key['name']} ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['host-collections'][0]['name'] == host_col['name'] finally: Defaults.delete({'param-name': 'organization_id'}) @@ -692,21 +702,19 @@ @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_add_redhat_product(function_entitlement_manifest_org): +def test_positive_add_redhat_product(function_entitlement_manifest_org, target_sat): """Test that RH product can be associated to Activation Keys :id: 7b15de8e-edde-41aa-937b-ad6aa529891a :expectedresults: RH products are successfully associated to Activation key - - :CaseLevel: System """ org = function_entitlement_manifest_org # Using CDN as we need this repo to be RH one no matter whether we are in # downstream or cdn - result = setup_org_for_a_rh_repo( + result = target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -715,7 +723,7 @@ }, force_use_cdn=True, ) - content = ActivationKey.product_content( + content = target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': org.id} ) assert content[0]['name'] == REPOSET['rhst7'] @@ -723,7 +731,7 @@ @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_custom_product(module_org): +def test_positive_add_custom_product(module_org, module_target_sat): """Test that custom product can be associated to Activation Keys :id: 96ace967-e165-4069-8ff7-f54c4c822de0 :expectedresults: Custom products are successfully associated to Activation key - :CaseLevel: System - :BZ: 1426386 """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - repo = Repository.info({'id': result['repository-id']}) - content = 
ActivationKey.product_content( + repo = module_target_sat.cli.Repository.info({'id': result['repository-id']}) + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['name'] == repo['name'] @@ -756,7 +762,7 @@ :id: 74c77426-18f5-4abb-bca9-a2135f7fcc1f - :Steps: + :steps: 1. Create Activation key 2. Associate RH product(s) to Activation Key @@ -765,14 +771,12 @@ :expectedresults: RH/Custom product is successfully associated to Activation key - :CaseLevel: System - :BZ: 1426386 """ org = function_entitlement_manifest_org # Using CDN as we need this repo to be RH one no matter whether we are in # downstream or cdn - result = setup_org_for_a_rh_repo( + result = module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -781,7 +785,7 @@ }, force_use_cdn=True, ) - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_0.url, 'organization-id': org.id, @@ -804,7 +808,7 @@ :id: 8256ac6d-3f60-4668-897d-2e88d29532d3 - :Steps: + :steps: 1. Upload manifest 2. Create activation key - attach some subscriptions 3. Delete manifest @@ -817,18 +821,18 @@ :CaseAutomation: Automated """ org = function_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) + new_ak = target_sat.cli_factory.make_activation_key({'organization-id': org.id}) ak_subs = target_sat.cli.ActivationKey.subscriptions( {'id': new_ak['id'], 'organization-id': org.id} ) - subscription_result = Subscription.list( + subscription_result = target_sat.cli.Subscription.list( {'organization-id': org.id, 'order': 'id desc'}, per_page=False ) result = target_sat.cli.ActivationKey.add_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} ) assert 'Subscription added to activation key.' in result - Subscription.delete_manifest({'organization-id': org.id}) + target_sat.cli.Subscription.delete_manifest({'organization-id': org.id}) ak_subs_info = target_sat.cli.ActivationKey.subscriptions( {'id': new_ak['id'], 'organization-id': org.id} ) @@ -845,19 +849,19 @@ :expectedresults: Deleting subscription removes it from the Activation key - - :CaseLevel: Integration """ org = function_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) - subscription_result = Subscription.list( + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) + subscription_result = module_target_sat.cli.Subscription.list( {'organization-id': org.id, 'order': 'id desc'}, per_page=False ) result = module_target_sat.cli.ActivationKey.add_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} ) assert 'Subscription added to activation key.' 
in result - ak_subs_info = ActivationKey.subscriptions({'id': new_ak['id'], 'organization-id': org.id}) + ak_subs_info = module_target_sat.cli.ActivationKey.subscriptions( + {'id': new_ak['id'], 'organization-id': org.id} + ) assert subscription_result[-1]['name'] in ak_subs_info result = module_target_sat.cli.ActivationKey.remove_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} @@ -882,16 +886,16 @@ def test_positive_update_aks_to_chost(module_org, rhel7_contenthost, target_sat) host :parametrized: yes - - :CaseLevel: System """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + new_cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) new_aks = [ - make_activation_key( + target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': env['id'], 'content-view': new_cv['name'], @@ -929,8 +933,6 @@ def test_positive_update_aks_to_chost_in_one_command(module_org): :expectedresults: Multiple Activation keys are attached to a Content host - - :CaseLevel: System """ @@ -947,7 +949,9 @@ def test_positive_list_by_name(module_org, name, module_target_sat): :parametrized: yes """ - make_activation_key({'organization-id': module_org.id, 'name': name}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) result = module_target_sat.cli.ActivationKey.list( {'name': name, 'organization-id': module_org.id} ) @@ -965,8 +969,10 @@ def test_positive_list_by_cv_id(module_org, module_target_sat): :CaseImportance: High """ - cv = make_content_view({'organization-id': module_org.id}) - make_activation_key({'organization-id': module_org.id, 'content-view-id': cv['id']}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view-id': cv['id']} + ) result = module_target_sat.cli.ActivationKey.list( {'content-view-id': cv['id'], 'organization-id': module_org.id} ) @@ -986,14 +992,18 @@ def test_positive_create_using_old_name(module_org, module_target_sat): :CaseImportance: High """ name = gen_string('utf8') - activation_key = make_activation_key({'organization-id': module_org.id, 'name': name}) + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) new_name = gen_string('utf8') module_target_sat.cli.ActivationKey.update( {'id': activation_key['id'], 'new-name': new_name, 'organization-id': module_org.id} ) activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['name'] == new_name - new_activation_key = make_activation_key({'name': name, 'organization-id': module_org.id}) + new_activation_key = module_target_sat.cli_factory.make_activation_key( + {'name': name, 'organization-id': module_org.id} + ) assert 
new_activation_key['name'] == name @@ -1004,7 +1014,7 @@ def test_positive_remove_host_collection_by_id(module_org, module_target_sat): :id: 20f8ecca-1756-4900-b966-f0144b6bd0aa - :Steps: + :steps: 1. Create Activation key 2. Create host collection @@ -1017,12 +1027,12 @@ def test_positive_remove_host_collection_by_id(module_org, module_target_sat): :CaseImportance: Medium - :CaseLevel: Integration - :BZ: 1336716 """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col = make_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col = module_target_sat.cli_factory.make_host_collection( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) module_target_sat.cli.ActivationKey.add_host_collection( @@ -1034,7 +1044,7 @@ def test_positive_remove_host_collection_by_id(module_org, module_target_sat): ) activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert len(activation_key['host-collections']) == 1 - ActivationKey.remove_host_collection( + module_target_sat.cli.ActivationKey.remove_host_collection( { 'host-collection-id': new_host_col['id'], 'name': activation_key['name'], @@ -1053,7 +1063,7 @@ def test_positive_remove_host_collection_by_name(module_org, host_col, module_ta :id: 1a559a82-db5f-48b0-beeb-2fa02aed7ef9 - :Steps: + :steps: 1. Create Activation key 2. Create host collection @@ -1064,14 +1074,16 @@ def test_positive_remove_host_collection_by_name(module_org, host_col, module_ta :expectedresults: Host collection successfully removed from activation key - :CaseLevel: Integration - :BZ: 1336716 :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col = make_host_collection({'name': host_col, 'organization-id': module_org.id}) + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'name': host_col, 'organization-id': module_org.id} + ) # Assert that name matches data passed assert new_host_col['name'] == host_col module_target_sat.cli.ActivationKey.add_host_collection( @@ -1101,7 +1113,7 @@ def test_create_ak_with_syspurpose_set(module_entitlement_manifest_org, module_t :id: ac8931e5-7089-494a-adac-cee2a8ab57ee - :Steps: + :steps: 1. Create Activation key with system purpose values set 2. Read Activation key values and assert system purpose values are set 3. Clear AK system purpose values @@ -1112,7 +1124,7 @@ def test_create_ak_with_syspurpose_set(module_entitlement_manifest_org, module_t :BZ: 1789028 """ # Requires Cls org and manifest. Manifest is for self-support values. - new_ak = make_activation_key( + new_ak = module_target_sat.cli_factory.make_activation_key( { 'purpose-addons': "test-addon1, test-addon2", 'purpose-role': "test-role", @@ -1152,7 +1164,7 @@ def test_update_ak_with_syspurpose_values(module_entitlement_manifest_org, modul :id: db943c05-70f1-4385-9537-fe23368a9dfd - :Steps: + :steps: 1. Create Activation key with no system purpose values set 2. Assert system purpose values are not set @@ -1168,7 +1180,7 @@ def test_update_ak_with_syspurpose_values(module_entitlement_manifest_org, modul # Requires Cls org and manifest. Manifest is for self-support values. 
# Create an AK with no system purpose values set org = module_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) # Assert system purpose values are null after creating the AK and adding the manifest. assert new_ak['system-purpose']['purpose-addons'] == '' assert new_ak['system-purpose']['purpose-role'] == '' @@ -1220,7 +1232,7 @@ def test_positive_add_subscription_by_id(module_entitlement_manifest_org, module :id: b884be1c-b35d-440a-9a9d-c854c83e10a7 - :Steps: + :steps: 1. Create Activation key 2. Upload manifest and add subscription @@ -1230,12 +1242,10 @@ def test_positive_add_subscription_by_id(module_entitlement_manifest_org, module :BZ: 1463685 - :CaseLevel: Integration - :BZ: 1463685 """ org_id = module_entitlement_manifest_org.id - ackey_id = make_activation_key({'organization-id': org_id})['id'] + ackey_id = module_target_sat.cli_factory.make_activation_key({'organization-id': org_id})['id'] subs_id = module_target_sat.cli.Subscription.list({'organization-id': org_id}, per_page=False) result = module_target_sat.cli.ActivationKey.add_subscription( {'id': ackey_id, 'subscription-id': subs_id[0]['id']} @@ -1245,7 +1255,7 @@ def test_positive_add_subscription_by_id(module_entitlement_manifest_org, module @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_copy_by_parent_id(module_org, new_name): +def test_positive_copy_by_parent_id(module_org, new_name, module_target_sat): """Copy Activation key for all valid Activation Key name variations @@ -1257,15 +1267,17 @@ def test_positive_copy_by_parent_id(module_org, new_name): :parametrized: yes """ - parent_ak = make_activation_key({'organization-id': module_org.id}) - result = ActivationKey.copy( + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + result = module_target_sat.cli.ActivationKey.copy( {'id': parent_ak['id'], 'new-name': new_name, 'organization-id': module_org.id} ) assert 'Activation key copied.' 
in result @pytest.mark.tier1 -def test_positive_copy_by_parent_name(module_org): +def test_positive_copy_by_parent_name(module_org, module_target_sat): """Copy Activation key by passing name of parent :id: 5d5405e6-3b26-47a3-96ff-f6c0f6c32607 @@ -1274,8 +1286,10 @@ def test_positive_copy_by_parent_name(module_org): :CaseImportance: Critical """ - parent_ak = make_activation_key({'organization-id': module_org.id}) - result = ActivationKey.copy( + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + result = module_target_sat.cli.ActivationKey.copy( { 'name': parent_ak['name'], 'new-name': gen_string('alpha'), @@ -1286,17 +1300,18 @@ def test_positive_copy_by_parent_name(module_org): @pytest.mark.tier1 -def test_negative_copy_with_same_name(module_org): +def test_negative_copy_with_same_name(module_org, module_target_sat): """Copy activation key with duplicate name :id: f867c468-4155-495c-a1e5-c04d9868a2e0 :expectedresults: Activation key is not successfully copied - """ - parent_ak = make_activation_key({'organization-id': module_org.id}) + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.copy( + module_target_sat.cli.ActivationKey.copy( { 'name': parent_ak['name'], 'new-name': parent_ak['name'], @@ -1311,46 +1326,48 @@ def test_negative_copy_with_same_name(module_org): @pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_copy_subscription(module_entitlement_manifest_org): +def test_positive_copy_subscription(module_entitlement_manifest_org, module_target_sat): """Copy Activation key and verify contents :id: f4ee8096-4120-4d06-8c9a-57ac1eaa8f68 - :Steps: + :steps: 1. Create parent key and add content 2. Copy Activation key by passing id of parent 3. Verify content was successfully copied :expectedresults: Activation key is successfully copied - - :CaseLevel: Integration """ # Begin test setup org = module_entitlement_manifest_org - parent_ak = make_activation_key({'organization-id': org.id}) - subscription_result = Subscription.list({'organization-id': org.id}, per_page=False) - ActivationKey.add_subscription( + parent_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) + subscription_result = module_target_sat.cli.Subscription.list( + {'organization-id': org.id}, per_page=False + ) + module_target_sat.cli.ActivationKey.add_subscription( {'id': parent_ak['id'], 'subscription-id': subscription_result[0]['id']} ) # End test setup new_name = gen_string('utf8') - result = ActivationKey.copy( + result = module_target_sat.cli.ActivationKey.copy( {'id': parent_ak['id'], 'new-name': new_name, 'organization-id': org.id} ) assert 'Activation key copied.' in result - result = ActivationKey.subscriptions({'name': new_name, 'organization-id': org.id}) + result = module_target_sat.cli.ActivationKey.subscriptions( + {'name': new_name, 'organization-id': org.id} + ) # Verify that the subscription copied over assert subscription_result[0]['name'] in result # subscription name # subscription list @pytest.mark.tier1 -def test_positive_update_autoattach_toggle(module_org): +def test_positive_update_autoattach_toggle(module_org, module_target_sat): """Update Activation key with inverse auto-attach value :id: de3b5fb7-7963-420a-b4c9-c66e78a111dc - :Steps: + :steps: 1. Get the key's current auto attach value. 2. Update the key with the value's inverse. 
@@ -1360,19 +1377,19 @@ def test_positive_update_autoattach_toggle(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) attach_value = new_ak['auto-attach'] # invert value new_value = 'false' if attach_value == 'true' else 'true' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'auto-attach': new_value, 'id': new_ak['id'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['auto-attach'] == new_value @pytest.mark.tier1 -def test_positive_update_autoattach(module_org): +def test_positive_update_autoattach(module_org, module_target_sat): """Update Activation key with valid auto-attach values :id: 9e18b950-6f0f-4f82-a3ac-ef6aba950a78 @@ -1381,21 +1398,21 @@ def test_positive_update_autoattach(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) for new_value in ('1', '0', 'true', 'false', 'yes', 'no'): - result = ActivationKey.update( + result = module_target_sat.cli.ActivationKey.update( {'auto-attach': new_value, 'id': new_ak['id'], 'organization-id': module_org.id} ) - assert 'Activation key updated.' == result[0]['message'] + assert result[0]['message'] == 'Activation key updated.' @pytest.mark.tier2 -def test_negative_update_autoattach(module_org): +def test_negative_update_autoattach(module_org, module_target_sat): """Attempt to update Activation key with bad auto-attach value :id: 54b6f808-ff54-4e69-a54d-e1f99a4652f9 - :Steps: + :steps: 1. Attempt to update a key with incorrect auto-attach value 2. Verify that an appropriate error message was returned @@ -1405,9 +1422,9 @@ def test_negative_update_autoattach(module_org): :CaseImportance: Low """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError) as exe: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'auto-attach': gen_string('utf8'), 'id': new_ak['id'], @@ -1419,12 +1436,12 @@ def test_negative_update_autoattach(module_org): @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_content_override(module_org): +def test_positive_content_override(module_org, module_target_sat): """Positive content override :id: a4912cc0-3bf7-4e90-bb51-ec88b2fad227 - :Steps: + :steps: 1. Create activation key and add content 2. Get the first product's label @@ -1432,17 +1449,15 @@ def test_positive_content_override(module_org): 4. 
Verify that the command succeeded :expectedresults: Activation key content override was successful - - :CaseLevel: System """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) for override_value in (True, False): - ActivationKey.content_override( + module_target_sat.cli.ActivationKey.content_override( { 'content-label': content[0]['label'], 'id': result['activationkey-id'], @@ -1451,14 +1466,14 @@ def test_positive_content_override(module_org): } ) # Retrieve the product content enabled flag - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['override'] == f'enabled:{int(override_value)}' @pytest.mark.tier2 -def test_positive_remove_user(module_org): +def test_positive_remove_user(module_org, module_target_sat): """Delete any user who has previously created an activation key and check that activation key still exists @@ -1469,20 +1484,22 @@ def test_positive_remove_user(module_org): :BZ: 1291271 """ password = gen_string('alpha') - user = make_user({'password': password, 'admin': 'true'}) - ak = ActivationKey.with_user(username=user['login'], password=password).create( - {'name': gen_string('alpha'), 'organization-id': module_org.id} - ) - User.delete({'id': user['id']}) + user = module_target_sat.cli_factory.user({'password': password, 'admin': 'true'}) + ak = module_target_sat.cli.ActivationKey.with_user( + username=user['login'], password=password + ).create({'name': gen_string('alpha'), 'organization-id': module_org.id}) + module_target_sat.cli.User.delete({'id': user['id']}) try: - ActivationKey.info({'id': ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': ak['id']}) except CLIReturnCodeError: pytest.fail("Activation key can't be read") @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manifest_org): +def test_positive_view_subscriptions_by_non_admin_user( + module_entitlement_manifest_org, module_target_sat +): """Attempt to read activation key subscriptions by non admin user :id: af75b640-97be-431b-8ac0-a6367f8f1996 @@ -1512,8 +1529,6 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife subscription :BZ: 1406076 - - :CaseLevel: System """ org = module_entitlement_manifest_org user_name = gen_alphanumeric() @@ -1524,18 +1539,24 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife f'Test_*_{gen_string("alpha")}', ) ak_name = f'{ak_name_like}_{gen_string("alpha")}' - available_subscriptions = Subscription.list({'organization-id': org.id}, per_page=False) + available_subscriptions = module_target_sat.cli.Subscription.list( + {'organization-id': org.id}, per_page=False + ) assert len(available_subscriptions) > 0 available_subscription_ids = [subscription['id'] for subscription in available_subscriptions] subscription_id = choice(available_subscription_ids) - activation_key = make_activation_key({'name': ak_name, 'organization-id': org.id}) - ActivationKey.add_subscription({'id': activation_key['id'], 'subscription-id': subscription_id}) - subscriptions = ActivationKey.subscriptions( + 
activation_key = module_target_sat.cli_factory.make_activation_key( + {'name': ak_name, 'organization-id': org.id} + ) + module_target_sat.cli.ActivationKey.add_subscription( + {'id': activation_key['id'], 'subscription-id': subscription_id} + ) + subscriptions = module_target_sat.cli.ActivationKey.subscriptions( {'organization-id': org.id, 'id': activation_key['id']}, output_format='csv', ) assert len(subscriptions) == 1 - role = make_role({'organization-id': org.id}) + role = module_target_sat.cli_factory.make_role({'organization-id': org.id}) resource_permissions = { 'Katello::ActivationKey': { 'permissions': [ @@ -1559,8 +1580,8 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife ] }, } - add_role_permissions(role['id'], resource_permissions) - user = make_user( + module_target_sat.cli_factory.add_role_permissions(role['id'], resource_permissions) + user = module_target_sat.cli_factory.user( { 'admin': False, 'default-organization-id': org.id, @@ -1569,8 +1590,8 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife 'password': user_password, } ) - User.add_role({'id': user['id'], 'role-id': role['id']}) - ak_user_cli_session = ActivationKey.with_user(user_name, user_password) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) + ak_user_cli_session = module_target_sat.cli.ActivationKey.with_user(user_name, user_password) subscriptions = ak_user_cli_session.subscriptions( {'organization-id': org.id, 'id': activation_key['id']}, output_format='csv', @@ -1601,7 +1622,7 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost :BZ: 1633094 """ org = function_org - result = setup_org_for_a_rh_repo( + result = target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -1610,8 +1631,8 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost }, force_use_cdn=True, ) - ak = ActivationKey.info({'id': result['activationkey-id']}) - setup_org_for_a_custom_repo( + ak = target_sat.cli.ActivationKey.info({'id': result['activationkey-id']}) + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_0.url, 'organization-id': org['id'], @@ -1620,13 +1641,13 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost 'lifecycle-environment-id': result['lifecycle-environment-id'], } ) - subs = Subscription.list({'organization-id': org['id']}, per_page=False) + subs = target_sat.cli.Subscription.list({'organization-id': org['id']}, per_page=False) subs_lookup = {s['id']: s for s in subs} rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost(org['label'], activation_key=ak['name']) assert rhel7_contenthost.subscribed - ak_subs = ActivationKey.subscriptions( + ak_subs = target_sat.cli.ActivationKey.subscriptions( {'activation-key': ak['name'], 'organization-id': org['id']}, output_format='json' ) assert len(ak_subs) == 2 # one for #rh product, one for custom product @@ -1647,7 +1668,7 @@ def test_positive_ak_with_custom_product_on_rhel6(module_org, rhel6_contenthost, :customerscenario: true - :Steps: + :steps: 1. Create a custom repo 2. Create ak and add custom repo to ak 3. 
Add subscriptions to the ak diff --git a/tests/foreman/cli/test_ansible.py b/tests/foreman/cli/test_ansible.py index 922660d5d5c..707acbcfec9 100644 --- a/tests/foreman/cli/test_ansible.py +++ b/tests/foreman/cli/test_ansible.py @@ -4,165 +4,520 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Ansible - :Team: Rocket -:TestType: Functional - :CaseImportance: High - -:Upstream: No """ -import pytest +from time import sleep + from fauxfactory import gen_string +import pytest from robottelo.config import settings -@pytest.mark.e2e -@pytest.mark.no_containers -@pytest.mark.rhel_ver_match('[^6].*') -def test_positive_ansible_e2e(target_sat, module_org, rhel_contenthost): - """ - Test successful execution of Ansible Job on host. - - :id: 0c52bc63-a41a-4f48-a980-fe49b4ecdbdc - - :Steps: - 1. Register a content host with satellite - 2. Import a role into satellite - 3. Assign that role to a host - 4. Assert that the role and variable were assigned to the host successfully - 5. Run the Ansible playbook associated with that role - 6. Check if the job is executed successfully. - 7. Disassociate the Role from the host. - 8. Delete the assigned ansible role - - :expectedresults: - 1. Host should be assigned the proper role. - 2. Job execution must be successful. - 3. Operations performed with hammer must be successful. - - :CaseAutomation: Automated - - :BZ: 2154184 - - :customerscenario: true - - :CaseImportance: Critical +def assert_job_invocation_result( + sat, invocation_command_id, client_hostname, expected_result='success' +): + """Asserts the job invocation finished with the expected result and fetches job output + when error occurs. Result is one of: success, pending, error, warning""" + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) + try: + assert result[expected_result] == '1' + except AssertionError as err: + raise AssertionError( + 'host output: {}'.format( + ' '.join( + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) + ) + ) + ) from err + + +@pytest.mark.upgrade +class TestAnsibleCfgMgmt: + """Test class for Configuration Management with Ansible + + :CaseComponent: Ansible-ConfigurationManagement """ - SELECTED_ROLE = 'RedHatInsights.insights-client' - SELECTED_ROLE_1 = 'theforeman.foreman_scap_client' - SELECTED_VAR = gen_string('alpha') - # disable batch tasks to test BZ#2154184 - target_sat.cli.Settings.set({'name': "foreman_tasks_proxy_batch_trigger", 'value': "false"}) - if rhel_contenthost.os_version.major <= 7: - rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) - assert rhel_contenthost.execute('yum install -y insights-client').status == 0 - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, force=True) - assert rhel_contenthost.subscribed - rhel_contenthost.add_rex_key(satellite=target_sat) - proxy_id = target_sat.nailgun_smart_proxy.id - target_host = rhel_contenthost.nailgun_host - - target_sat.cli.Ansible.roles_sync( - {'role-names': f'{SELECTED_ROLE},{SELECTED_ROLE_1}', 'proxy-id': proxy_id} - ) - - result = target_sat.cli.Host.ansible_roles_add( - {'id': target_host.id, 'ansible-role': SELECTED_ROLE} - ) - assert 'Ansible role has been associated.' 
in result[0]['message'] - - target_sat.cli.Ansible.variables_create( - {'variable': SELECTED_VAR, 'ansible-role': SELECTED_ROLE} - ) - assert SELECTED_ROLE, ( - SELECTED_VAR in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout - ) - template_id = ( - target_sat.api.JobTemplate() - .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] - .id - ) - job = target_sat.api.JobInvocation().run( - synchronous=False, - data={ - 'job_template_id': template_id, - 'targeting_type': 'static_query', - 'search_query': f'name = {rhel_contenthost.hostname}', - }, - ) - target_sat.wait_for_tasks( - f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 - ) - result = target_sat.api.JobInvocation(id=job['id']).read() - assert result.succeeded == 1 - - result = target_sat.cli.Host.ansible_roles_assign( - {'id': target_host.id, 'ansible-roles': f'{SELECTED_ROLE},{SELECTED_ROLE_1}'} - ) - assert 'Ansible roles were assigned to the host' in result[0]['message'] - - result = target_sat.cli.Host.ansible_roles_remove( - {'id': target_host.id, 'ansible-role': SELECTED_ROLE} - ) - assert 'Ansible role has been disassociated.' in result[0]['message'] - - result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) - assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] - - assert SELECTED_ROLE, ( - SELECTED_VAR not in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout - ) - - -@pytest.mark.e2e -@pytest.mark.tier2 -def test_add_and_remove_ansible_role_hostgroup(target_sat): + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('[^6].*') + def test_positive_ansible_e2e( + self, target_sat, module_sca_manifest_org, module_ak_with_cv, rhel_contenthost + ): + """ + Test successful execution of Ansible Job on host. + + :id: 0c52bc63-a41a-4f48-a980-fe49b4ecdbdc + + :steps: + 1. Register a content host with satellite + 2. Import a role into satellite + 3. Assign that role to a host + 4. Assert that the role and variable were assigned to the host successfully + 5. Run the Ansible playbook associated with that role + 6. Check if the job is executed successfully. + 7. Disassociate the Role from the host. + 8. Delete the assigned ansible role + + :expectedresults: + 1. Host should be assigned the proper role. + 2. Job execution must be successful. + 3. Operations performed with hammer must be successful. + + :BZ: 2154184 + + :customerscenario: true + """ + SELECTED_ROLE = 'RedHatInsights.insights-client' + SELECTED_ROLE_1 = 'theforeman.foreman_scap_client' + SELECTED_VAR = gen_string('alpha') + proxy_id = target_sat.nailgun_smart_proxy.id + # disable batch tasks to test BZ#2154184 + target_sat.cli.Settings.set({'name': 'foreman_tasks_proxy_batch_trigger', 'value': 'false'}) + result = rhel_contenthost.register( + module_sca_manifest_org, None, module_ak_with_cv.name, target_sat + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + if rhel_contenthost.os_version.major <= 7: + rhel_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os) + assert rhel_contenthost.execute('yum install -y insights-client').status == 0 + target_host = rhel_contenthost.nailgun_host + + target_sat.cli.Ansible.roles_sync( + {'role-names': f'{SELECTED_ROLE},{SELECTED_ROLE_1}', 'proxy-id': proxy_id} + ) + result = target_sat.cli.Host.ansible_roles_add( + {'id': target_host.id, 'ansible-role': SELECTED_ROLE} + ) + assert 'Ansible role has been associated.' 
in result[0]['message'] + + target_sat.cli.Ansible.variables_create( + {'variable': SELECTED_VAR, 'ansible-role': SELECTED_ROLE} + ) + + # verify the variable is reported as associated with the role + assert SELECTED_VAR in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout + template_id = ( + target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {rhel_contenthost.hostname}', + }, + ) + target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000 + ) + result = target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + + result = target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': f'{SELECTED_ROLE},{SELECTED_ROLE_1}'} + ) + assert 'Ansible roles were assigned to the host' in result[0]['message'] + + result = target_sat.cli.Host.ansible_roles_remove( + {'id': target_host.id, 'ansible-role': SELECTED_ROLE} + ) + assert 'Ansible role has been disassociated.' in result[0]['message'] + + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] + + # verify the variable was removed together with its role + assert SELECTED_VAR not in target_sat.cli.Ansible.variables_info({'name': SELECTED_VAR}).stdout + + @pytest.mark.e2e + @pytest.mark.tier2 + def test_add_and_remove_ansible_role_hostgroup(self, target_sat): + """ + Test add and remove functionality for ansible roles in hostgroup via CLI + + :id: 2c6fda14-4cd2-490a-b7ef-7a08f8164fad + + :customerscenario: true + + :steps: + 1. Create a hostgroup + 2. Sync a few ansible roles + 3. Assign a few ansible roles to the host group + 4. Add an ansible role to the host group + 5. Remove the added ansible roles from the host group + + :expectedresults: + 1. Ansible role assign/add/remove functionality should work as expected in CLI + + :BZ: 2029402 + """ + ROLES = [ + 'theforeman.foreman_scap_client', + 'redhat.satellite.hostgroups', + 'RedHatInsights.insights-client', + ] + proxy_id = target_sat.nailgun_smart_proxy.id + hg_name = gen_string('alpha') + result = target_sat.cli.HostGroup.create({'name': hg_name}) + assert result['name'] == hg_name + target_sat.cli.Ansible.roles_sync({'role-names': ROLES, 'proxy-id': proxy_id}) + result = target_sat.cli.HostGroup.ansible_roles_assign( + {'name': hg_name, 'ansible-roles': f'{ROLES[1]},{ROLES[2]}'} + ) + assert 'Ansible roles were assigned to the hostgroup' in result[0]['message'] + result = target_sat.cli.HostGroup.ansible_roles_add( + {'name': hg_name, 'ansible-role': ROLES[0]} + ) + assert 'Ansible role has been associated.' in result[0]['message'] + result = target_sat.cli.HostGroup.ansible_roles_remove( + {'name': hg_name, 'ansible-role': ROLES[0]} + ) + assert 'Ansible role has been disassociated.' in result[0]['message'] + + +@pytest.mark.tier3 +@pytest.mark.upgrade +class TestAnsibleREX: + """Test class for remote execution via Ansible + + :CaseComponent: Ansible-RemoteExecution """
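The REX tests that follow share one invoke-then-assert shape built on the assert_job_invocation_result helper defined at the top of this file. A minimal sketch of that shape, with an illustrative hostname; the job template name is the stock one used throughout this diff:

    job = target_sat.cli_factory.job_invocation(
        {
            'job-template': 'Run Command - Ansible Default',
            'inputs': 'command=true',
            'search-query': 'name ~ client.example.com',  # illustrative host
        }
    )
    # raises AssertionError with the collected job output if the run did not succeed
    assert_job_invocation_result(target_sat, job['id'], 'client.example.com')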
Remove the added ansible roles from the host group - :expectedresults: - 1. Ansible role assign/add/remove functionality should work as expected in cli - - :BZ: 2029402 - - :CaseAutomation: Automated - """ - ROLES = [ - 'theforeman.foreman_scap_client', - 'redhat.satellite.hostgroups', - 'RedHatInsights.insights-client', - ] - proxy_id = target_sat.nailgun_smart_proxy.id - hg_name = gen_string('alpha') - result = target_sat.cli.HostGroup.create({'name': hg_name}) - assert result['name'] == hg_name - target_sat.cli.Ansible.roles_sync({'role-names': ROLES, 'proxy-id': proxy_id}) - result = target_sat.cli.HostGroup.ansible_roles_assign( - {'name': hg_name, 'ansible-roles': f'{ROLES[1]},{ROLES[2]}'} - ) - assert 'Ansible roles were assigned to the hostgroup' in result[0]['message'] - result = target_sat.cli.HostGroup.ansible_roles_add({'name': hg_name, 'ansible-role': ROLES[0]}) - assert 'Ansible role has been associated.' in result[0]['message'] - result = target_sat.cli.HostGroup.ansible_roles_remove( - {'name': hg_name, 'ansible-role': ROLES[0]} + @pytest.mark.pit_client + @pytest.mark.pit_server + @pytest.mark.rhel_ver_match('[^6]') + def test_positive_run_effective_user_job(self, rex_contenthost, target_sat): + """Tests that an Ansible REX job run with an effective user succeeds + + :id: a5fa20d8-c2bd-4bbf-a6dc-bf307b59dd8c + + :steps: + 0. Create a VM and register to SAT and prepare for REX (ssh key) + 1. Run Ansible Command job for the host to create a user + 2. Run Ansible Command job using effective user + 3. Check that the job result at the host was created under that user + + :expectedresults: multiple asserts along the code + + :parametrized: yes + """ + client = rex_contenthost + # create a user on client via remote job + username = gen_string('alpha') + filename = gen_string('alpha') + make_user_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': f'command=useradd -m {username}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, make_user_job['id'], client.hostname) + # create a file as new user + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': f'command=touch /home/{username}/{filename}', + 'search-query': f'name ~ {client.hostname}', + 'effective-user': username, + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + + # check the file owner + result = client.execute( + f'''stat -c '%U' /home/{username}/{filename}''', + ) + # assert the file is owned by the effective user + assert username == result.stdout.strip('\n'), 'file ownership mismatch' + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_reccuring_job(self, rex_contenthost, target_sat): + """Tests that a recurring Ansible REX job runs successfully multiple times + + :id: 49b0d31d-58f9-47f1-aa5d-561a1dcb0d66 + + :setup: + 1. Create a VM, register to SAT and configure REX (ssh-key) + + :steps: + 1. Run recurring Ansible Command job for the host + 2. 
Check the multiple job results at the host + + :expectedresults: multiple asserts along the code + + :BZ: 2129432 + + :customerscenario: true + + :parametrized: yes + """ + client = rex_contenthost + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': 'command=ls', + 'search-query': f'name ~ {client.hostname}', + 'cron-line': '* * * * *', # every minute + 'max-iteration': 2, # just two runs + } + ) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + sleep(150) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) + assert rec_logic['state'] == 'finished' + assert rec_logic['iteration'] == '2' + # BZ#2129432: the recurring logic info output must expose all of these fields + rec_logic_keys = rec_logic.keys() + assert 'action' in rec_logic_keys + assert 'last-occurrence' in rec_logic_keys + assert 'next-occurrence' in rec_logic_keys + assert 'state' in rec_logic_keys + assert 'purpose' in rec_logic_keys + assert 'iteration' in rec_logic_keys + assert 'iteration-limit' in rec_logic_keys + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_concurrent_jobs(self, rex_contenthosts, target_sat): + """Tests Ansible REX concurrent jobs without batch trigger + + :id: ad0f108c-03f2-49c7-8732-b1056570567b + + :steps: + 1. Create 2 hosts, disable foreman_tasks_proxy_batch_trigger + 2. Run Ansible Command job with concurrency-setting + + :expectedresults: multiple asserts along the code + + :BZ: 1817320 + + :customerscenario: true + + :parametrized: yes + """ + clients = rex_contenthosts + param_name = 'foreman_tasks_proxy_batch_trigger' + target_sat.cli.GlobalParameter().set({'name': param_name, 'value': 'false'}) + output_msgs = [] + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Ansible Default', + 'inputs': 'command=ls', + 'search-query': f'name ~ {clients[0].hostname} or name ~ {clients[1].hostname}', + 'concurrency-level': 2, + } + ) + for vm in clients: + output_msgs.append( + 'host output from {}: {}'.format( + vm.hostname, + ' '.join( + target_sat.cli.JobInvocation.get_output( + {'id': invocation_command['id'], 'host': vm.hostname} + ) + ), + ) + ) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert result['success'] == '2', output_msgs + target_sat.cli.GlobalParameter().delete({'name': param_name}) + assert len(target_sat.cli.GlobalParameter().list({'search': param_name})) == 0 + + @pytest.mark.rhel_ver_list([8]) + def test_positive_run_serial(self, rex_contenthosts, target_sat): + """Tests that subtasks in a job run one by one when concurrency level is set to 1 + + :id: 5ce39447-82d0-42df-81be-16ed3d67a2a4 + + :setup: + 1. Create 2 hosts, register to SAT and configure REX (ssh-key) + + :steps: + 1. 
Run a bash command job with concurrency level 1 + + :expectedresults: First subtask should run immediately, second one after the first one finishes + + :parametrized: yes + """ + hosts = rex_contenthosts + output_msgs = [] + template_file = f'/root/{gen_string("alpha")}.template' + target_sat.execute( + f"echo 'rm /root/test-<%= @host %>; echo $(date +%s) >> /root/test-<%= @host %>; sleep 120; echo $(date +%s) >> /root/test-<%= @host %>' > {template_file}" + ) + template = target_sat.cli.JobTemplate.create( + { + 'name': gen_string('alpha'), + 'file': template_file, + 'job-category': 'Commands', + 'provider-type': 'script', + } + ) + invocation = target_sat.cli_factory.job_invocation( + { + 'job-template': template['name'], + 'search-query': f'name ~ {hosts[0].hostname} or name ~ {hosts[1].hostname}', + 'concurrency-level': 1, + } + ) + for vm in hosts: + output_msgs.append( + 'host output from {}: {}'.format( + vm.hostname, + ' '.join( + target_sat.cli.JobInvocation.get_output( + {'id': invocation['id'], 'host': vm.hostname} + ) + ), + ) + ) + result = target_sat.cli.JobInvocation.info({'id': invocation['id']}) + assert result['success'] == '2', output_msgs + # assert for time diffs + file1 = hosts[0].execute('cat /root/test-$(hostname)').stdout + file2 = hosts[1].execute('cat /root/test-$(hostname)').stdout + file1_start, file1_end = map(int, file1.rstrip().split('\n')) + file2_start, file2_end = map(int, file2.rstrip().split('\n')) + if file1_start > file2_start: + file1_start, file1_end, file2_start, file2_end = ( + file2_start, + file2_end, + file1_start, + file1_end, + ) + assert file1_end - file1_start >= 120 + assert file2_end - file2_start >= 120 + assert file2_start >= file1_end # the jobs did NOT run concurrently + + @pytest.mark.e2e + @pytest.mark.no_containers + @pytest.mark.pit_server + @pytest.mark.rhel_ver_match('[^6].*') + @pytest.mark.skipif( + (not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url' ) - assert 'Ansible role has been disassociated.' in result[0]['message'] + def test_positive_run_packages_and_services_job( + self, rhel_contenthost, module_sca_manifest_org, module_ak_with_cv, target_sat + ): + """Tests Ansible REX job can install packages and start services + + :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42 + + :setup: + 1. Create a VM, register to SAT and configure REX (ssh-key) + + :steps: + 1. Run Ansible Package job for the host to install a package + 2. Check the package is present at the host + 3. Run Ansible Service job for the host to start a service + 4. 
Check the service is started on the host + + :expectedresults: multiple asserts along the code + + :BZ: 1872688, 1811166 + + :customerscenario: true + + :parametrized: yes + """ + client = rhel_contenthost + packages = ['tapir'] + result = client.register( + module_sca_manifest_org, + None, + module_ak_with_cv.name, + target_sat, + repo=settings.repos.yum_3.url, + ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + # install package + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Package Action - Ansible Default', + 'inputs': 'state=latest, name={}'.format(*packages), + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.run(f'rpm -q {" ".join(packages)}') + assert result.status == 0 + + # stop a service + service = 'rsyslog' + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Service Action - Ansible Default', + 'inputs': f'state=stopped, name={service}', + 'search-query': f"name ~ {client.hostname}", + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.execute(f'systemctl status {service}') + # systemctl status exits with 3 when the unit is inactive (stopped) + assert result.status == 3 + + # start it again + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Service Action - Ansible Default', + 'inputs': f'state=started, name={service}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.execute(f'systemctl status {service}') + assert result.status == 0 + + @pytest.mark.rhel_ver_list([8]) + def test_positive_install_ansible_collection(self, rex_contenthost, target_sat): + """Test whether Ansible collection can be installed via Ansible REX + + :id: ad25aee5-4ea3-4743-a301-1c6271856f79 + + :steps: + 1. Upload a manifest. + 2. Register content host to Satellite with REX setup + 3. Enable Ansible repo on content host. + 4. Install ansible or ansible-core package + 5. Run REX job to install Ansible collection on content host. + + :expectedresults: Ansible collection can be installed on content host via REX. 
+ """ + client = rex_contenthost + # Enable Ansible repository and Install ansible or ansible-core package + client.create_custom_repos(rhel8_aps=settings.repos.rhel8_os.appstream) + assert client.execute('dnf -y install ansible-core').status == 0 + + collection_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Ansible Collection - Install from Galaxy', + 'inputs': 'ansible_collections_list="oasis_roles.system"', + 'search-query': f'name ~ {client.hostname}', + } + ) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) + assert result['success'] == '1' + collection_path = client.execute('ls /etc/ansible/collections/ansible_collections').stdout + assert 'oasis_roles' in collection_path + + # Extend test with custom collections_path advanced input field + collection_job = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Ansible Collection - Install from Galaxy', + 'inputs': 'ansible_collections_list="oasis_roles.system", collections_path="~/"', + 'search-query': f'name ~ {client.hostname}', + } + ) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) + assert result['success'] == '1' + collection_path = client.execute('ls ~/ansible_collections').stdout + assert 'oasis_roles' in collection_path diff --git a/tests/foreman/cli/test_architecture.py b/tests/foreman/cli/test_architecture.py index b68ff28b618..7578fe577b6 100644 --- a/tests/foreman/cli/test_architecture.py +++ b/tests/foreman/cli/test_architecture.py @@ -4,40 +4,35 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_choice +import pytest -from robottelo.cli.architecture import Architecture -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_architecture -from robottelo.utils.datafactory import invalid_id_list -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_id_list, + invalid_values_list, + parametrized, + valid_data_list, +) class TestArchitecture: """Architecture CLI related tests.""" @pytest.fixture(scope='class') - def class_architecture(self): + def class_architecture(self, class_target_sat): """Shared architecture for tests""" - return make_architecture() + return class_target_sat.cli_factory.make_architecture() @pytest.mark.tier1 - def test_positive_CRUD(self): + def test_positive_CRUD(self, module_target_sat): """Create a new Architecture, update the name and delete the Architecture itself. 
:id: cd8654b8-e603-11ea-adc1-0242ac120002 @@ -50,18 +45,18 @@ def test_positive_CRUD(self): name = gen_choice(list(valid_data_list().values())) new_name = gen_choice(list(valid_data_list().values())) - architecture = make_architecture({'name': name}) + architecture = module_target_sat.cli_factory.make_architecture({'name': name}) assert architecture['name'] == name - Architecture.update({'id': architecture['id'], 'new-name': new_name}) - architecture = Architecture.info({'id': architecture['id']}) + module_target_sat.cli.Architecture.update({'id': architecture['id'], 'new-name': new_name}) + architecture = module_target_sat.cli.Architecture.info({'id': architecture['id']}) assert architecture['name'] == new_name - Architecture.delete({'id': architecture['id']}) + module_target_sat.cli.Architecture.delete({'id': architecture['id']}) with pytest.raises(CLIReturnCodeError): - Architecture.info({'id': architecture['id']}) + module_target_sat.cli.Architecture.info({'id': architecture['id']}) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, name, module_target_sat): """Don't create an Architecture with invalid data. :id: cfed972e-9b09-4852-bdd2-b5a8a8aed170 @@ -74,13 +69,13 @@ def test_negative_create_with_name(self, name): """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.create({'name': name}) + module_target_sat.cli.Architecture.create({'name': name}) assert 'Could not create the architecture:' in error.value.message @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, class_architecture, new_name): + def test_negative_update_name(self, class_architecture, new_name, module_target_sat): """Create Architecture then fail to update its name :id: 037c4892-5e62-46dd-a2ed-92243e870e40 @@ -93,16 +88,18 @@ def test_negative_update_name(self, class_architecture, new_name): """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.update({'id': class_architecture['id'], 'new-name': new_name}) + module_target_sat.cli.Architecture.update( + {'id': class_architecture['id'], 'new-name': new_name} + ) assert 'Could not update the architecture:' in error.value.message - result = Architecture.info({'id': class_architecture['id']}) + result = module_target_sat.cli.Architecture.info({'id': class_architecture['id']}) assert class_architecture['name'] == result['name'] @pytest.mark.tier1 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) - def test_negative_delete_by_id(self, entity_id): + def test_negative_delete_by_id(self, entity_id, module_target_sat): """Delete architecture by invalid ID :id: 78bae664-6493-4c74-a587-94170f20746e @@ -114,6 +111,6 @@ def test_negative_delete_by_id(self, entity_id): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.delete({'id': entity_id}) + module_target_sat.cli.Architecture.delete({'id': entity_id}) assert 'Could not delete the architecture' in error.value.message diff --git a/tests/foreman/cli/test_auth.py b/tests/foreman/cli/test_auth.py index ddcb174585d..59aafee0be5 100644 --- a/tests/foreman/cli/test_auth.py +++ b/tests/foreman/cli/test_auth.py @@ -4,32 +4,21 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ from time import sleep -import pytest 
from fauxfactory import gen_string +import pytest -from robottelo.cli.auth import Auth -from robottelo.cli.auth import AuthLogin -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_user -from robottelo.cli.org import Org -from robottelo.cli.settings import Settings -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import HAMMER_CONFIG +from robottelo.exceptions import CLIReturnCodeError LOGEDIN_MSG = "Session exists, currently logged in as '{0}'" NOTCONF_MSG = "Credentials are not configured." @@ -53,18 +42,20 @@ def configure_sessions(satellite, enable=True, add_default_creds=False): @pytest.fixture(scope='module') -def admin_user(): +def admin_user(module_target_sat): """create the admin role user for tests""" uname_admin = gen_string('alpha') - return make_user({'login': uname_admin, 'password': password, 'admin': '1'}) + return module_target_sat.cli_factory.user( + {'login': uname_admin, 'password': password, 'admin': '1'} + ) @pytest.fixture(scope='module') -def non_admin_user(): +def non_admin_user(module_target_sat): """create the non-admin role user for tests""" uname_viewer = gen_string('alpha') - user = make_user({'login': uname_viewer, 'password': password}) - User.add_role({'login': uname_viewer, 'role': 'Viewer'}) + user = module_target_sat.cli_factory.user({'login': uname_viewer, 'password': password}) + module_target_sat.cli.User.add_role({'login': uname_viewer, 'role': 'Viewer'}) return user @@ -74,7 +65,7 @@ def test_positive_create_session(admin_user, target_sat): :id: fcee7f5f-1040-41a9-bf17-6d0c24a93e22 - :Steps: + :steps: 1. Set use_sessions, set short expiration time 2. Authenticate, assert credentials are not demanded @@ -86,24 +77,24 @@ def test_positive_create_session(admin_user, target_sat): expires after specified time """ try: - idle_timeout = Settings.list({'search': 'name=idle_timeout'})[0]['value'] - Settings.set({'name': 'idle_timeout', 'value': 1}) + idle_timeout = target_sat.cli.Settings.list({'search': 'name=idle_timeout'})[0]['value'] + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': 1}) result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() + assert target_sat.cli.Org.with_user().list() # wait until session expires sleep(70) with pytest.raises(CLIReturnCodeError): - Org.with_user().list() - result = Auth.with_user().status() + target_sat.cli.Org.with_user().list() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] finally: # reset timeout to default - Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) @pytest.mark.tier1 @@ -113,7 +104,7 @@ def test_positive_disable_session(admin_user, target_sat): :id: 38ee0d85-c2fe-4cac-a992-c5dbcec11031 - :Steps: + :steps: 1. Set use_sessions 2. Authenticate, assert credentials are not demanded @@ -121,22 +112,21 @@ def test_positive_disable_session(admin_user, target_sat): 3. 
Disable use_sessions :expectedresults: The session is terminated - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() + assert target_sat.cli.Org.with_user().list() # disabling sessions result = configure_sessions(satellite=target_sat, enable=False) assert result == 0, 'Failed to configure hammer sessions' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -145,7 +135,7 @@ def test_positive_log_out_from_session(admin_user, target_sat): :id: 0ba05f2d-7b83-4b0c-a04c-80e62b7c4cf2 - :Steps: + :steps: 1. Set use_sessions 2. Authenticate, assert credentials are not demanded @@ -153,20 +143,19 @@ def test_positive_log_out_from_session(admin_user, target_sat): 3. Run `hammer auth logout` :expectedresults: The session is terminated - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() - Auth.logout() - result = Auth.with_user().status() + assert target_sat.cli.Org.with_user().list() + target_sat.cli.Auth.logout() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -175,7 +164,7 @@ def test_positive_change_session(admin_user, non_admin_user, target_sat): :id: b6ea6f3c-fcbd-4e7b-97bd-f3e0e6b9da8f - :Steps: + :steps: 1. Set use_sessions 2. 
Authenticate, assert credentials are not demanded @@ -185,19 +174,18 @@ def test_positive_change_session(admin_user, non_admin_user, target_sat): :CaseImportance: High :expectedresults: The session is altered - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert User.with_user().list() - AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) - result = Auth.with_user().status() + assert target_sat.cli.User.with_user().list() + target_sat.cli.AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(non_admin_user['login']) in result[0]['message'] - assert User.with_user().list() + assert target_sat.cli.User.with_user().list() @pytest.mark.tier1 @@ -206,7 +194,7 @@ def test_positive_session_survives_unauthenticated_call(admin_user, target_sat): :id: 8bc304a0-70ea-489c-9c3f-ea8343c5284c - :Steps: + :steps: 1. Set use_sessions 2. Authenticate, assert credentials are not demanded @@ -216,20 +204,19 @@ def test_positive_session_survives_unauthenticated_call(admin_user, target_sat): :CaseImportance: Medium :expectedresults: The session is unchanged - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - Org.with_user().list() + target_sat.cli.Org.with_user().list() result = target_sat.execute('hammer ping') assert result.status == 0, 'Failed to run hammer ping' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -240,7 +227,7 @@ def test_positive_session_survives_failed_login(admin_user, non_admin_user, targ :BZ: 1465552 - :Steps: + :steps: 1. Set use_sessions 2. Authenticate, assert credentials are not demanded @@ -248,21 +235,22 @@ def test_positive_session_survives_failed_login(admin_user, non_admin_user, targ 3. 
Run login with invalid credentials :expectedresults: The session is unchanged - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() # using invalid password with pytest.raises(CLIReturnCodeError): - AuthLogin.basic({'username': non_admin_user['login'], 'password': gen_string('alpha')}) + target_sat.cli.AuthLogin.basic( + {'username': non_admin_user['login'], 'password': gen_string('alpha')} + ) # checking the session status again - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.e2e @@ -277,7 +265,7 @@ def test_positive_session_preceeds_saved_credentials(admin_user, target_sat): :CaseImportance: High - :Steps: + :steps: 1. Set use_sessions, set username and password, set short expiration time @@ -287,23 +275,22 @@ def test_positive_session_preceeds_saved_credentials(admin_user, target_sat): :expectedresults: Session expires after specified time and saved credentials are not applied - """ try: - idle_timeout = Settings.list({'search': 'name=idle_timeout'})[0]['value'] - Settings.set({'name': 'idle_timeout', 'value': 1}) + idle_timeout = target_sat.cli.Settings.list({'search': 'name=idle_timeout'})[0]['value'] + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': 1}) result = configure_sessions(satellite=target_sat, add_default_creds=True) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials sleep(70) with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() finally: # reset timeout to default - Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) @pytest.mark.tier1 @@ -318,10 +305,10 @@ def test_negative_no_credentials(target_sat): """ result = configure_sessions(satellite=target_sat, enable=False) assert result == 0, 'Failed to configure hammer sessions' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -333,13 +320,14 @@ def test_negative_no_permissions(admin_user, non_admin_user, target_sat): :expectedresults: Command is not executed :CaseImportance: High - """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': non_admin_user['login'], 
'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(non_admin_user['login']) in result[0]['message'] # try to update user from viewer's session with pytest.raises(CLIReturnCodeError): - User.with_user().update({'login': admin_user['login'], 'new-login': gen_string('alpha')}) + target_sat.cli.User.with_user().update( + {'login': admin_user['login'], 'new-login': gen_string('alpha')} + ) diff --git a/tests/foreman/cli/test_bootdisk.py b/tests/foreman/cli/test_bootdisk.py new file mode 100644 index 00000000000..e6515e1af9f --- /dev/null +++ b/tests/foreman/cli/test_bootdisk.py @@ -0,0 +1,86 @@ +"""Tests for BootdiskPlugin + +:Requirement: Bootdisk + +:CaseAutomation: Automated + +:CaseComponent: BootdiskPlugin + +:Team: Rocket + +:CaseImportance: High + +""" +from fauxfactory import gen_mac, gen_string +import pytest + +from robottelo.config import settings +from robottelo.constants import HTTPS_MEDIUM_URL + + +@pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True) +def test_positive_bootdisk_download_https( + module_location, + module_sync_kickstart_content, + module_provisioning_capsule, + module_target_sat, + module_sca_manifest_org, + module_default_org_view, + module_lce_library, + default_architecture, + default_partitiontable, +): + """Verify bootdisk can be downloaded when using HTTPS URL media. + + :id: ebced3cf-99e8-4ed6-a41d-d53789e90f8e + + :steps: + 1. Create a host with https url media + 2. Download the full host bootdisk + 3. Check if bootdisk is downloaded properly + + :expectedresults: Full host bootdisk is downloaded properly + + :BZ: 2173199 + + :customerscenario: true + + :parametrized: yes + """ + + macaddress = gen_mac(multicast=False) + capsule = module_target_sat.nailgun_smart_proxy + # create medium with https url + media = module_target_sat.cli_factory.make_medium( + { + 'name': gen_string('alpha'), + 'operatingsystem-ids': module_sync_kickstart_content.os.id, + 'location-ids': module_provisioning_capsule.id, + 'organization-ids': module_sca_manifest_org.id, + 'path': HTTPS_MEDIUM_URL, + 'os-family': 'Redhat', + } + ) + host = module_target_sat.cli_factory.make_host( + { + 'organization-id': module_sca_manifest_org.id, + 'location-id': module_location.id, + 'name': gen_string('alpha').lower(), + 'mac': macaddress, + 'operatingsystem-id': module_sync_kickstart_content.os.id, + 'medium-id': media['id'], + 'architecture-id': default_architecture.id, + 'domain-id': module_sync_kickstart_content.domain.id, + 'build': 'true', + 'root-password': settings.provisioning.host_root_password, + 'partition-table-id': default_partitiontable.id, + 'content-source-id': capsule.id, + 'content-view-id': module_default_org_view.id, + 'lifecycle-environment-id': module_lce_library.id, + } + ) + # Check if full-host bootdisk can be downloaded. 
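+    # 'full': 'true' requests the full host image rather than the minimal network bootdisk + # (the option is passed through to hammer as `bootdisk host --full true`)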
+        bootdisk = module_target_sat.cli.Bootdisk.host({'host-id': host['id'], 'full': 'true'})
+        assert 'Successfully downloaded host disk image' in bootdisk['message']
+        module_target_sat.api.Host(id=host.id).delete()
+        module_target_sat.api.Media(id=media['id']).delete()
diff --git a/tests/foreman/cli/test_bootstrap_script.py b/tests/foreman/cli/test_bootstrap_script.py
index e3b910a0a87..f4b71c24bd1 100644
--- a/tests/foreman/cli/test_bootstrap_script.py
+++ b/tests/foreman/cli/test_bootstrap_script.py
@@ -4,17 +4,12 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Bootstrap

 :Team: Platform

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import pytest
@@ -35,7 +30,7 @@ def test_positive_register(

     :id: e34561fd-e0d6-4587-84eb-f86bd131aab1

-    :Steps:
+    :steps:

         1. Ensure system is not registered
         2. Register a system
diff --git a/tests/foreman/cli/test_capsule.py b/tests/foreman/cli/test_capsule.py
index 30c3efcba7a..ddcc100326a 100644
--- a/tests/foreman/cli/test_capsule.py
+++ b/tests/foreman/cli/test_capsule.py
@@ -1,392 +1,59 @@
 """Test class for the capsule CLI.

-:Requirement: Capsule
+:Requirement: ForemanProxy

 :CaseAutomation: Automated

-:CaseLevel: Component
+:CaseComponent: ForemanProxy

-:CaseComponent: Capsule
-
-:Team: Endeavour
-
-:TestType: Functional
+:Team: Platform

 :CaseImportance: Critical

-:Upstream: No
 """
 import pytest
-from fauxfactory import gen_alphanumeric
-from fauxfactory import gen_string
-
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.proxy import Proxy
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
-from robottelo.utils.issue_handlers import is_open
-
 pytestmark = [pytest.mark.run_in_one_thread]


-def _make_proxy(request, target_sat, options=None):
-    """Create a Proxy and add the finalizer"""
-    proxy = target_sat.cli_factory.make_proxy(options)
-
-    @request.addfinalizer
-    def _cleanup():
-        target_sat.cli.Proxy.delete({'id': proxy['id']})
-
-    return proxy
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier1
-def test_negative_create_with_url(target_sat):
-    """Proxy creation with random URL
-
-    :id: 9050b362-c710-43ba-9d77-7680b8f9ed8c
-
-    :expectedresults: Proxy is not created
-
-    :CaseLevel: Component
-
-    """
-    # Create a random proxy
-    with pytest.raises(CLIFactoryError, match='Could not create the proxy:'):
-        target_sat.cli_factory.make_proxy(
-            {
-                'url': f"http://{gen_string('alpha', 6)}:{gen_string('numeric', 4)}",
-            }
-        )
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier1
-@pytest.mark.parametrize('name', **parametrized(valid_data_list()))
-def test_positive_create_with_name(request, target_sat, name):
-    """Proxy creation with the home proxy
-
-    :id: 7decd7a3-2d35-43ff-9a20-de44e83c7389
-
-    :expectedresults: Proxy is created
-
-    :CaseLevel: Component
-
-    :Parametrized: Yes
-
-    :BZ: 1398695, 2084661
-    """
-    if is_open('BZ:2084661') and 'html' in request.node.name:
-        pytest.skip()
-    proxy = _make_proxy(request, target_sat, options={'name': name})
-    assert proxy['name'] == name
-
-
 @pytest.mark.skip_if_not_set('fake_capsules')
 @pytest.mark.tier1
-@pytest.mark.parametrize('name', **parametrized(valid_data_list()))
-def test_positive_delete_by_id(request, name, target_sat):
-    """Proxy deletion with the home proxy
-
-    :id: 1b6973b1-259d-4866-b36f-c2d5fb154035
-
-    :expectedresults: Proxy is deleted
-
-    :CaseLevel: Component
-
-    :Parametrized: Yes
-
-    :BZ: 1398695, 2084661
-    """
-    if is_open('BZ:2084661') and 'html' in request.node.name:
-        pytest.skip()
-    proxy = target_sat.cli_factory.make_proxy({'name': name})
-    Proxy.delete({'id': proxy['id']})
-    with pytest.raises(CLIReturnCodeError):
-        Proxy.info({'id': proxy['id']})
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier1
-def test_positive_update_name(request, target_sat):
-    """Proxy name update with the home proxy
-
-    :id: 1a02a06b-e9ab-4b9b-bcb0-ac7060188316
-
-    :expectedresults: Proxy has the name updated
-
-    :CaseLevel: Component
-
-    :BZ: 1398695, 2084661
-    """
-    proxy = _make_proxy(request, target_sat, options={'name': gen_alphanumeric()})
-    valid_data = valid_data_list()
-    if is_open('BZ:2084661') and 'html' in request.node.name:
-        del valid_data['html']
-    for new_name in valid_data.values():
-        newport = target_sat.available_capsule_port
-        with target_sat.default_url_on_new_port(9090, newport) as url:
-            Proxy.update({'id': proxy['id'], 'name': new_name, 'url': url})
-            proxy = Proxy.info({'id': proxy['id']})
-            assert proxy['name'] == new_name
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier2
-def test_positive_refresh_features_by_id(request, target_sat):
-    """Refresh smart proxy features, search for proxy by id
-
-    :id: d3db63ce-b877-40eb-a863-294c12489ddd
-
-    :expectedresults: Proxy features are refreshed
-
-    :CaseLevel: Integration
-
-    :CaseImportance: High
-
-    """
-    # Since we want to run multiple commands against our fake capsule, we
-    # need the tunnel kept open in order not to allow different concurrent
-    # test to claim it. Thus we want to manage the tunnel manually.
-
-    # get an available port for our fake capsule
-    port = target_sat.available_capsule_port
-    with target_sat.default_url_on_new_port(9090, port) as url:
-        proxy = _make_proxy(request, target_sat, options={'url': url})
-        Proxy.refresh_features({'id': proxy['id']})
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier2
-def test_positive_refresh_features_by_name(request, target_sat):
-    """Refresh smart proxy features, search for proxy by name
-
-    :id: 2ddd0097-8f65-430e-963d-a3b5dcffe86b
-
-    :expectedresults: Proxy features are refreshed
-
-    :CaseLevel: Integration
-
-    :CaseImportance: High
-
-    """
-    # Since we want to run multiple commands against our fake capsule, we
-    # need the tunnel kept open in order not to allow different concurrent
-    # test to claim it. Thus we want to manage the tunnel manually.
-
-    # get an available port for our fake capsule
-    port = target_sat.available_capsule_port
-    with target_sat.default_url_on_new_port(9090, port) as url:
-        proxy = _make_proxy(request, target_sat, options={'url': url})
-        Proxy.refresh_features({'id': proxy['name']})
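For reference, the fake-capsule pattern the removed proxy tests relied on condenses to a few lines. A minimal sketch, assuming robottelo's `target_sat` and `request` fixtures and the `sat.cli` wrappers this PR migrates to; `available_capsule_port` and `default_url_on_new_port` are the helpers shown in the removed code above:

    # Claim a free port and keep the SSH tunnel open for the whole block so a
    # concurrent test cannot grab the same port, then register cleanup.
    port = target_sat.available_capsule_port
    with target_sat.default_url_on_new_port(9090, port) as url:
        proxy = target_sat.cli_factory.make_proxy({'url': url})
        request.addfinalizer(lambda: target_sat.cli.Proxy.delete({'id': proxy['id']}))
        target_sat.cli.Proxy.refresh_features({'id': proxy['id']})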
-
-
-@pytest.mark.skip_if_not_set('fake_capsules')
-@pytest.mark.tier1
-def test_positive_import_puppet_classes(session_puppet_enabled_sat, puppet_proxy_port_range):
+def test_positive_import_puppet_classes(session_puppet_enabled_sat):
     """Import puppet classes from proxy

     :id: 42e3a9c0-62e1-4049-9667-f3c0cdfe0b04

     :expectedresults: Puppet classes are imported from proxy
-
-    :CaseLevel: Component
-
     """
     with session_puppet_enabled_sat as puppet_sat:
         port = puppet_sat.available_capsule_port
         with puppet_sat.default_url_on_new_port(9090, port) as url:
             proxy = puppet_sat.cli_factory.make_proxy({'url': url})
-            Proxy.import_classes({'id': proxy['id']})
-            Proxy.delete({'id': proxy['id']})
-
-
-@pytest.mark.stubbed
-def test_positive_provision():
-    """User can provision through a capsule
-
-    :id: 1b91e6ed-56bb-4a21-9b69-8b41242458c5
-
-    :Setup: Some valid, functional compute resource (perhaps one variation
-        of this case for each supported compute resource type). Also,
-        functioning capsule with proxy is required.
-
-    :Steps:
-
-        1. Attempt to route provisioning content through capsule that is
-           using a proxy
-        2. Attempt to provision instance
-
-    :expectedresults: Instance can be provisioned, with content coming
-        through proxy-enabled capsule.
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_register():
-    """User can register system through proxy-enabled capsule
-
-    :id: dc544ec8-0320-4897-a6ca-ce9ebad27975
-
-    :Steps: attempt to register a system through a proxy-enabled capsule
-
-    :expectedresults: system is successfully registered
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_unregister():
-    """User can unregister system through proxy-enabled capsule
-
-    :id: 9b7714da-74be-4c0a-9209-9d15c2c98eaa
-
-    :Steps: attempt to unregister a system through a proxy-enabled capsule
-
-    :expectedresults: system is successfully unregistered
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_subscribe():
-    """User can subscribe system to content through proxy-enabled
-    capsule
-
-    :id: 091bba73-bc78-4b8c-ac27-5c10e9838cfb
-
-    :Setup: Content source types configured/synced for [RH, Custom, Puppet,
-        Docker] etc.
-
-    :Steps: attempt to subscribe a system to a content type variation, via
-        a proxy-enabled capsule
-
-    :expectedresults: system is successfully subscribed to each content
-        type
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_consume_content():
-    """User can consume content on system, from a content source,
-    through proxy-enabled capsule
-
-    :id: a3fb9879-7799-4743-99a8-963701e687c1
-
-    :Setup: Content source types configured/synced for [RH, Custom, Puppet,
-        Docker] etc.
-
-    :Steps:
-
-        1. attempt to subscribe a system to a content type variation, via a
-           proxy-enabled capsule
-        2. attempt to install content RPMs via proxy-enabled capsule
-
-    :expectedresults: system successfully consumes content
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_unsubscribe():
-    """User can unsubscribe system from content through
-    proxy-enabled capsule
-
-    :id: 0d34713d-3d60-4e5a-ada6-9a24aa865cb4
-
-    :Setup: Content source types configured/synced for [RH, Custom, Puppet]
-        etc.
-
-    :Steps:
-
-        1. attempt to subscribe a system to a content type variation, via a
-           proxy-enabled capsule
-        2. attempt to unsubscribe a system from said content type(s) via a
-           proxy-enabled capsule
-
-    :expectedresults: system is successfully unsubscribed from each content
-        type
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_reregister_with_capsule_cert():
-    """system can register via capsule using cert provided by
-    the capsule itself.
-
-    :id: 785b94ea-ffbf-4c18-8160-f705e3d7cbe6
-
-    :Setup: functional capsule and certs rpm installed on target client.
-
-    :Steps:
-
-        1. Attempt to register from parent satellite; unregister and remove
-           cert rpm
-        2. Attempt to reregister using same credentials and certs from a
-           functional capsule.
-
-    :expectedresults: Registration works, and certs RPM installed from
-        capsule.
-
-    :CaseAutomation: NotAutomated
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_ssl_capsule():
-    """Assure SSL functionality for capsules
-
-    :id: 4d19bee6-15d4-4fd5-b3de-9144608cdba7
-
-    :Setup: A capsule installed with SSL enabled.
-
-    :Steps: Execute basic steps from above (register, subscribe, consume,
-        unsubscribe, unregister) while connected to a capsule that is
-        SSL-enabled
-
-    :expectedresults: No failures executing said test scenarios against
-        SSL, baseline functionality identical to non-SSL
-
-    :CaseAutomation: NotAutomated
-    """
+            puppet_sat.cli.Proxy.import_classes({'id': proxy['id']})
+            puppet_sat.cli.Proxy.delete({'id': proxy['id']})


 @pytest.mark.stubbed
-def test_positive_enable_bmc():
-    """Enable BMC feature on smart-proxy
+@pytest.mark.e2e
+@pytest.mark.upgrade
+def test_positive_capsule_content():
+    """Registered and provisioned hosts can consume content from capsule

-    :id: 9cc4db2f-3bec-4e51-89a2-18a0a6167012
+    :id: 7e5493de-b27b-4adc-ba18-4dc2e94e7305

-    :Setup: A capsule installed with SSL enabled.
+    :Setup: Capsule with some content synced

-    :Steps:
+    :steps:

-        1. Enable BMC feature on proxy by running installer with:
-           ``katello-installer --foreman-proxy-bmc 'true'``
-        2. Please make sure to check default values to other BMC options.
-           Should be like below: ``--foreman-proxy-bmc-default-provider
-           BMC default provider. (default: "ipmitool")``
-           ``--foreman-proxy-bmc-listen-on BMC proxy to listen on https,
-           http, or both (default: "https")``
-        3. Check if BMC plugin is enabled with: ``#cat
-           /etc/foreman-proxy/settings.d/bmc.yml | grep enabled``
-        4. Restart foreman-proxy service
+        1. Register a host to the capsule
+        2. Sync content from capsule to the host
+        3. Unregister host from the capsule
+        4. Provision host via capsule
+        5. Provisioned host syncs content from capsule

-    :expectedresults: Katello installer should show the options to enable
-        BMC
+    :expectedresults: Hosts can be successfully registered to the capsule,
+        consume content and unregister afterwards. Hosts can be successfully
+        provisioned via capsule and consume content as well.

     :CaseAutomation: NotAutomated
     """
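The new `test_positive_capsule_content` stub above is not automated yet; a hedged sketch of how its first two steps could start, where `capsule` and `rhel_contenthost` are assumed fixture names and the org label and activation key are placeholders, not values from this diff:

    # Step 1: register the content host against the capsule (placeholder names).
    result = rhel_contenthost.execute(
        'subscription-manager register --org Default_Organization '
        f'--activationkey test-ak --serverurl https://{capsule.hostname}/rhsm'
    )
    assert result.status == 0
    # Step 2: consume content through the capsule-served repositories.
    assert rhel_contenthost.execute('dnf -y install tree').status == 0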
diff --git a/tests/foreman/cli/test_capsule_installer.py b/tests/foreman/cli/test_capsule_installer.py
deleted file mode 100644
index 7d619ece0c2..00000000000
--- a/tests/foreman/cli/test_capsule_installer.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""Test for capsule installer CLI
-
-:Requirement: Capsule Installer
-
-:CaseAutomation: Automated
-
-:CaseLevel: System
-
-:CaseComponent: Capsule
-
-:Team: Endeavour
-
-:TestType: Functional
-
-:CaseImportance: High
-
-:Upstream: No
-"""
-import pytest
-
-
-@pytest.mark.stubbed
-def test_positive_basic():
-    """perform a basic install of capsule.
-
-    :id: 47445685-5924-4980-89d0-bbb2fb608f4d
-
-    :Steps:
-
-        1. Assure your target capsule has ONLY the Capsule repo enabled. In
-           other words, the Satellite repo itself is not enabled by
-           default.
-        2. attempt to perform a basic, functional install of the capsule
-           using `capsule-installer`.
-
-    :expectedresults: product is installed
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_option_qpid_router():
-    """assure the --qpid-router flag can be used in
-    capsule-installer to enable katello-agent functionality via
-    remote clients
-
-    :id: d040a72d-72b2-41cf-b14e-a8e37e80200d
-
-    :Steps: Install capsule-installer with the '--qpid-router=true` flag
-
-    :expectedresults: Capsule installs correctly and qpid functionality is
-        enabled.
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_option_reverse_proxy():
-    """assure the --reverse-proxy flag can be used in
-    capsule-installer to enable katello-agent functionality via
-    remote clients
-
-    :id: 756fd76a-0183-4637-93c8-fe7c375be751
-
-    :Steps: Install using the '--reverse-proxy=true' flag
-
-    :expectedresults: Capsule installs correctly and functionality is
-        enabled.
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_negative_invalid_parameters():
-    """invalid (non-boolean) parameters cannot be passed to flag
-
-    :id: f4366c87-e436-42b4-ada4-55f0e66a481e
-
-    :Steps: attempt to provide a variety of invalid parameters to installer
-        (strings, numerics, whitespace, etc.)
-
-    :expectedresults: user is told that such parameters are invalid and
-        install aborts.
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_negative_option_parent_reverse_proxy_port():
-    """invalid (non-integer) parameters cannot be passed to flag
-
-    :id: a1af16d3-84da-4e94-818e-90bc82cc5698
-
-    :Setup: na
-
-    :Steps: attempt to provide a variety of invalid parameters to
-        --parent-reverse-proxy-port flag (strings, numerics, whitespace,
-        etc.)
-
-    :expectedresults: user told parameters are invalid; install aborts.
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_option_parent_reverse_proxy():
-    """valid parameters can be passed to --parent-reverse-proxy
-    (true)
-
-    :id: a905f4ca-a729-4efb-84fc-43923737f75b
-
-    :Setup: note that this requires an accompanying, valid port value
-
-    :Steps: Attempt to provide a value of "true" to --parent-reverse-proxy
-
-    :expectedresults: Install commences/completes with proxy installed
-        correctly.
-
-    :CaseAutomation: NotAutomated
-
-    """
-
-
-@pytest.mark.stubbed
-def test_positive_option_parent_reverse_proxy_port():
-    """valid parameters can be passed to
-    --parent-reverse-proxy-port (integer)
-
-    :id: 32238045-53e2-4ed4-ac86-57917e7aedcd
-
-    :Setup: note that this requires an accompanying, valid host for proxy
-        parameter
-
-    :Steps: Attempt to provide a valid proxy port # to flag
-
-    :expectedresults: Install commences and completes with proxy installed
-        correctly.
-
-    :CaseAutomation: NotAutomated
-
-    """
diff --git a/tests/foreman/cli/test_classparameters.py b/tests/foreman/cli/test_classparameters.py
index f48019c4963..022ceed0aac 100644
--- a/tests/foreman/cli/test_classparameters.py
+++ b/tests/foreman/cli/test_classparameters.py
@@ -4,22 +4,17 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: Puppet

 :CaseImportance: Medium

 :Team: Rocket

-:TestType: Functional
-
-:Upstream: No
 """
 import pytest

-from robottelo.cli.base import CLIReturnCodeError
 from robottelo.config import settings
+from robottelo.exceptions import CLIReturnCodeError
 from robottelo.utils.datafactory import gen_string
@@ -71,6 +66,7 @@ class TestSmartClassParameters:
     @pytest.mark.e2e
     def test_positive_list(
         self,
+        request,
         session_puppet_enabled_sat,
         module_puppet_org,
         module_puppet_loc,
@@ -90,8 +86,9 @@ def test_positive_list(
             location=module_puppet_loc.id,
             environment=module_puppet['env'].name,
         ).create()
+        request.addfinalizer(host.delete)
         host.add_puppetclass(data={'puppetclass_id': module_puppet['class']['id']})
-        hostgroup = session_puppet_enabled_sat.cli_factory.make_hostgroup(
+        hostgroup = session_puppet_enabled_sat.cli_factory.hostgroup(
             {
                 'puppet-environment-id': module_puppet['env'].id,
                 'puppet-class-ids': module_puppet['class']['id'],
@@ -149,9 +146,7 @@ def test_positive_list_with_non_admin_user(self, session_puppet_enabled_sat, mod
                 ]
             },
         }
-        user = session_puppet_enabled_sat.cli_factory.make_user(
-            {'admin': '0', 'password': password}
-        )
+        user = session_puppet_enabled_sat.cli_factory.user({'admin': '0', 'password': password})
         role = session_puppet_enabled_sat.cli_factory.make_role()
         session_puppet_enabled_sat.cli_factory.add_role_permissions(
             role['id'], required_user_permissions
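A pattern worth noting in the classparameters changes above: cleanup is registered the moment the entity exists, so teardown still runs if a later assertion fails. The shape in isolation (entity arguments trimmed to a hypothetical minimum):

    host = session_puppet_enabled_sat.api.Host(
        organization=module_puppet_org.id,
        location=module_puppet_loc.id,
    ).create()
    request.addfinalizer(host.delete)  # runs even if the test body raises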
Provisioning""" - hgroup = sat_azure.api.HostGroup( + return sat_azure.api.HostGroup( architecture=sat_azure_default_architecture, compute_resource=module_azurerm_cr, domain=sat_azure_domain, @@ -51,7 +46,6 @@ def azurerm_hostgroup( operatingsystem=sat_azure_default_os, organization=[sat_azure_org], ).create() - return hgroup class TestAzureRMComputeResourceTestCase: @@ -73,7 +67,6 @@ def test_positive_crud_azurerm_cr( :CaseImportance: Critical - :CaseLevel: Component """ # Create CR cr_name = gen_string('alpha') @@ -115,7 +108,7 @@ def test_positive_crud_azurerm_cr( # Delete CR sat_azure.cli.ComputeResource.delete({'name': result['name']}) - assert not ComputeResource.exists(search=('name', result['name'])) + assert not sat_azure.cli.ComputeResource.exists(search=('name', result['name'])) @pytest.mark.upgrade @pytest.mark.tier2 @@ -156,7 +149,6 @@ def test_positive_image_crud( :CaseImportance: Critical - :CaseLevel: Integration """ # Create @@ -230,8 +222,6 @@ def test_positive_check_available_networks(self, sat_azure, azurermclient, modul :expectedresults: All the networks from AzureRM CR should be available. - :CaseLevel: Integration - :BZ: 1850934 """ @@ -255,7 +245,6 @@ def test_positive_create_compute_profile_values( :expectedresults: Compute-profile values should be create with AzureRm CR - :CaseLevel: Integration """ username = gen_string('alpha') password = gen_string('alpha') @@ -354,29 +343,30 @@ def class_host_ft( Provisions the host on AzureRM using Finish template Later in tests this host will be used to perform assertions """ - with sat_azure.hammer_api_timeout(): - with sat_azure.skip_yum_update_during_provisioning(template='Kickstart default finish'): - host = sat_azure.cli.Host.create( - { - 'name': self.hostname, - 'compute-resource': module_azurerm_cr.name, - 'compute-attributes': self.compute_attrs, - 'interface': self.interfaces_attributes, - 'location-id': sat_azure_loc.id, - 'organization-id': sat_azure_org.id, - 'domain-id': sat_azure_domain.id, - 'domain': sat_azure_domain.name, - 'architecture-id': sat_azure_default_architecture.id, - 'operatingsystem-id': sat_azure_default_os.id, - 'root-password': gen_string('alpha'), - 'image': module_azurerm_custom_finishimg.name, - }, - timeout=1800000, - ) - yield host - with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): - if Host.exists(search=('name', host['name'])): - Host.delete({'name': self.fullhostname}, timeout=1800000) + with sat_azure.hammer_api_timeout(), sat_azure.skip_yum_update_during_provisioning( + template='Kickstart default finish' + ): + host = sat_azure.cli.Host.create( + { + 'name': self.hostname, + 'compute-resource': module_azurerm_cr.name, + 'compute-attributes': self.compute_attrs, + 'interface': self.interfaces_attributes, + 'location-id': sat_azure_loc.id, + 'organization-id': sat_azure_org.id, + 'domain-id': sat_azure_domain.id, + 'domain': sat_azure_domain.name, + 'architecture-id': sat_azure_default_architecture.id, + 'operatingsystem-id': sat_azure_default_os.id, + 'root-password': gen_string('alpha'), + 'image': module_azurerm_custom_finishimg.name, + }, + timeout=1800000, + ) + yield host + with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): + if sat_azure.cli.Host.exists(search=('name', host['name'])): + sat_azure.cli.Host.delete({'name': self.fullhostname}, timeout=1800000) @pytest.fixture(scope='class') def azureclient_host(self, azurermclient, class_host_ft): @@ -398,9 +388,7 @@ def test_positive_azurerm_host_provisioned( :id: 
9e8242e5-3ef3-4884-a200-7ba79b8ef49f - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1. Create a AzureRM Compute Resource and provision host. @@ -484,27 +472,26 @@ def class_host_ud( Provisions the host on AzureRM using UserData template Later in tests this host will be used to perform assertions """ - with sat_azure.hammer_api_timeout(): - with sat_azure.skip_yum_update_during_provisioning( - template='Kickstart default user data' - ): - host = sat_azure.cli.Host.create( - { - 'name': self.hostname, - 'compute-attributes': self.compute_attrs, - 'interface': self.interfaces_attributes, - 'image': module_azurerm_cloudimg.name, - 'hostgroup': azurerm_hostgroup.name, - 'location': sat_azure_loc.name, - 'organization': sat_azure_org.name, - 'operatingsystem-id': sat_azure_default_os.id, - }, - timeout=1800000, - ) - yield host - with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): - if Host.exists(search=('name', host['name'])): - Host.delete({'name': self.fullhostname}, timeout=1800000) + with sat_azure.hammer_api_timeout(), sat_azure.skip_yum_update_during_provisioning( + template='Kickstart default user data' + ): + host = sat_azure.cli.Host.create( + { + 'name': self.hostname, + 'compute-attributes': self.compute_attrs, + 'interface': self.interfaces_attributes, + 'image': module_azurerm_cloudimg.name, + 'hostgroup': azurerm_hostgroup.name, + 'location': sat_azure_loc.name, + 'organization': sat_azure_org.name, + 'operatingsystem-id': sat_azure_default_os.id, + }, + timeout=1800000, + ) + yield host + with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): + if sat_azure.cli.Host.exists(search=('name', host['name'])): + sat_azure.cli.Host.delete({'name': self.fullhostname}, timeout=1800000) @pytest.fixture(scope='class') def azureclient_host(self, azurermclient, class_host_ud): @@ -528,9 +515,7 @@ def test_positive_azurerm_host_provisioned( :id: c99d2679-1742-4ef3-9288-2961d18a30e7 - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1. Create a AzureRM Compute Resource and provision host. diff --git a/tests/foreman/cli/test_computeresource_docker.py b/tests/foreman/cli/test_computeresource_docker.py deleted file mode 100644 index d6c38b3c9a4..00000000000 --- a/tests/foreman/cli/test_computeresource_docker.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tests for the Docker Compute Resource feature - -:Requirement: Provisioning - -:CaseAutomation: Automated - -:CaseLevel: Component - -:TestType: Functional - -:Team: Rocket - -:CaseComponent: Provisioning - -:CaseImportance: High - -:Upstream: No -""" -import pytest -from fauxfactory import gen_string - -from robottelo.cli.factory import make_product_wait -from robottelo.cli.factory import make_repository -from robottelo.cli.factory import Repository -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import REPO_TYPE - - -@pytest.mark.skip_if_not_set('docker') -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_positive_upload_image(module_org, target_sat, container_contenthost): - """A Docker-enabled client can create a new ``Dockerfile`` - pointing to an existing Docker image from a Satellite 6 and modify it. - Then, using ``docker build`` generate a new image which can then be - uploaded back onto the Satellite 6 as a new repository. - - :id: 2c47559c-b27f-436e-9b1e-df5c3633b007 - - :Steps: - - 1. 
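The azurerm fixture rewrites above are mostly a mechanical flattening of nested context managers into the single-`with` form; the two spellings are equivalent, shown here in isolation with the bodies elided:

    # Old, nested form:
    with sat_azure.hammer_api_timeout():
        with sat_azure.skip_yum_update_during_provisioning(template='Kickstart default finish'):
            pass
    # New, flattened form adopted by this diff:
    with sat_azure.hammer_api_timeout(), sat_azure.skip_yum_update_during_provisioning(
        template='Kickstart default finish'
    ):
        pass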
diff --git a/tests/foreman/cli/test_computeresource_docker.py b/tests/foreman/cli/test_computeresource_docker.py
deleted file mode 100644
index d6c38b3c9a4..00000000000
--- a/tests/foreman/cli/test_computeresource_docker.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""Tests for the Docker Compute Resource feature
-
-:Requirement: Provisioning
-
-:CaseAutomation: Automated
-
-:CaseLevel: Component
-
-:TestType: Functional
-
-:Team: Rocket
-
-:CaseComponent: Provisioning
-
-:CaseImportance: High
-
-:Upstream: No
-"""
-import pytest
-from fauxfactory import gen_string
-
-from robottelo.cli.factory import make_product_wait
-from robottelo.cli.factory import make_repository
-from robottelo.cli.factory import Repository
-from robottelo.constants import CONTAINER_REGISTRY_HUB
-from robottelo.constants import CONTAINER_UPSTREAM_NAME
-from robottelo.constants import REPO_TYPE
-
-
-@pytest.mark.skip_if_not_set('docker')
-@pytest.mark.tier3
-@pytest.mark.upgrade
-def test_positive_upload_image(module_org, target_sat, container_contenthost):
-    """A Docker-enabled client can create a new ``Dockerfile``
-    pointing to an existing Docker image from a Satellite 6 and modify it.
-    Then, using ``docker build`` generate a new image which can then be
-    uploaded back onto the Satellite 6 as a new repository.
-
-    :id: 2c47559c-b27f-436e-9b1e-df5c3633b007
-
-    :Steps:
-
-        1. Create a local docker compute resource
-        2. Create a container and start it
-        3. [on docker host] Commit a new image from the container
-        4. [on docker host] Export the image to tar
-        5. scp the image to satellite box
-        6. create a new docker repo
-        7. upload the image to the new repo
-
-    :expectedresults: Client can create a new image based off an existing
-        Docker image from a Satellite instance, add a new package and
-        upload the modified image (plus layer) back to the Satellite.
-
-    :parametrized: yes
-    """
-    try:
-        """
-        These functions were removed, but let's leave them here
-        to maintain overall test logic - in case required functionality
-        is eventually implemented
-
-        compute_resource = make_compute_resource({
-            'organization-ids': [module_org.id],
-            'provider': 'Docker',
-            'url': f'http://{container_contenthost.ip_addr}:2375',
-        })
-        container = make_container({
-            'compute-resource-id': compute_resource['id'],
-            'organization-ids': [module_org.id],
-        })
-        Docker.container.start({'id': container['id']})
-        """
-        container = {'uuid': 'stubbed test'}
-        repo_name = gen_string('alphanumeric').lower()
-
-        # Commit a new docker image and verify image was created
-        image_name = f'{repo_name}/{CONTAINER_UPSTREAM_NAME}'
-        result = container_contenthost.execute(
-            f'docker commit {container["uuid"]} {image_name}:latest && '
-            f'docker images --all | grep {image_name}'
-        )
-        assert result.status == 0
-
-        # Save the image to a tar archive
-        result = container_contenthost.execute(f'docker save -o {repo_name}.tar {image_name}')
-        assert result.status == 0
-
-        tar_file = f'{repo_name}.tar'
-        container_contenthost.get(remote_path=tar_file)
-        target_sat.put(
-            local_path=tar_file,
-            remote_path=f'/tmp/{tar_file}',
-        )
-
-        # Upload tarred repository
-        product = make_product_wait({'organization-id': module_org.id})
-        repo = make_repository(
-            {
-                'content-type': REPO_TYPE['docker'],
-                'docker-upstream-name': CONTAINER_UPSTREAM_NAME,
-                'name': gen_string('alpha', 5),
-                'product-id': product['id'],
-                'url': CONTAINER_REGISTRY_HUB,
-            }
-        )
-        Repository.upload_content({'id': repo['id'], 'path': f'/tmp/{tar_file}'})
-
-        # Verify repository was uploaded successfully
-        repo = Repository.info({'id': repo['id']})
-        assert target_sat.hostname == repo['published-at']
-
-        repo_name = '-'.join((module_org.label, product['label'], repo['label'])).lower()
-        assert repo_name in repo['published-at']
-    finally:
-        # Remove the archive
-        target_sat.execute(f'rm -f /tmp/{tar_file}')
diff --git a/tests/foreman/cli/test_computeresource_ec2.py b/tests/foreman/cli/test_computeresource_ec2.py
index 4bbbb5cd77e..e4e1e669e6b 100644
--- a/tests/foreman/cli/test_computeresource_ec2.py
+++ b/tests/foreman/cli/test_computeresource_ec2.py
@@ -1,36 +1,26 @@
 """
 :Requirement: Computeresource EC2

-:CaseLevel: Acceptance
-
 :CaseComponent: ComputeResources-EC2

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_string
+import pytest

-from robottelo.cli.factory import make_compute_resource
-from robottelo.cli.factory import make_location
-from robottelo.cli.factory import make_org
-from robottelo.cli.org import Org
 from robottelo.config import settings
-from robottelo.constants import EC2_REGION_CA_CENTRAL_1
-from robottelo.constants import FOREMAN_PROVIDERS
+from robottelo.constants import EC2_REGION_CA_CENTRAL_1, FOREMAN_PROVIDERS


 @pytest.fixture(scope='module')
-def aws():
+def aws(module_target_sat):
     aws = type('rhev', (object,), {})()
-    aws.org = make_org()
-    aws.loc = make_location()
-    Org.add_location({'id': aws.org['id'], 'location-id': aws.loc['id']})
+    aws.org = module_target_sat.cli_factory.make_org()
+    aws.loc = module_target_sat.cli_factory.make_location()
+    module_target_sat.cli.Org.add_location({'id': aws.org['id'], 'location-id': aws.loc['id']})
     aws.aws_access_key = settings.ec2.access_key
     aws.aws_secret_key = settings.ec2.secret_key
     aws.aws_region = settings.ec2.region
@@ -44,7 +34,7 @@ def aws():

 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_create_ec2_with_custom_region(aws):
+def test_positive_create_ec2_with_custom_region(aws, module_target_sat):
     """Create a new ec2 compute resource with custom region

     :id: 28eb592d-ebf0-4659-900a-87112b3b2ad7
@@ -58,12 +48,10 @@ def test_positive_create_ec2_with_custom_region(aws):
     :CaseAutomation: Automated

     :CaseImportance: Critical
-
-    :CaseLevel: Component
     """
     cr_name = gen_string(str_type='alpha')
     cr_description = gen_string(str_type='alpha')
-    cr = make_compute_resource(
+    cr = module_target_sat.cli_factory.compute_resource(
         {
             'name': cr_name,
             'description': cr_description,
diff --git a/tests/foreman/cli/test_computeresource_libvirt.py b/tests/foreman/cli/test_computeresource_libvirt.py
index 7147844bfb9..2a4720750a0 100644
--- a/tests/foreman/cli/test_computeresource_libvirt.py
+++ b/tests/foreman/cli/test_computeresource_libvirt.py
@@ -21,32 +21,26 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: ComputeResources-libvirt

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import random

+from fauxfactory import gen_string, gen_url
 import pytest
-from fauxfactory import gen_string
-from fauxfactory import gen_url
+from wait_for import wait_for

-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.computeresource import ComputeResource
-from robottelo.cli.factory import make_compute_resource
-from robottelo.cli.factory import make_location
 from robottelo.config import settings
-from robottelo.constants import FOREMAN_PROVIDERS
-from robottelo.constants import LIBVIRT_RESOURCE_URL
+from robottelo.constants import FOREMAN_PROVIDERS, LIBVIRT_RESOURCE_URL
+from robottelo.exceptions import CLIReturnCodeError
 from robottelo.utils.datafactory import parametrized
+from robottelo.utils.issue_handlers import is_open
+
+LIBVIRT_URL = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname


 def valid_name_desc_data():
@@ -107,13 +101,12 @@ def invalid_update_data():


 @pytest.fixture(scope="module")
-@pytest.mark.skip_if_not_set('libvirt')
 def libvirt_url():
     return LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname


 @pytest.mark.tier1
-def test_positive_create_with_name(libvirt_url):
+def test_positive_create_with_name(libvirt_url, module_target_sat):
     """Create Compute Resource

     :id: 6460bcc7-d7f7-406a-aecb-b3d54d51e697

     :expectedresults: Compute resource is created

     :CaseImportance: Critical
-
-    :CaseLevel: Component
     """
-    ComputeResource.create(
+    module_target_sat.cli.ComputeResource.create(
         {
             'name': f'cr {gen_string("alpha")}',
             'provider': 'Libvirt',
@@ -134,7 +125,7 @@


 @pytest.mark.tier1
-def test_positive_info(libvirt_url):
+def test_positive_info(libvirt_url, module_target_sat):
     """Test Compute Resource Info

     :id: f54af041-4471-4d8e-9429-45d821df0440

     :expectedresults: Compute resource Info is displayed

     :CaseImportance: Critical
-
-    :CaseLevel: Component
     """
     name = gen_string('utf8')
-    compute_resource = make_compute_resource(
+    compute_resource = module_target_sat.cli_factory.compute_resource(
         {
             'name': name,
             'provider': FOREMAN_PROVIDERS['libvirt'],
@@ -158,7 +147,7 @@


 @pytest.mark.tier1
-def test_positive_list(libvirt_url):
+def test_positive_list(libvirt_url, module_target_sat):
     """Test Compute Resource List

     :id: 11123361-ffbc-4c59-a0df-a4af3408af7a

     :expectedresults: Compute resource List is displayed

     :CaseImportance: Critical
-
-    :CaseLevel: Component
     """
-    comp_res = make_compute_resource({'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url})
+    comp_res = module_target_sat.cli_factory.compute_resource(
+        {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}
+    )
     assert comp_res['name']
-    result_list = ComputeResource.list({'search': 'name=%s' % comp_res['name']})
+    result_list = module_target_sat.cli.ComputeResource.list(
+        {'search': 'name=%s' % comp_res['name']}
+    )
     assert len(result_list) > 0
-    result = ComputeResource.exists(search=('name', comp_res['name']))
+    result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))
     assert result


 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_delete_by_name(libvirt_url):
+def test_positive_delete_by_name(libvirt_url, module_target_sat):
     """Test Compute Resource delete

     :id: 7fcc0b66-f1c1-4194-8a4b-7f04b1dd439a

     :expectedresults: Compute resource deleted

     :CaseImportance: Critical
-
-    :CaseLevel: Component
     """
-    comp_res = make_compute_resource({'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url})
+    comp_res = module_target_sat.cli_factory.compute_resource(
+        {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}
+    )
     assert comp_res['name']
-    ComputeResource.delete({'name': comp_res['name']})
-    result = ComputeResource.exists(search=('name', comp_res['name']))
+    module_target_sat.cli.ComputeResource.delete({'name': comp_res['name']})
+    result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))
     assert len(result) == 0


@@ -201,7 +192,7 @@
 @pytest.mark.tier1
 @pytest.mark.upgrade
 @pytest.mark.parametrize('options', **parametrized(valid_name_desc_data()))
-def test_positive_create_with_libvirt(libvirt_url, options):
+def test_positive_create_with_libvirt(libvirt_url, options, target_sat):
     """Test Compute Resource create

     :id: adc6f4f8-6420-4044-89d1-c69e0bfeeab9

     :expectedresults: Compute Resource created

     :CaseImportance: Critical

-    :CaseLevel: Component
-
     :parametrized: yes
     """
-    ComputeResource.create(
+    target_sat.cli.ComputeResource.create(
         {
             'description': options['description'],
             'name': options['name'],
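The libvirt hunks above all follow the same migration: module-level `ComputeResource` and factory imports become calls on the satellite fixture. A compact create/verify/delete round-trip in the new style, with a placeholder connection URL standing in for the configured `libvirt_url`:

    comp_res = module_target_sat.cli_factory.compute_resource(
        {'provider': 'Libvirt', 'url': 'qemu+ssh://root@libvirt.example.com/system'}
    )
    # `exists` takes a (field, value) search tuple and is falsy when no match remains.
    assert module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))
    module_target_sat.cli.ComputeResource.delete({'name': comp_res['name']})
    assert not module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))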
@@ -225,7 +214,7 @@
 @pytest.mark.tier2
-def test_positive_create_with_loc(libvirt_url):
+def test_positive_create_with_loc(libvirt_url, module_target_sat):
     """Create Compute Resource with location

     :id: 224c7cbc-6bac-4a94-8141-d6249896f5a2

     :expectedresults: Compute resource is created and has location assigned

     :CaseImportance: High
-
-    :CaseLevel: Integration
     """
-    location = make_location()
-    comp_resource = make_compute_resource({'location-ids': location['id']})
+    location = module_target_sat.cli_factory.make_location()
+    comp_resource = module_target_sat.cli_factory.compute_resource(
+        {
+            'location-ids': location['id'],
+            'provider': FOREMAN_PROVIDERS['libvirt'],
+            'url': libvirt_url,
+        }
+    )
     assert len(comp_resource['locations']) == 1
     assert comp_resource['locations'][0] == location['name']


 @pytest.mark.tier2
-def test_positive_create_with_locs(libvirt_url):
+def test_positive_create_with_locs(libvirt_url, module_target_sat):
     """Create Compute Resource with multiple locations

     :id: f665c586-39bf-480a-a0fc-81d9e1eb7c54

     :expectedresults: Compute resource is created and has multiple
         locations assigned

     :CaseImportance: High
-
-    :CaseLevel: Integration
     """
     locations_amount = random.randint(3, 5)
-    locations = [make_location() for _ in range(locations_amount)]
-    comp_resource = make_compute_resource(
-        {'location-ids': [location['id'] for location in locations]}
+    locations = [module_target_sat.cli_factory.make_location() for _ in range(locations_amount)]
+    comp_resource = module_target_sat.cli_factory.compute_resource(
+        {
+            'location-ids': [location['id'] for location in locations],
+            'provider': FOREMAN_PROVIDERS['libvirt'],
+            'url': libvirt_url,
+        }
     )
     assert len(comp_resource['locations']) == locations_amount
     for location in locations:
@@ -270,7 +265,7 @@
 @pytest.mark.tier2
 @pytest.mark.parametrize('options', **parametrized(invalid_create_data()))
-def test_negative_create_with_name_url(libvirt_url, options):
+def test_negative_create_with_name_url(libvirt_url, options, target_sat):
     """Compute Resource negative create with invalid values

     :id: cd432ff3-b3b9-49cd-9a16-ed00d81679dd

     :expectedresults: Compute resource not created

     :CaseImportance: High

-    :CaseLevel: Component
-
     :parametrized: yes
     """
     with pytest.raises(CLIReturnCodeError):
-        ComputeResource.create(
+        target_sat.cli.ComputeResource.create(
             {
                 'name': options.get('name', gen_string(str_type='alphanumeric')),
                 'provider': FOREMAN_PROVIDERS['libvirt'],
@@ -294,7 +287,7 @@


 @pytest.mark.tier2
-def test_negative_create_with_same_name(libvirt_url):
+def test_negative_create_with_same_name(libvirt_url, module_target_sat):
     """Compute Resource negative create with the same name

     :id: ddb5c45b-1ea3-46d0-b248-56c0388d2e4b

     :expectedresults: Compute resource not created

     :CaseImportance: High
-
-    :CaseLevel: Component
     """
-    comp_res = make_compute_resource()
+    comp_res = module_target_sat.cli_factory.compute_resource(
+        {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}
+    )
     with pytest.raises(CLIReturnCodeError):
-        ComputeResource.create(
+        module_target_sat.cli.ComputeResource.create(
             {
                 'name': comp_res['name'],
                 'provider': FOREMAN_PROVIDERS['libvirt'],
@@ -321,7 +314,7 @@
 @pytest.mark.tier1
 @pytest.mark.parametrize('options', **parametrized(valid_update_data()))
-def test_positive_update_name(libvirt_url, options):
+def test_positive_update_name(libvirt_url, options, module_target_sat):
     """Compute Resource positive update

     :id: 213d7f04-4c54-4985-8ca0-d2a1a9e3b305

     :expectedresults: Compute Resource successfully updated

     :CaseImportance: Critical

-    :CaseLevel: Component
-
     :parametrized: yes
     """
-    comp_res = make_compute_resource()
+    comp_res = module_target_sat.cli_factory.compute_resource(
+        {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}
+    )
     options.update({'name': comp_res['name']})
     # update Compute Resource
-    ComputeResource.update(options)
+    module_target_sat.cli.ComputeResource.update(options)
     # check updated values
-    result = ComputeResource.info({'id': comp_res['id']})
+    result = module_target_sat.cli.ComputeResource.info({'id': comp_res['id']})
     assert result['description'] == options.get('description', comp_res['description'])
     assert result['name'] == options.get('new-name', comp_res['name'])
     assert result['url'] == options.get('url', comp_res['url'])
@@ -351,7 +344,7 @@
 @pytest.mark.tier2
 @pytest.mark.parametrize('options', **parametrized(invalid_update_data()))
-def test_negative_update(libvirt_url, options):
+def test_negative_update(libvirt_url, options, module_target_sat):
     """Compute Resource negative update

     :id: e7aa9b39-dd01-4f65-8e89-ff5a6f4ee0e3

     :expectedresults: Compute Resource not updated

     :CaseImportance: High

-    :CaseLevel: Component
-
     :parametrized: yes
     """
-    comp_res = make_compute_resource()
+    comp_res = module_target_sat.cli_factory.compute_resource(
+        {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}
+    )
     with pytest.raises(CLIReturnCodeError):
-        ComputeResource.update(dict({'name': comp_res['name']}, **options))
-    result = ComputeResource.info({'id': comp_res['id']})
+        module_target_sat.cli.ComputeResource.update(dict({'name': comp_res['name']}, **options))
+    result = module_target_sat.cli.ComputeResource.info({'id': comp_res['id']})
     # check attributes have not changed
     assert result['name'] == comp_res['name']
     options.pop('new-name', None)
-    for key in options.keys():
+    for key in options:
         assert comp_res[key] == result[key]


 @pytest.mark.tier2
 @pytest.mark.parametrize('set_console_password', ['true', 'false'])
-def test_positive_create_with_console_password_and_name(libvirt_url, set_console_password):
+def test_positive_create_with_console_password_and_name(
+    libvirt_url, set_console_password, module_target_sat
+):
     """Create a compute resource with ``--set-console-password``.

     :id: 5b4c838a-0265-4c71-a73d-305fecbe508a

     :expectedresults: Compute resource is created and has the console
         password set

     :CaseImportance: High

-    :CaseLevel: Component
-
     :parametrized: yes
     """
-    ComputeResource.create(
+    module_target_sat.cli.ComputeResource.create(
         {
             'name': gen_string('utf8'),
             'provider': 'Libvirt',
@@ -404,7 +397,7 @@

 @pytest.mark.tier2
 @pytest.mark.parametrize('set_console_password', ['true', 'false'])
-def test_positive_update_console_password(libvirt_url, set_console_password):
+def test_positive_update_console_password(libvirt_url, set_console_password, module_target_sat):
     """Update a compute resource with ``--set-console-password``.

     :id: ef09351e-dcd3-4b4f-8d3b-995e9e5873b3

     :expectedresults: Compute resource is updated with the console
         password set

     :CaseImportance: High

-    :CaseLevel: Component
-
     :parametrized: yes
     """
     cr_name = gen_string('utf8')
-    ComputeResource.create({'name': cr_name, 'provider': 'Libvirt', 'url': gen_url()})
-    ComputeResource.update({'name': cr_name, 'set-console-password': set_console_password})
+    module_target_sat.cli.ComputeResource.create(
+        {'name': cr_name, 'provider': 'Libvirt', 'url': gen_url()}
+    )
+    module_target_sat.cli.ComputeResource.update(
+        {'name': cr_name, 'set-console-password': set_console_password}
+    )
+
+
+@pytest.mark.e2e
+@pytest.mark.on_premises_provisioning
+@pytest.mark.tier3
+@pytest.mark.rhel_ver_match('[^6]')
+@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete=True'], indirect=True)
+def test_positive_provision_end_to_end(
+    request,
+    setting_update,
+    module_libvirt_provisioning_sat,
+    module_sca_manifest_org,
+    module_location,
+    provisioning_hostgroup,
+    module_provisioning_rhel_content,
+):
+    """Provision a host on Libvirt compute resource with the help of hostgroup.
+
+    :id: b003faa9-2810-4176-94d2-ea84bed248ec
+
+    :setup: Hostgroup and provisioning setup like domain, subnet etc.
+
+    :steps:
+        1. Create a Libvirt compute resource.
+        2. Create a host on Libvirt compute resource using the Hostgroup
+        3. Use compute-attributes parameter to specify key-value parameters
+           regarding the virtual machine.
+        4. Provision the host.
+
+    :expectedresults: Host should be provisioned with hostgroup
+
+    :parametrized: yes
+
+    :BZ: 2236693
+
+    :customerscenario: true
+    """
+    sat = module_libvirt_provisioning_sat.sat
+    cr_name = gen_string('alpha')
+    hostname = gen_string('alpha').lower()
+    os_major_ver = module_provisioning_rhel_content.os.major
+    cpu_mode = 'host-passthrough' if is_open('BZ:2236693') and os_major_ver == '9' else 'default'
+    libvirt_cr = sat.cli.ComputeResource.create(
+        {
+            'name': cr_name,
+            'provider': FOREMAN_PROVIDERS['libvirt'],
+            'url': LIBVIRT_URL,
+            'organizations': module_sca_manifest_org.name,
+            'locations': module_location.name,
+        }
+    )
+    assert libvirt_cr['name'] == cr_name
+    host = sat.cli.Host.create(
+        {
+            'name': hostname,
+            'location': module_location.name,
+            'organization': module_sca_manifest_org.name,
+            'hostgroup': provisioning_hostgroup.name,
+            'compute-resource-id': libvirt_cr['id'],
+            'ip': None,
+            'mac': None,
+            'compute-attributes': f'cpus=1, memory=6442450944, cpu_mode={cpu_mode}, start=1',
+            'interface': f'compute_type=bridge,compute_bridge=br-{settings.provisioning.vlan_id}',
+            'volume': 'capacity=10',
+            'provision-method': 'build',
+        }
+    )
+    # teardown
+    request.addfinalizer(lambda: sat.provisioning_cleanup(host['name'], interface='CLI'))
+
+    # checks
+    hostname = f'{hostname}.{module_libvirt_provisioning_sat.domain.name}'
+    assert hostname == host['name']
+    host_info = sat.cli.Host.info({'name': hostname})
+    # Check on Libvirt, if VM exists
+    result = sat.execute(
+        f'su foreman -s /bin/bash -c "virsh -c {LIBVIRT_URL} list --state-running"'
+    )
+    assert hostname in result.stdout
+
+    wait_for(
+        lambda: sat.cli.Host.info({'name': hostname})['status']['build-status']
+        != 'Pending installation',
+        timeout=1800,
+        delay=30,
+    )
+    host_info = sat.cli.Host.info({'id': host['id']})
+    assert host_info['status']['build-status'] == 'Installed'
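The polling step of the new end-to-end libvirt test, shown in isolation; `wait_for` (from the wait-for package, imported at the top of this file) re-evaluates the callable every `delay` seconds until it returns a truthy value or `timeout` seconds elapse:

    from wait_for import wait_for

    wait_for(
        lambda: sat.cli.Host.info({'name': hostname})['status']['build-status']
        != 'Pending installation',
        timeout=1800,
        delay=30,
    )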
diff --git a/tests/foreman/cli/test_computeresource_osp.py b/tests/foreman/cli/test_computeresource_osp.py
index 36fe06c90b1..ff6779056ae 100644
--- a/tests/foreman/cli/test_computeresource_osp.py
+++ b/tests/foreman/cli/test_computeresource_osp.py
@@ -3,32 +3,24 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: ComputeResources-OpenStack

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-import pytest
+from box import Box
 from fauxfactory import gen_string
+import pytest

-from robottelo.cli.computeresource import ComputeResource
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import CLIReturnCodeError
-from robottelo.cli.factory import make_compute_resource
 from robottelo.config import settings
+from robottelo.exceptions import CLIReturnCodeError

-OSP_SETTINGS = dict(
+OSP_SETTINGS = Box(
     username=settings.osp.username,
     password=settings.osp.password,
     tenant=settings.osp.tenant,
-    hostname=settings.osp.hostname,
     project_domain_id=settings.osp.project_domain_id,
 )
@@ -36,19 +28,29 @@
 class TestOSPComputeResourceTestCase:
     """OSPComputeResource CLI tests."""

-    def cr_cleanup(self, cr_id):
+    def cr_cleanup(self, cr_id, id_type, target_sat):
         """Finalizer for removing CR from Satellite.
         This should remove ssh key pairs from OSP in case of test fail.
         """
         try:
-            ComputeResource.delete({'id': cr_id})
+            target_sat.cli.ComputeResource.delete({id_type: cr_id})
+            assert not target_sat.cli.ComputeResource.exists(search=(id_type, cr_id))
         except CLIReturnCodeError:
             pass

+    @pytest.fixture
+    def osp_version(request):
+        versions = {'osp16': settings.osp.api_url.osp16, 'osp17': settings.osp.api_url.osp17}
+        return versions[getattr(request, 'param', 'osp16')]
+
     @pytest.mark.upgrade
     @pytest.mark.tier3
-    @pytest.mark.parametrize('id_type', ['id', 'name'])
-    def test_crud_and_duplicate_name(self, request, id_type):
+    @pytest.mark.parametrize('osp_version', ['osp16', 'osp17'], indirect=True)
+    @pytest.mark.parametrize(
+        'id_type',
+        ['id', 'name'],
+    )
+    def test_crud_and_duplicate_name(self, request, id_type, osp_version, target_sat):
         """Create, list, update and delete Openstack compute resource

         :id: caf60bad-999d-483e-807f-95f52f35085d
@@ -63,47 +65,47 @@ def test_crud_and_duplicate_name(self, request, id_type):
         """
         # create
         name = gen_string('alpha')
-        compute_resource = make_compute_resource(
+        compute_resource = target_sat.cli.ComputeResource.create(
             {
                 'name': name,
                 'provider': 'Openstack',
-                'user': OSP_SETTINGS['username'],
-                'password': OSP_SETTINGS['password'],
-                'tenant': OSP_SETTINGS['tenant'],
-                'url': OSP_SETTINGS['hostname'],
-                'project-domain-id': OSP_SETTINGS['project_domain_id'],
+                'user': OSP_SETTINGS.username,
+                'password': OSP_SETTINGS.password,
+                'tenant': OSP_SETTINGS.tenant,
+                'project-domain-id': OSP_SETTINGS.project_domain_id,
+                'url': osp_version,
             }
         )
-        request.addfinalizer(lambda: self.cr_cleanup(compute_resource['id']))
+        request.addfinalizer(lambda: self.cr_cleanup(compute_resource['id'], id_type, target_sat))
         assert compute_resource['name'] == name
-        assert ComputeResource.exists(search=(id_type, compute_resource[id_type]))
+        assert target_sat.cli.ComputeResource.exists(search=(id_type, compute_resource[id_type]))

         # negative create with same name
-        with pytest.raises(CLIFactoryError):
-            make_compute_resource(
+        with pytest.raises(CLIReturnCodeError):
+            target_sat.cli.ComputeResource.create(
                 {
                     'name': name,
                     'provider': 'Openstack',
-                    'user': OSP_SETTINGS['username'],
-                    'password': OSP_SETTINGS['password'],
-                    'tenant': OSP_SETTINGS['tenant'],
-                    'url': OSP_SETTINGS['hostname'],
-                    'project-domain-id': OSP_SETTINGS['project_domain_id'],
+                    'user': OSP_SETTINGS.username,
+                    'password': OSP_SETTINGS.password,
+                    'tenant': OSP_SETTINGS.tenant,
+                    'project-domain-id': OSP_SETTINGS.project_domain_id,
+                    'url': osp_version,
                 }
             )
         # update
         new_name = gen_string('alpha')
-        ComputeResource.update({id_type: compute_resource[id_type], 'new-name': new_name})
+        target_sat.cli.ComputeResource.update(
+            {id_type: compute_resource[id_type], 'new-name': new_name}
+        )
         if id_type == 'name':
-            compute_resource = ComputeResource.info({'name': new_name})
+            compute_resource = target_sat.cli.ComputeResource.info({'name': new_name})
         else:
-            compute_resource = ComputeResource.info({'id': compute_resource['id']})
+            compute_resource = target_sat.cli.ComputeResource.info({'id': compute_resource['id']})
         assert new_name == compute_resource['name']
-        ComputeResource.delete({id_type: compute_resource[id_type]})
-        assert not ComputeResource.exists(search=(id_type, compute_resource[id_type]))

     @pytest.mark.tier3
-    def test_negative_create_osp_with_url(self):
+    def test_negative_create_osp_with_url(self, target_sat):
         """Attempt to create Openstack compute resource with invalid URL

         :id: a6be8233-2641-4c87-8563-f48d6efbb6ac
@@ -116,15 +118,15 @@ def test_negative_create_osp_with_url(self):
         """
         name = gen_string('alpha')
         with pytest.raises(CLIReturnCodeError):
-            ComputeResource.create(
+            target_sat.cli.ComputeResource.create(
                 {
                     'name': name,
                     'provider': 'Openstack',
-                    'user': OSP_SETTINGS['username'],
-                    'password': OSP_SETTINGS['password'],
-                    'tenant': OSP_SETTINGS['tenant'],
-                    'url': 'invalid url',
-                    'project-domain-id': OSP_SETTINGS['project_domain_id'],
+                    'user': OSP_SETTINGS.username,
+                    'password': OSP_SETTINGS.password,
+                    'tenant': OSP_SETTINGS.tenant,
+                    'project-domain-id': OSP_SETTINGS.project_domain_id,
+                    'url': 'invalid_url',
                 }
             )
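The `osp_version` fixture above resolves its value through indirect parametrization: pytest hands each value from the parametrize list to the fixture as `request.param`, and the `getattr` default keeps unparametrized callers on `osp16`. A self-contained illustration of the mechanism, with hypothetical names:

    import pytest

    @pytest.fixture
    def flavor(request):
        # `param` only exists when a test parametrizes this fixture indirectly.
        return getattr(request, 'param', 'default')

    @pytest.mark.parametrize('flavor', ['small', 'large'], indirect=True)
    def test_flavor(flavor):
        assert flavor in ('small', 'large')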
diff --git a/tests/foreman/cli/test_computeresource_rhev.py b/tests/foreman/cli/test_computeresource_rhev.py
index f3dcac7ee01..173b9c1c414 100644
--- a/tests/foreman/cli/test_computeresource_rhev.py
+++ b/tests/foreman/cli/test_computeresource_rhev.py
@@ -3,29 +3,20 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: ComputeResources-RHEV

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_string
+import pytest
 from wait_for import wait_for
 from wrapanapi import RHEVMSystem

-from robottelo.cli.computeresource import ComputeResource
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import CLIReturnCodeError
-from robottelo.cli.factory import make_compute_resource
-from robottelo.cli.host import Host
 from robottelo.config import settings
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError


 @pytest.fixture(scope='module')
 def rhev():
@@ -45,7 +36,7 @@ def rhev():


 @pytest.mark.tier1
-def test_positive_create_rhev_with_valid_name(rhev):
+def test_positive_create_rhev_with_valid_name(rhev, module_target_sat):
     """Create Compute Resource of type Rhev with valid name

     :id: 92a577db-144e-4761-a52e-e83887464986

     :expectedresults: Compute resource is created

     :CaseImportance: Critical

     :BZ: 1602835
     """
-    ComputeResource.create(
+    module_target_sat.cli.ComputeResource.create(
         {
             'name': f'cr {gen_string(str_type="alpha")}',
             'provider': 'Ovirt',
@@ -69,7 +60,7 @@


 @pytest.mark.tier1
-def test_positive_rhev_info(rhev):
+def test_positive_rhev_info(rhev, module_target_sat):
     """List the info of RHEV compute resource

     :id: 1b18f6e8-c431-41ab-ae49-a2bbb74712f2

     :expectedresults: RHEV Compute resource Info is displayed

     :CaseImportance: Critical

     :BZ: 1602835
     """
     name = gen_string('utf8')
-    compute_resource = make_compute_resource(
+    compute_resource = module_target_sat.cli_factory.compute_resource(
         {
             'name': name,
             'provider': 'Ovirt',
@@ -95,7 +86,7 @@


 @pytest.mark.tier1
-def test_positive_delete_by_name(rhev):
+def test_positive_delete_by_name(rhev, module_target_sat):
     """Delete the RHEV compute resource by name

     :id: ac84acbe-3e02-4f49-9695-b668df28b353

     :expectedresults: Compute resource is deleted

     :CaseImportance: Critical

     :BZ: 1602835
     """
-    comp_res = make_compute_resource(
+    comp_res = module_target_sat.cli_factory.compute_resource(
         {
             'provider': 'Ovirt',
             'user': rhev.username,
@@ -116,13 +107,13 @@
         }
     )
     assert comp_res['name']
-    ComputeResource.delete({'name': comp_res['name']})
-    result = ComputeResource.exists(search=('name', comp_res['name']))
+    module_target_sat.cli.ComputeResource.delete({'name': comp_res['name']})
+    result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))
     assert not result


 @pytest.mark.tier1
-def test_positive_delete_by_id(rhev):
+def test_positive_delete_by_id(rhev, module_target_sat):
     """Delete the RHEV compute resource by id

     :id: 4bcd4fa3-df8b-4773-b142-e47458116552

     :expectedresults: Compute resource is deleted

     :CaseImportance: Critical

     :BZ: 1602835
     """
-    comp_res = make_compute_resource(
+    comp_res = module_target_sat.cli_factory.compute_resource(
         {
             'provider': 'Ovirt',
             'user': rhev.username,
@@ -143,13 +134,13 @@
         }
     )
     assert comp_res['name']
-    ComputeResource.delete({'id': comp_res['id']})
-    result = ComputeResource.exists(search=('name', comp_res['name']))
+    module_target_sat.cli.ComputeResource.delete({'id': comp_res['id']})
+    result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name']))
     assert not result


 @pytest.mark.tier2
-def test_negative_create_rhev_with_url(rhev):
+def test_negative_create_rhev_with_url(rhev, module_target_sat):
     """RHEV compute resource negative create with invalid values

     :id: 1f318a4b-8dca-491b-b56d-cff773ed624e

     :expectedresults: Compute resource is not created

     :CaseImportance: High
     """
     with pytest.raises(CLIReturnCodeError):
-        ComputeResource.create(
+        module_target_sat.cli.ComputeResource.create(
             {
                 'provider': 'Ovirt',
                 'user': rhev.username,
@@ -171,7 +162,7 @@


 @pytest.mark.tier2
-def test_negative_create_with_same_name(rhev):
+def test_negative_create_with_same_name(rhev, module_target_sat):
     """RHEV compute resource negative create with the same name

     :id: f00813ef-df31-462c-aa87-479b8272aea3

     :expectedresults: Compute resource is not created

     :CaseImportance: High
     """
     name = gen_string('alpha')
-    compute_resource = make_compute_resource(
+    compute_resource = module_target_sat.cli_factory.compute_resource(
         {
             'name': name,
             'provider': 'Ovirt',
@@ -198,7 +189,7 @@
     )
     assert compute_resource['name'] == name
     with pytest.raises(CLIFactoryError):
-        make_compute_resource(
+        module_target_sat.cli_factory.compute_resource(
             {
                 'name': name,
                 'provider': 'Ovirt',
@@ -212,7 +203,7 @@

 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_update_name(rhev):
+def test_positive_update_name(rhev, module_target_sat):
     """RHEV compute resource positive update

     :id: 5ca29b81-d1f0-409f-843d-aa5daf957d7f

     :expectedresults: Compute resource is updated

     :CaseImportance: Critical

     :BZ: 1602835
     """
     new_name = gen_string('alpha')
-    comp_res = make_compute_resource(
+    comp_res = module_target_sat.cli_factory.compute_resource(
         {
             'provider': 'Ovirt',
             'user': rhev.username,
@@ -239,12 +230,12 @@
         }
     )
     assert comp_res['name']
-    ComputeResource.update({'name': comp_res['name'], 'new-name': new_name})
-    assert new_name == ComputeResource.info({'id': comp_res['id']})['name']
+    module_target_sat.cli.ComputeResource.update({'name': comp_res['name'], 'new-name': new_name})
+    assert new_name == module_target_sat.cli.ComputeResource.info({'id': comp_res['id']})['name']


 @pytest.mark.tier2
-def test_positive_add_image_rhev_with_name(rhev, module_os):
+def test_positive_add_image_rhev_with_name(rhev, module_os, module_target_sat):
     """Add images to the RHEV compute resource

     :id: 2da84165-a56f-4282-9343-94828fa69c13

     :expectedresults: Image is added to the compute resource
     """
     if rhev.image_uuid is None:
         pytest.skip('Missing configuration for rhev.image_uuid')

-    comp_res = make_compute_resource(
+    comp_res = module_target_sat.cli_factory.compute_resource(
         {
             'provider': 'Ovirt',
             'user': rhev.username,
@@ -273,7 +264,7 @@
         }
     )
     assert comp_res['name']
-    ComputeResource.image_create(
+    module_target_sat.cli.ComputeResource.image_create(
         {
             'compute-resource': comp_res['name'],
             'name': f'img {gen_string(str_type="alpha")}',
@@ -283,13 +274,15 @@
             'username': "root",
         }
     )
-    result = ComputeResource.image_list({'compute-resource': comp_res['name']})
+    result = module_target_sat.cli.ComputeResource.image_list(
+        {'compute-resource': comp_res['name']}
+    )
     assert result[0]['uuid'] == rhev.image_uuid


 @pytest.mark.skip_if_open("BZ:1829239")
 @pytest.mark.tier2
-def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os):
+def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os, module_target_sat):
     """Attempt to add invalid image to the RHEV compute resource

     :id: e8a653f9-9749-4c76-95ed-2411a7c0a117

     :expectedresults: The image should not be added to the CR

     :BZ: 1829239
     """
-    comp_res = make_compute_resource(
+    comp_res = module_target_sat.cli_factory.compute_resource(
         {
             'provider': 'Ovirt',
             'user': rhev.username,
@@ -318,7 +311,7 @@
     )
     assert comp_res['name']
     with pytest.raises(CLIReturnCodeError):
-        ComputeResource.image_create(
+        module_target_sat.cli.ComputeResource.image_create(
             {
                 'compute-resource': comp_res['name'],
                 'name': f'img {gen_string(str_type="alpha")}',
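Both negative image tests in this file reduce to the same assertion shape: hammer exits non-zero and the wrapper raises `CLIReturnCodeError`. A condensed sketch (the 256-character name stands in for any invalid field; other required arguments are elided):

    with pytest.raises(CLIReturnCodeError):
        module_target_sat.cli.ComputeResource.image_create(
            {
                'compute-resource': comp_res['name'],
                'name': gen_string('alpha', 256),  # exceeds the 255-character limit
                'uuid': rhev.image_uuid,
                'username': 'root',
            }
        )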
:expectedresults: The image should not be added to the CR - """ if rhev.image_uuid is None: pytest.skip('Missing configuration for rhev.image_uuid') - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -363,7 +355,7 @@ def test_negative_add_image_rhev_with_invalid_name(rhev, module_os): assert comp_res['name'] with pytest.raises(CLIReturnCodeError): - ComputeResource.image_create( + module_target_sat.cli.ComputeResource.image_create( { 'compute-resource': comp_res['name'], # too long string (>255 chars) @@ -380,8 +372,10 @@ def test_negative_add_image_rhev_with_invalid_name(rhev, module_os): @pytest.mark.on_premises_provisioning @pytest.mark.tier3 @pytest.mark.rhel_ver_match('[^6]') +@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete=True'], indirect=True) def test_positive_provision_rhev_with_host_group( request, + setting_update, module_provisioning_sat, rhev, module_sca_manifest_org, @@ -421,11 +415,11 @@ def test_positive_provision_rhev_with_host_group( :CaseAutomation: Automated """ - cli = module_provisioning_sat.sat.cli + sat = module_provisioning_sat.sat cr_name = gen_string('alpha') org_name = module_sca_manifest_org.name loc_name = module_location.name - rhv_cr = cli.ComputeResource.create( + rhv_cr = sat.cli.ComputeResource.create( { 'name': cr_name, 'provider': 'Ovirt', @@ -441,7 +435,7 @@ def test_positive_provision_rhev_with_host_group( assert rhv_cr['name'] == cr_name domain_name = module_provisioning_sat.domain.name subnet_name = module_provisioning_sat.subnet.name - hostgroup = cli.HostGroup.create( + hostgroup = sat.cli.HostGroup.create( { 'name': gen_string('alpha'), 'organization': org_name, @@ -454,13 +448,13 @@ def test_positive_provision_rhev_with_host_group( 'kickstart-repository-id': module_provisioning_rhel_content.ksrepo.id, 'lifecycle-environment-id': module_sca_manifest_org.library.id, 'operatingsystem': module_provisioning_rhel_content.os.title, - 'pxe-loader': "PXELinux BIOS", + 'pxe-loader': 'PXELinux BIOS', 'partition-table': default_partitiontable.name, 'compute-resource-id': rhv_cr.get('id'), } ) host_name = gen_string('alpha').lower() - host = cli.Host.create( + host = sat.cli.Host.create( { 'name': f'{host_name}', 'organization': org_name, @@ -473,7 +467,7 @@ def test_positive_provision_rhev_with_host_group( 'mac': None, 'compute-attributes': f"cluster={rhev.cluster_id}," "cores=1," - "memory=4294967296," # 4 GiB + "memory=6442450944," # 6 GiB "start=1", 'interface': ( f"compute_name=nic1, compute_network=" @@ -484,12 +478,12 @@ def test_positive_provision_rhev_with_host_group( } ) # cleanup - request.addfinalizer(lambda: cli.Host.delete({'id': host['id']})) + request.addfinalizer(lambda: sat.provisioning_cleanup(host['name'], interface='CLI')) # checks hostname = f'{host_name}.{domain_name}' assert hostname == host['name'] - host_info = Host.info({'name': hostname}) + host_info = sat.cli.Host.info({'name': hostname}) # Check on RHV, if VM exists assert rhev.rhv_api.does_vm_exist(hostname) # Get the information of created VM @@ -507,12 +501,12 @@ def test_positive_provision_rhev_with_host_group( # the result of the installation. Wait until Satellite reports that the host is installed. 
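# wait_for (from the wait_for library used across these tests) re-evaluates the lambda every `delay` seconds until it returns a truthy value or `timeout` expires, so the block below simply polls the host's build status until it moves past 'Pending installation'; the assertion that follows then confirms it landed on 'Installed'.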
exp_st = 'Pending installation' wait_for( - lambda: cli.Host.info({'id': host['id']})['status']['build-status'] != exp_st, + lambda: sat.cli.Host.info({'id': host['id']})['status']['build-status'] != exp_st, # timeout=200, # no need to wait long, the host was already pingable timeout=4000, # wait long because the ping check has not been done, TODO delay=10, ) - host = cli.Host.info({'id': host['id']}) + host = sat.cli.Host.info({'id': host['id']}) assert host['status']['build-status'] == 'Installed' @@ -538,15 +532,13 @@ def test_positive_provision_rhev_without_host_group(rhev): :expectedresults: The host should be provisioned successfully :CaseAutomation: NotAutomated - - :CaseLevel: Integration """ @pytest.mark.on_premises_provisioning @pytest.mark.tier3 @pytest.mark.rhel_ver_match('[^6]') -@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete'], indirect=True) +@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete=True'], indirect=True) def test_positive_provision_rhev_image_based_and_disassociate( request, module_provisioning_sat, @@ -585,11 +577,11 @@ def test_positive_provision_rhev_image_based_and_disassociate( :CaseAutomation: Automated """ - cli = module_provisioning_sat.sat.cli + sat = module_provisioning_sat.sat org_name = module_org.name loc_name = module_location.name name = gen_string('alpha') - rhv_cr = cli.ComputeResource.create( + rhv_cr = sat.cli.ComputeResource.create( { 'name': name, 'provider': 'Ovirt', @@ -606,7 +598,7 @@ def test_positive_provision_rhev_image_based_and_disassociate( host_name = gen_string('alpha').lower() # use some RHEL (usually latest) os = module_provisioning_rhel_content.os - image = cli.ComputeResource.image_create( + image = sat.cli.ComputeResource.image_create( { 'compute-resource': rhv_cr['name'], 'name': f'img {gen_string(str_type="alpha")}', @@ -624,7 +616,7 @@ def test_positive_provision_rhev_image_based_and_disassociate( host = None # to avoid UnboundLocalError in finally block rhv_vm = None try: - host = cli.Host.create( + host = sat.cli.Host.create( { 'name': f'{host_name}', 'organization': org_name, @@ -639,7 +631,7 @@ def test_positive_provision_rhev_image_based_and_disassociate( 'mac': None, 'compute-attributes': f"cluster={rhev.cluster_id}," "cores=1," - "memory=4294967296," # 4 GiB + "memory=6442450944," # 6 GiB "start=1", 'interface': ( f"compute_name=nic1, compute_network=" @@ -654,7 +646,7 @@ def test_positive_provision_rhev_image_based_and_disassociate( ) hostname = f'{host_name}.{domain_name}' assert hostname == host['name'] - host_info = Host.info({'name': hostname}) + host_info = sat.cli.Host.info({'name': hostname}) # Check on RHV, if VM exists assert rhev.rhv_api.does_vm_exist(hostname) # Get the information of created VM @@ -669,15 +661,14 @@ def test_positive_provision_rhev_image_based_and_disassociate( # that's enough. 
# Disassociate the host from the CR, check it's disassociated - cli.Host.disassociate({'name': hostname}) - host_info = cli.Host.info({'name': hostname}) + sat.cli.Host.disassociate({'name': hostname}) + host_info = sat.cli.Host.info({'name': hostname}) assert 'compute-resource' not in host_info finally: - # Now, let's just remove the host if host is not None: - cli.Host.delete({'id': host['id']}) + sat.provisioning_cleanup(host['name'], interface='CLI') # Delete the VM since the disassociated VM won't get deleted if rhv_vm is not None: rhv_vm.delete() diff --git a/tests/foreman/cli/test_computeresource_vmware.py b/tests/foreman/cli/test_computeresource_vmware.py index f093ed84928..b72a5fca5c5 100644 --- a/tests/foreman/cli/test_computeresource_vmware.py +++ b/tests/foreman/cli/test_computeresource_vmware.py @@ -1,168 +1,155 @@ """ :Requirement: Computeresource Vmware -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-VMWare :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No +:CaseAutomation: Automated + """ -import pytest from fauxfactory import gen_string +import pytest +from wait_for import wait_for +from wrapanapi import VMWareSystem -from robottelo.cli.factory import make_compute_resource -from robottelo.cli.org import Org from robottelo.config import settings from robottelo.constants import FOREMAN_PROVIDERS -@pytest.fixture(scope='module') -def vmware(module_org, module_location): - vmware = type('vmware', (object,), {})() - vmware.org = module_org - vmware.loc = module_location - Org.add_location({'id': vmware.org.id, 'location-id': vmware.loc.id}) - vmware.vmware_server = settings.vmware.vcenter - vmware.vmware_password = settings.vmware.password - vmware.vmware_username = settings.vmware.username - vmware.vmware_datacenter = settings.vmware.datacenter - vmware.vmware_img_name = settings.vmware.image_name - vmware.vmware_img_arch = settings.vmware.image_arch - vmware.vmware_img_os = settings.vmware.image_os - vmware.vmware_img_user = settings.vmware.image_username - vmware.vmware_img_pass = settings.vmware.image_password - vmware.vmware_vm_name = settings.vmware.vm_name - return vmware - - @pytest.mark.tier1 -def test_positive_create_with_server(vmware): - """Create VMware compute resource with server field +@pytest.mark.e2e +@pytest.mark.upgrade +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_vmware_cr_end_to_end(target_sat, module_org, module_location, vmware): + """Create, Read, Update and Delete VMware compute resources - :id: a06b02c4-fe6a-44ef-bf61-5a28c3905527 + :id: 96faae3f-bc64-4147-a9fc-09c858e0a68f :customerscenario: true - :expectedresults: Compute resource is created, server field saved - correctly + :expectedresults: Compute resource should be created, read, updated and deleted :BZ: 1387917 - :CaseAutomation: Automated - :CaseImportance: Critical """ cr_name = gen_string('alpha') - vmware_cr = make_compute_resource( + # Create + vmware_cr = target_sat.cli.ComputeResource.create( { 'name': cr_name, + 'organization-ids': module_org.id, + 'location-ids': module_location.id, 'provider': FOREMAN_PROVIDERS['vmware'], - 'server': vmware.vmware_server, - 'user': vmware.vmware_username, - 'password': vmware.vmware_password, - 'datacenter': vmware.vmware_datacenter, + 'server': vmware.hostname, + 'user': settings.vmware.username, + 'password': settings.vmware.password, + 'datacenter': settings.vmware.datacenter, } ) assert vmware_cr['name'] == cr_name - assert vmware_cr['server'] == 
vmware.vmware_server - - -@pytest.mark.tier1 -def test_positive_create_with_org(vmware): - """Create VMware Compute Resource with organizations - - :id: 807a1f70-4cc3-4925-b145-0c3b26c57559 - - :customerscenario: true - - :expectedresults: VMware Compute resource is created - - :BZ: 1387917 - - :CaseAutomation: Automated - - :CaseImportance: Critical - """ - cr_name = gen_string('alpha') - vmware_cr = make_compute_resource( - { - 'name': cr_name, - 'organization-ids': vmware.org.id, - 'provider': FOREMAN_PROVIDERS['vmware'], - 'server': vmware.vmware_server, - 'user': vmware.vmware_username, - 'password': vmware.vmware_password, - 'datacenter': vmware.vmware_datacenter, - } - ) + assert vmware_cr['locations'][0] == module_location.name + assert vmware_cr['organizations'][0] == module_org.name + assert vmware_cr['server'] == vmware.hostname + assert vmware_cr['datacenter'] == settings.vmware.datacenter + # List + target_sat.cli.ComputeResource.list({'search': f'name="{cr_name}"'}) assert vmware_cr['name'] == cr_name - - -@pytest.mark.tier1 -def test_positive_create_with_loc(vmware): - """Create VMware Compute Resource with locations - - :id: 214a0f54-6fc2-4e7b-91ab-a45760ffb2f2 - - :customerscenario: true - - :expectedresults: VMware Compute resource is created - - :BZ: 1387917 - - :CaseAutomation: Automated - - :CaseImportance: Critical - """ - cr_name = gen_string('alpha') - vmware_cr = make_compute_resource( - { - 'name': cr_name, - 'location-ids': vmware.loc.id, - 'provider': FOREMAN_PROVIDERS['vmware'], - 'server': vmware.vmware_server, - 'user': vmware.vmware_username, - 'password': vmware.vmware_password, - 'datacenter': vmware.vmware_datacenter, - } + assert vmware_cr['provider'] == FOREMAN_PROVIDERS['vmware'] + # Update CR + new_cr_name = gen_string('alpha') + description = gen_string('alpha') + target_sat.cli.ComputeResource.update( + {'name': cr_name, 'new-name': new_cr_name, 'description': description} ) - assert vmware_cr['name'] == cr_name - - -@pytest.mark.tier1 -@pytest.mark.upgrade -def test_positive_create_with_org_and_loc(vmware): - """Create VMware Compute Resource with organizations and locations - - :id: 96faae3f-bc64-4147-a9fc-09c858e0a68f - - :customerscenario: true - - :expectedresults: VMware Compute resource is created - - :BZ: 1387917 + # Check updated values + result = target_sat.cli.ComputeResource.info({'id': vmware_cr['id']}) + assert result['name'] == new_cr_name + assert result['description'] == description + # Delete CR + target_sat.cli.ComputeResource.delete({'name': result['name']}) + assert not target_sat.cli.ComputeResource.exists(search=('name', result['name'])) + + +@pytest.mark.e2e +@pytest.mark.on_premises_provisioning +@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete=True'], indirect=True) +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.parametrize('provision_method', ['build', 'bootdisk']) +@pytest.mark.rhel_ver_match('[^6]') +@pytest.mark.tier3 +def test_positive_provision_end_to_end( + request, + setting_update, + module_provisioning_sat, + module_sca_manifest_org, + module_location, + pxe_loader, + module_vmware_cr, + module_vmware_hostgroup, + provision_method, + vmware, +): + """Provision a host on vmware compute resource with + the help of hostgroup. + + :id: ff9963fc-a2a7-4392-aa9a-190d5d1c8357 + + :steps: + + 1. Configure provisioning setup. + 2. Create VMware CR + 3. Configure host group setup. + 4. 
Provision a host on VMware + 5. Verify the created host on VMware with wrapanapi + + :expectedresults: Host is provisioned successfully with the hostgroup :CaseAutomation: Automated """ + sat = module_provisioning_sat.sat + hostname = gen_string('alpha').lower() + host = sat.cli.Host.create( { + 'name': hostname, + 'organization': module_sca_manifest_org.name, + 'location': module_location.name, + 'hostgroup': module_vmware_hostgroup.name, + 'compute-resource-id': module_vmware_cr.id, + 'ip': None, + 'mac': None, + 'compute-attributes': f'cluster={settings.vmware.cluster},' + f'path=/Datacenters/{settings.vmware.datacenter}/vm/,' + 'scsi_controller_type=VirtualLsiLogicController,' + 'guest_id=rhel8_64Guest,firmware=automatic,' + 'cpus=1,memory_mb=6000, start=1', + 'interface': f'compute_type=VirtualVmxnet3,' + f'compute_network=VLAN {settings.provisioning.vlan_id}', + 'volume': f'name=Hard disk,size_gb=10,thin=true,eager_zero=false,datastore={settings.vmware.datastore}', + 'provision-method': provision_method, } ) + # teardown + request.addfinalizer(lambda: sat.provisioning_cleanup(host['name'], interface='CLI')) + + hostname = f'{hostname}.{module_provisioning_sat.domain.name}' + assert hostname == host['name'] + # check if vm is created on vmware + vmware = VMWareSystem( + hostname=vmware.hostname, + username=settings.vmware.username, + password=settings.vmware.password, + ) + assert vmware.does_vm_exist(hostname) is True + wait_for( + lambda: sat.cli.Host.info({'name': hostname})['status']['build-status'] + != 'Pending installation', + timeout=1800, + delay=30, + ) + host_info = sat.cli.Host.info({'id': host['id']}) + assert host_info['status']['build-status'] == 'Installed' diff --git a/tests/foreman/cli/test_container_management.py b/tests/foreman/cli/test_container_management.py index 3574b764165..17d0b0b4bd0 100644 --- a/tests/foreman/cli/test_container_management.py +++ b/tests/foreman/cli/test_container_management.py @@ -4,33 +4,25 @@ :CaseAutomation: Automated -:TestType: Functional - :Team: Phoenix-content :CaseComponent: ContainerManagement-Content -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from wait_for import wait_for -from robottelo.cli.factory import ContentView -from robottelo.cli.factory import LifecycleEnvironment -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_product_wait -from robottelo.cli.factory import make_repository -from robottelo.cli.factory import Repository from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import REPO_TYPE +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + REPO_TYPE, +) from robottelo.logging import logger -def _repo(product_id, name=None, upstream_name=None, url=None): +def _repo(sat, product_id, name=None, upstream_name=None, url=None): """Creates a Docker-based repository. :param product_id: ID of the ``Product``.
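# A minimal usage sketch for the refactored helper above, assuming the `target_sat` and `module_org` fixtures already used by the tests in this file:
#     product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id})
#     repo = _repo(target_sat, product['id'])  # upstream name defaults to CONTAINER_UPSTREAM_NAME
#     target_sat.cli.Repository.synchronize({'id': repo['id']})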
@@ -42,7 +34,7 @@ def _repo(product_id, name=None, upstream_name=None, url=None): CONTAINER_REGISTRY_HUB constant. :return: A ``Repository`` object. """ - return make_repository( + return sat.cli_factory.make_repository( { 'content-type': REPO_TYPE['docker'], 'docker-upstream-name': upstream_name or CONTAINER_UPSTREAM_NAME, @@ -57,8 +49,6 @@ class TestDockerClient: """Tests specific to using ``Docker`` as a client to pull Docker images from a Satellite 6 instance. - :CaseLevel: System - :CaseImportance: Medium """ @@ -69,7 +59,7 @@ def test_positive_pull_image(self, module_org, container_contenthost, target_sat :id: 023f0538-2aad-4f87-b8a8-6ccced648366 - :Steps: + :steps: 1. Publish and promote content view with Docker content 2. Register Docker-enabled client against Satellite 6. @@ -78,10 +68,10 @@ def test_positive_pull_image(self, module_org, container_contenthost, target_sat :parametrized: yes """ - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id']) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id']) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) try: result = container_contenthost.execute( f'docker login -u {settings.server.admin_username}' @@ -151,18 +141,22 @@ def test_positive_container_admin_end_to_end_search( # Satellite setup: create product and add Docker repository; # create content view and add Docker repository; # create lifecycle environment and promote content view to it - lce = make_lifecycle_environment({'organization-id': module_org.id}) - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id'], upstream_name=CONTAINER_UPSTREAM_NAME) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id'], upstream_name=CONTAINER_UPSTREAM_NAME) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': registry_name_pattern, 'registry-unauthenticated-pull': 'false', @@ -203,7 +197,7 @@ def test_positive_container_admin_end_to_end_search( assert docker_repo_uri not in result.stdout # 8. 
Set 'Unauthenticated Pull' option to true - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-unauthenticated-pull': 'true', 'id': lce['id'], @@ -255,18 +249,22 @@ def test_positive_container_admin_end_to_end_pull( # Satellite setup: create product and add Docker repository; # create content view and add Docker repository; # create lifecycle environment and promote content view to it - lce = make_lifecycle_environment({'organization-id': module_org.id}) - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id'], upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id'], upstream_name=docker_upstream_name) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': registry_name_pattern, 'registry-unauthenticated-pull': 'false', @@ -311,7 +309,7 @@ def test_positive_container_admin_end_to_end_pull( assert result.status == 1 # 8. Set 'Unauthenticated Pull' option to true - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-unauthenticated-pull': 'true', 'id': lce['id'], @@ -322,3 +320,81 @@ def test_positive_container_admin_end_to_end_pull( # 9. Pull in docker image result = container_contenthost.execute(docker_pull_command) assert result.status == 0 + + def test_negative_pull_content_with_longer_name( + self, target_sat, container_contenthost, module_org + ): + """Verify that a CV with a long name publishes when both the CV and the docker repo have long names. + + :id: e0ac0be4-f5ff-4a88-bb29-33aa2d874f46 + + :steps: + + 1. Create Product, docker repo, CV and LCE with a long name + 2. Sync the repos + 3. Add repository to CV, Publish, and then Promote CV to LCE + 4. Pull in docker image + + :expectedresults: + + 1. Product, repository, CV and LCE with long names should be created successfully + 2. Repository should sync successfully + 3. Publish & Promote should succeed + 4. Docker images can be pulled + + :BZ: 2127470 + + :customerscenario: true + """ + pattern_postfix = gen_string('alpha', 10).lower() + + product_name = f'containers-{pattern_postfix}' + repo_name = f'repo-{pattern_postfix}' + lce_name = f'lce-{pattern_postfix}' + cv_name = f'cv-{pattern_postfix}' + + # 1.
Create Product, docker repo, CV and LCE with a long name + product = target_sat.cli_factory.make_product_wait( + {'name': product_name, 'organization-id': module_org.id} + ) + + repo = _repo( + target_sat, product['id'], name=repo_name, upstream_name=CONTAINER_UPSTREAM_NAME + ) + + # 2. Sync the repos + target_sat.cli.Repository.synchronize({'id': repo['id']}) + + lce = target_sat.cli_factory.make_lifecycle_environment( + {'name': lce_name, 'organization-id': module_org.id} + ) + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'composite': False, 'organization-id': module_org.id} + ) + + # 3. Add repository to CV, Publish, and then Promote CV to LCE + target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) + + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + target_sat.cli.ContentView.version_promote( + {'id': cv['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} + ) + + podman_pull_command = ( + f"podman pull --tls-verify=false {target_sat.hostname}/{module_org.label.lower()}" + f"-{lce['label'].lower()}-{cv['label'].lower()}-{product['label'].lower()}-{repo_name}" + ) + + # 4. Pull in docker image + assert ( + container_contenthost.execute( + f'podman login -u {settings.server.admin_username}' + f' -p {settings.server.admin_password} {target_sat.hostname}' + ).status + == 0 + ) + + assert container_contenthost.execute(podman_pull_command).status == 0 + + assert container_contenthost.execute(f'podman logout {target_sat.hostname}').status == 0 diff --git a/tests/foreman/cli/test_contentaccess.py b/tests/foreman/cli/test_contentaccess.py index 55b1a933328..0c59e5c70e6 100644 --- a/tests/foreman/cli/test_contentaccess.py +++ b/tests/foreman/cli/test_contentaccess.py @@ -2,30 +2,28 @@ :Requirement: Content Access -:CaseLevel: Acceptance - :CaseComponent: Hosts-Content :CaseAutomation: Automated :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import time -import pytest from nailgun import entities +import pytest -from robottelo.cli.host import Host -from robottelo.cli.package import Package from robottelo.config import settings -from robottelo.constants import REAL_0_ERRATA_ID -from robottelo.constants import REAL_RHEL7_0_2_PACKAGE_FILENAME -from robottelo.constants import REAL_RHEL7_0_2_PACKAGE_NAME -from robottelo.constants import REPOS +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + PRDS, + REAL_0_ERRATA_ID, + REAL_RHEL7_0_2_PACKAGE_FILENAME, + REAL_RHEL7_0_2_PACKAGE_NAME, + REPOS, + REPOSET, +) pytestmark = [ pytest.mark.skipif( @@ -36,42 +34,48 @@ @pytest.fixture(scope='module') -def module_lce(module_sca_manifest_org): - return entities.LifecycleEnvironment(organization=module_sca_manifest_org).create() - +def rh_repo_setup_ak(module_sca_manifest_org, module_target_sat): + """Using the module SCA manifest org, create and sync the rhst repo, + then create a CV, LCE and AK; return the AK.""" + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=PRDS['rhel'], + repo=REPOS['rhst7']['name'], + reposet=REPOSET['rhst7'], + releasever=None, + ) + # Sync explicitly because the repo is not synced by default + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() + rh_repo.sync() -@pytest.fixture(scope="module") -def rh_repo_cv(module_sca_manifest_org, rh_repo_gt_manifest, module_lce): - rh_repo_cv =
entities.ContentView(organization=module_sca_manifest_org).create() + # Create CV, LCE and AK + cv = module_target_sat.api.ContentView(organization=module_sca_manifest_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() # Add CV to AK - rh_repo_cv.repository = [rh_repo_gt_manifest] - rh_repo_cv.update(['repository']) - rh_repo_cv.publish() - rh_repo_cv = rh_repo_cv.read() + cv.repository = [rh_repo] + cv.update(['repository']) + cv.publish() + cv = cv.read() # promote the last version published into the module lce - rh_repo_cv.version[-1].promote(data={'environment_ids': module_lce.id, 'force': False}) - return rh_repo_cv - + cv.version[-1].promote(data={'environment_ids': lce.id, 'force': False}) -@pytest.fixture(scope="module") -def module_ak(rh_repo_cv, module_sca_manifest_org, module_lce): - module_ak = entities.ActivationKey( - content_view=rh_repo_cv, - environment=module_lce, + ak = module_target_sat.api.ActivationKey( + content_view=cv, + environment=lce, organization=module_sca_manifest_org, ).create() # Ensure tools repo is enabled in the activation key - module_ak.content_override( + ak.content_override( data={'content_overrides': [{'content_label': REPOS['rhst7']['id'], 'value': '1'}]} ) - return module_ak + return ak @pytest.fixture(scope="module") def vm( - rh_repo_gt_manifest, + rh_repo_setup_ak, module_sca_manifest_org, - module_ak, rhel7_contenthost_module, module_target_sat, ): @@ -80,8 +84,9 @@ def vm( 'rpm -Uvh https://download.fedoraproject.org/pub/epel/7/x86_64/Packages/p/' 'python2-psutil-5.6.7-1.el7.x86_64.rpm' ) - rhel7_contenthost_module.install_katello_ca(module_target_sat) - rhel7_contenthost_module.register_contenthost(module_sca_manifest_org.label, module_ak.name) + rhel7_contenthost_module.register( + module_sca_manifest_org, None, rh_repo_setup_ak.name, module_target_sat + ) host = entities.Host().search(query={'search': f'name={rhel7_contenthost_module.hostname}'}) host_id = host[0].id host_content = entities.Host(id=host_id).read_json() @@ -93,7 +98,7 @@ def vm( @pytest.mark.tier2 @pytest.mark.pit_client @pytest.mark.pit_server -def test_positive_list_installable_updates(vm): +def test_positive_list_installable_updates(vm, module_target_sat): """Ensure packages applicability is functioning properly. :id: 4feb692c-165b-4f96-bb97-c8447bd2cf6e @@ -117,7 +122,7 @@ def test_positive_list_installable_updates(vm): :CaseImportance: Critical """ for _ in range(30): - applicable_packages = Package.list( + applicable_packages = module_target_sat.cli.Package.list( { 'host': vm.hostname, 'packages-restrict-applicable': 'true', @@ -137,7 +142,7 @@ def test_positive_list_installable_updates(vm): @pytest.mark.upgrade @pytest.mark.pit_client @pytest.mark.pit_server -def test_positive_erratum_installable(vm): +def test_positive_erratum_installable(vm, module_target_sat): """Ensure erratum applicability is showing properly, without attaching any subscription. 
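# Applicability data is recalculated asynchronously after registration, so the checks below poll instead of asserting immediately; a minimal sketch of the retry pattern (names as used in this file, 30 x 10 s gives a ~5 min upper bound):
#     for _ in range(30):
#         erratum = module_target_sat.cli.Host.errata_list(
#             {'host': vm.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}
#         )
#         if erratum:
#             break
#         time.sleep(10)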
@@ -159,7 +164,9 @@ def test_positive_erratum_installable(vm): """ # check that package errata is applicable for _ in range(30): - erratum = Host.errata_list({'host': vm.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + erratum = module_target_sat.cli.Host.errata_list( + {'host': vm.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'} + ) if erratum: break time.sleep(10) @@ -225,8 +232,6 @@ def test_negative_unregister_and_pull_content(vm): :expectedresults: Host can no longer retrieve content from satellite - :CaseLevel: System - :parametrized: yes :CaseImportance: Critical diff --git a/tests/foreman/cli/test_contentcredentials.py b/tests/foreman/cli/test_contentcredentials.py index 25324aa2d3d..208812cb857 100644 --- a/tests/foreman/cli/test_contentcredentials.py +++ b/tests/foreman/cli/test_contentcredentials.py @@ -6,40 +6,25 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentCredentials :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from tempfile import mkstemp +from fauxfactory import gen_alphanumeric, gen_choice, gen_integer, gen_string import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_string - -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.content_credentials import ContentCredential -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_content_credential -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.org import Org -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository + from robottelo.constants import DataFile -from robottelo.constants import DEFAULT_ORG -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) VALID_GPG_KEY_FILE_PATH = DataFile.VALID_GPG_KEY_FILE @@ -62,9 +47,8 @@ def create_gpg_key_file(content=None): search_key = 'name' -@pytest.mark.build_sanity @pytest.mark.tier1 -def test_verify_gpg_key_content_displayed(module_org): +def test_verify_gpg_key_content_displayed(target_sat, module_org): """content-credential info should display key content :id: 0ee87ee0-8bf1-4d15-b5f9-0ac364e61155 @@ -77,14 +61,14 @@ def test_verify_gpg_key_content_displayed(module_org): content = gen_alphanumeric() key_path = create_gpg_key_file(content=content) assert key_path, 'GPG Key file must be created' - gpg_key = make_content_credential( + gpg_key = target_sat.cli_factory.make_content_credential( {'path': key_path, 'name': gen_string('alpha'), 'organization-id': module_org.id} ) assert gpg_key['content'] == content @pytest.mark.tier1 -def test_positive_get_info_by_name(module_org): +def test_positive_get_info_by_name(target_sat, module_org): """Create single gpg key and get its info by name :id: 890456ea-0b31-4386-9231-f47572f26d08 @@ -95,15 +79,17 @@ def test_positive_get_info_by_name(module_org): :CaseImportance: Critical """ name = gen_string('utf8') - gpg_key = make_content_credential( - {'key': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': module_org.id} + gpg_key = target_sat.cli_factory.make_content_credential( + {'path': VALID_GPG_KEY_FILE_PATH, 
'name': name, 'organization-id': module_org.id} + ) + gpg_key = target_sat.cli.ContentCredential.info( + {'name': gpg_key['name'], 'organization-id': module_org.id} ) - gpg_key = ContentCredential.info({'name': gpg_key['name'], 'organization-id': module_org.id}) assert gpg_key['name'] == name @pytest.mark.tier1 -def test_positive_block_delete_key_in_use(module_org, target_sat): +def test_positive_block_delete_key_in_use(target_sat, module_org): """Create a product and single associated repository. Create a new gpg key and associate it with the product and repository. Attempt to delete the gpg key in use @@ -119,10 +105,12 @@ def test_positive_block_delete_key_in_use(module_org, target_sat): :CaseImportance: Critical """ name = gen_string('utf8') - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - gpg_key = make_content_credential( - {'key': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': module_org.id} + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository( + {'product-id': product['id'], 'content-type': 'yum'} + ) + gpg_key = target_sat.cli_factory.make_content_credential( + {'path': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': module_org.id} ) # Associate repo with the product, gpg key with product and repo @@ -150,7 +138,7 @@ def test_positive_block_delete_key_in_use(module_org, target_sat): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_default_org(name, module_org, default_org): +def test_positive_create_with_default_org(target_sat, name, default_org): """Create gpg key with valid name and valid gpg key via file import using the default created organization @@ -162,20 +150,21 @@ def test_positive_create_with_default_org(name, module_org, default_org): :CaseImportance: Critical """ - org = Org.info({'name': DEFAULT_ORG}) - gpg_key = make_content_credential( - {'key': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': org['id']} + + gpg_key = target_sat.cli_factory.make_content_credential( + {'path': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': default_org.id} ) # Can we find the new object? - result = ContentCredential.exists( - {'organization-id': org['id']}, (search_key, gpg_key[search_key]) + result = target_sat.cli.ContentCredential.exists( + {'organization-id': default_org.id}, (search_key, gpg_key[search_key]) ) + target_sat.cli.ContentCredential.delete({'id': gpg_key.id}) assert gpg_key[search_key] == result[search_key] @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_custom_org(name, module_org): +def test_positive_create_with_custom_org(target_sat, name, module_org): """Create gpg key with valid name and valid gpg key via file import using a new organization @@ -187,15 +176,15 @@ def test_positive_create_with_custom_org(name, module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential( + gpg_key = target_sat.cli_factory.make_content_credential( { - 'key': VALID_GPG_KEY_FILE_PATH, + 'path': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': module_org.id, } ) # Can we find the new object? 
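# `exists` is expected to run a list/search scoped by the (search_key, value) pair and return the matching record, empty when nothing matches, which is what the assertion below relies on.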
- result = ContentCredential.exists( + result = target_sat.cli.ContentCredential.exists( {'organization-id': module_org.id}, (search_key, gpg_key[search_key]), ) @@ -203,7 +192,7 @@ def test_positive_create_with_custom_org(name, module_org): @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then try to create new one with same name @@ -214,19 +203,23 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - gpg_key = make_content_credential({'name': name, 'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential( + {'name': name, 'organization-id': module_org.id} + ) # Can we find the new object? - result = ContentCredential.exists( + result = target_sat.cli.ContentCredential.exists( {'organization-id': module_org.id}, (search_key, gpg_key[search_key]) ) assert gpg_key[search_key] == result[search_key] with pytest.raises(CLIFactoryError): - make_content_credential({'name': name, 'organization-id': module_org.id}) + target_sat.cli_factory.make_content_credential( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_negative_create_with_no_gpg_key(name, module_org): +def test_negative_create_with_no_gpg_key(name, target_sat, module_org): """Create gpg key with valid name and no gpg key :id: bbfd5306-cfe7-40c1-a3a2-35834108163c @@ -238,12 +231,12 @@ def test_negative_create_with_no_gpg_key(name, module_org): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentCredential.create({'name': name, 'organization-id': module_org.id}) + target_sat.cli.ContentCredential.create({'name': name, 'organization-id': module_org.id}) @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(name, module_org): +def test_negative_create_with_invalid_name(target_sat, name, module_org): """Create gpg key with invalid name and valid gpg key via file import @@ -257,13 +250,15 @@ def test_negative_create_with_invalid_name(name, module_org): """ with pytest.raises(CLIFactoryError): # factory will provide a valid key - make_content_credential({'name': name, 'organization-id': module_org.id}) + target_sat.cli_factory.make_content_credential( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete(name, module_org): +def test_positive_delete(target_sat, name, module_org): """Create gpg key with valid name and valid gpg key via file import then delete it @@ -275,14 +270,16 @@ def test_positive_delete(name, module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential({'name': name, 'organization-id': module_org.id}) - result = ContentCredential.exists( + gpg_key = target_sat.cli_factory.make_content_credential( + {'name': name, 'organization-id': module_org.id} + ) + result = target_sat.cli.ContentCredential.exists( {'organization-id': module_org.id}, (search_key, gpg_key[search_key]), ) assert gpg_key[search_key] == result[search_key] - ContentCredential.delete({'name': name, 'organization-id': module_org.id}) - result = ContentCredential.exists( + target_sat.cli.ContentCredential.delete({'name': name, 'organization-id': 
module_org.id}) + result = target_sat.cli.ContentCredential.exists( {'organization-id': module_org.id}, (search_key, gpg_key[search_key]), ) @@ -291,7 +288,7 @@ def test_positive_delete(name, module_org): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(new_name, module_org): +def test_positive_update_name(target_sat, new_name, module_org): """Create gpg key with valid name and valid gpg key via file import then update its name @@ -303,15 +300,17 @@ def test_positive_update_name(new_name, module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential({'organization-id': module_org.id}) - ContentCredential.update( + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + target_sat.cli.ContentCredential.update( { 'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id, } ) - gpg_key = ContentCredential.info({'name': new_name, 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'name': new_name, 'organization-id': module_org.id} + ) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @@ -328,23 +327,25 @@ def test_positive_update_key(name, module_org, target_sat): :CaseImportance: Critical """ - gpg_key = make_content_credential({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) content = gen_alphanumeric(gen_integer(20, 50)) assert gpg_key['content'] != content local_key = create_gpg_key_file(content) assert gpg_key, 'GPG Key file must be created' key = f'/tmp/{gen_alphanumeric()}' target_sat.put(local_key, key) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'path': key, 'name': gpg_key['name'], 'organization-id': module_org.id} ) - gpg_key = ContentCredential.info({'name': gpg_key['name'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'name': gpg_key['name'], 'organization-id': module_org.id} + ) assert gpg_key['content'] == content @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(new_name, module_org): +def test_negative_update_name(target_sat, new_name, module_org): """Create gpg key with valid name and valid gpg key via file import then fail to update its name @@ -356,9 +357,9 @@ def test_negative_update_name(new_name, module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError): - ContentCredential.update( + target_sat.cli.ContentCredential.update( { 'name': gpg_key['name'], 'new-name': new_name, @@ -368,23 +369,23 @@ def test_negative_update_name(new_name, module_org): @pytest.mark.tier2 -def test_positive_add_empty_product(module_org): +def test_positive_add_empty_product(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with empty (no repos) custom product :id: 61c700db-43ab-4b8c-8527-f4cfc085afaa :expectedresults: gpg key is associated with product - - :CaseLevel: Integration """ - gpg_key = make_content_credential({'organization-id': module_org.id}) - product = make_product({'gpg-key-id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': 
module_org.id}) + product = target_sat.cli_factory.make_product( + {'gpg-key-id': gpg_key['id'], 'organization-id': module_org.id} + ) assert product['gpg']['gpg-key'] == gpg_key['name'] @pytest.mark.tier2 -def test_positive_add_product_with_repo(module_org): +def test_positive_add_product_with_repo(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has one repository @@ -392,23 +393,23 @@ def test_positive_add_product_with_repo(module_org): :expectedresults: gpg key is associated with product as well as with the repository - - :CaseLevel: Integration """ - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - gpg_key = make_content_credential({'organization-id': module_org.id}) - Product.update( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository( + {'product-id': product['id'], 'content-type': 'yum'} + ) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + target_sat.cli.Product.update( {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} ) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert product['gpg']['gpg-key-id'] == gpg_key['id'] assert repo['gpg-key']['id'] == gpg_key['id'] @pytest.mark.tier2 -def test_positive_add_product_with_repos(module_org): +def test_positive_add_product_with_repos(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has more than one repository @@ -417,24 +418,25 @@ def test_positive_add_product_with_repos(module_org): :expectedresults: gpg key is associated with product as well as with the repositories - - :CaseLevel: Integration """ - product = make_product({'organization-id': module_org.id}) - repos = [make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5))] - gpg_key = make_content_credential({'organization-id': module_org.id}) - Product.update( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repos = [ + target_sat.cli_factory.make_repository({'product-id': product['id'], 'content-type': 'yum'}) + for _ in range(gen_integer(2, 5)) + ] + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + target_sat.cli.Product.update( {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} ) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key-id'] == gpg_key['id'] for repo in repos: - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key']['id'] == gpg_key['id'] @pytest.mark.tier2 -def test_positive_add_repo_from_product_with_repo(module_org): +def test_positive_add_repo_from_product_with_repo(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has one repository @@ -443,23 +445,23 @@ def 
test_positive_add_repo_from_product_with_repo(module_org): :expectedresults: gpg key is associated with the repository but not with the product - - :CaseLevel: Integration """ - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - gpg_key = make_content_credential({'organization-id': module_org.id}) - Repository.update( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository( + {'product-id': product['id'], 'content-type': 'yum'} + ) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + target_sat.cli.Repository.update( {'gpg-key-id': gpg_key['id'], 'id': repo['id'], 'organization-id': module_org.id} ) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key']['id'] == gpg_key['id'] assert product['gpg'].get('gpg-key-id') != gpg_key['id'] @pytest.mark.tier2 -def test_positive_add_repo_from_product_with_repos(module_org): +def test_positive_add_repo_from_product_with_repos(target_sat, module_org): """Create gpg key via file import and associate with custom repo GPGKey should contain valid name and valid key and should be associated @@ -469,28 +471,29 @@ def test_positive_add_repo_from_product_with_repos(module_org): :id: e3019a61-ec32-4044-9087-e420b8db4e09 :expectedresults: gpg key is associated with the repository - - :CaseLevel: Integration """ - product = make_product({'organization-id': module_org.id}) - repos = [make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5))] - gpg_key = make_content_credential({'organization-id': module_org.id}) - Repository.update( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repos = [ + target_sat.cli_factory.make_repository({'product-id': product['id'], 'content-type': 'yum'}) + for _ in range(gen_integer(2, 5)) + ] + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + target_sat.cli.Repository.update( {'gpg-key-id': gpg_key['id'], 'id': repos[0]['id'], 'organization-id': module_org.id} ) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg'].get('gpg-key-id') != gpg_key['id'] # First repo should have a valid gpg key assigned - repo = Repository.info({'id': repos.pop(0)['id']}) + repo = target_sat.cli.Repository.info({'id': repos.pop(0)['id']}) assert repo['gpg-key']['id'] == gpg_key['id'] # The rest of repos should not for repo in repos: - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('id') != gpg_key['id'] @pytest.mark.tier2 -def test_positive_update_key_for_empty_product(module_org): +def test_positive_update_key_for_empty_product(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with empty (no repos) custom product then update the key @@ -499,34 +502,34 @@ def test_positive_update_key_for_empty_product(module_org): :expectedresults: gpg key is associated with product before/after update - - :CaseLevel: Integration """ # Create a product 
and a gpg key - product = make_product({'organization-id': module_org.id}) - gpg_key = make_content_credential({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) # Associate gpg key with a product - Product.update( + target_sat.cli.Product.update( {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} ) # Verify gpg key was associated - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] == gpg_key['name'] # Update the gpg key new_name = gen_choice(list(valid_data_list().values())) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) # Verify changes are reflected in the gpg key - gpg_key = ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'id': gpg_key['id'], 'organization-id': module_org.id} + ) assert gpg_key['name'] == new_name # Verify changes are reflected in the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] == new_name @pytest.mark.tier2 -def test_positive_update_key_for_product_with_repo(module_org): +def test_positive_update_key_for_product_with_repo(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has one repository then update the key @@ -535,41 +538,43 @@ def test_positive_update_key_for_product_with_repo(module_org): :expectedresults: gpg key is associated with product before/after update as well as with the repository - - :CaseLevel: Integration """ # Create a product and a gpg key - product = make_product({'organization-id': module_org.id}) - gpg_key = make_content_credential({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) # Create a repository and assign it to the product - repo = make_repository({'product-id': product['id']}) + repo = target_sat.cli_factory.make_repository( + {'product-id': product['id'], 'content-type': 'yum'} + ) # Associate gpg key with a product - Product.update( + target_sat.cli.Product.update( {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} ) # Verify gpg key was associated - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert product['gpg']['gpg-key'] == gpg_key['name'] assert repo['gpg-key'].get('name') == gpg_key['name'] # Update the gpg key new_name = gen_choice(list(valid_data_list().values())) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) # Verify changes are reflected in the gpg key - gpg_key = 
ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'id': gpg_key['id'], 'organization-id': module_org.id} + ) assert gpg_key['name'] == new_name # Verify changes are reflected in the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] == new_name # Verify changes are reflected in the repository - repo = Repository.info({'id': repo['id']}) - assert repo['gpg-key'].get('id') == gpg_key['id'] + repo = target_sat.cli.Repository.info({'id': repo['id']}) + assert repo['gpg-key'].get('name') == new_name @pytest.mark.tier2 -def test_positive_update_key_for_product_with_repos(module_org): +def test_positive_update_key_for_product_with_repos(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has more than one repository then update the key @@ -578,43 +583,46 @@ def test_positive_update_key_for_product_with_repos(module_org): :expectedresults: gpg key is associated with product before/after update as well as with the repositories - - :CaseLevel: Integration """ # Create a product and a gpg key - product = make_product({'organization-id': module_org.id}) - gpg_key = make_content_credential({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) # Create repositories and assign them to the product - repos = [make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5))] + repos = [ + target_sat.cli_factory.make_repository({'product-id': product['id'], 'content-type': 'yum'}) + for _ in range(gen_integer(2, 5)) + ] # Associate gpg key with a product - Product.update( + target_sat.cli.Product.update( {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} ) # Verify gpg key was associated - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] == gpg_key['name'] for repo in repos: - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('name') == gpg_key['name'] # Update the gpg key new_name = gen_choice(list(valid_data_list().values())) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) # Verify changes are reflected in the gpg key - gpg_key = ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'id': gpg_key['id'], 'organization-id': module_org.id} + ) assert gpg_key['name'] == new_name # Verify changes are reflected in the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] == new_name # Verify changes are reflected in the repositories for repo in repos: - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('name') == 
new_name @pytest.mark.tier2 -def test_positive_update_key_for_repo_from_product_with_repo(module_org): +def test_positive_update_key_for_repo_from_product_with_repo(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has one repository then update the key @@ -623,34 +631,36 @@ def test_positive_update_key_for_repo_from_product_with_repo(module_org): :expectedresults: gpg key is associated with the repository before/after update, but not with the product - - :CaseLevel: Integration """ # Create a product and a gpg key - product = make_product({'organization-id': module_org.id}) - gpg_key = make_content_credential({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) # Create repository, assign product and gpg-key - repo = make_repository({'gpg-key-id': gpg_key['id'], 'product-id': product['id']}) + repo = target_sat.cli_factory.make_repository( + {'gpg-key-id': gpg_key['id'], 'product-id': product['id'], 'content-type': 'yum'} + ) # Verify gpg key was associated assert repo['gpg-key'].get('name') == gpg_key['name'] # Update the gpg key new_name = gen_choice(list(valid_data_list().values())) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) # Verify changes are reflected in the gpg key - gpg_key = ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'id': gpg_key['id'], 'organization-id': module_org.id} + ) assert gpg_key['name'] == new_name # Verify changes are reflected in the repositories - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('name') == new_name # Verify gpg key wasn't added to the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] != new_name @pytest.mark.tier2 -def test_positive_update_key_for_repo_from_product_with_repos(module_org): +def test_positive_update_key_for_repo_from_product_with_repos(target_sat, module_org): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has more than one repository then update the key @@ -660,232 +670,46 @@ def test_positive_update_key_for_repo_from_product_with_repos(module_org): :expectedresults: gpg key is associated with a single repository before/after update and not associated with product or other repositories - - :CaseLevel: Integration """ # Create a product and a gpg key - product = make_product({'organization-id': module_org.id}) - gpg_key = make_content_credential({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) # Create repositories and assign them to the product - repos = [make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5))] + repos = [ + target_sat.cli_factory.make_repository({'product-id': product['id'], 'content-type': 'yum'}) + for _ in range(gen_integer(2, 5)) + ] # Associate 
gpg key with a single repository - Repository.update( + target_sat.cli.Repository.update( {'gpg-key-id': gpg_key['id'], 'id': repos[0]['id'], 'organization-id': module_org.id} ) # Verify gpg key was associated - repos[0] = Repository.info({'id': repos[0]['id']}) + repos[0] = target_sat.cli.Repository.info({'id': repos[0]['id']}) assert repos[0]['gpg-key']['name'] == gpg_key['name'] # Update the gpg key new_name = gen_choice(list(valid_data_list().values())) - ContentCredential.update( + target_sat.cli.ContentCredential.update( {'name': gpg_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) # Verify changes are reflected in the gpg key - gpg_key = ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) + gpg_key = target_sat.cli.ContentCredential.info( + {'id': gpg_key['id'], 'organization-id': module_org.id} + ) assert gpg_key['name'] == new_name # Verify changes are reflected in the associated repository - repos[0] = Repository.info({'id': repos[0]['id']}) + repos[0] = target_sat.cli.Repository.info({'id': repos[0]['id']}) assert repos[0]['gpg-key'].get('name') == new_name # Verify changes are not reflected in the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['gpg']['gpg-key'] != new_name # Verify changes are not reflected in the rest of repositories for repo in repos[1:]: - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('name') != new_name -@pytest.mark.tier2 -def test_positive_delete_key_for_empty_product(module_org): - """Create gpg key with valid name and valid gpg key via file - import then associate it with empty (no repos) custom product - then delete it - - :id: 238a80f8-983a-4fd5-a168-6ef9442e2b1c - - :expectedresults: gpg key is associated with product during creation - but removed from product after deletion - - :CaseLevel: Integration - """ - # Create a product and a gpg key - gpg_key = make_content_credential({'organization-id': module_org.id}) - product = make_product({'gpg-key-id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key was associated - assert product['gpg']['gpg-key'] == gpg_key['name'] - # Delete the gpg key - ContentCredential.delete({'name': gpg_key['name'], 'organization-id': module_org.id}) - # Verify gpg key was actually deleted - with pytest.raises(CLIReturnCodeError): - ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key was disassociated from the product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - assert product['gpg']['gpg-key'] != gpg_key['name'] - - -@pytest.mark.tier2 -@pytest.mark.upgrade -def test_positive_delete_key_for_product_with_repo(module_org): - """Create gpg key with valid name and valid gpg key via file - import then associate it with custom product that has one repository - then delete it - - :id: 1e98e588-8b5d-475c-ad84-5d566df5619c - - :expectedresults: gpg key is associated with product and its - repository during creation but removed from product and repository - after deletion - - :CaseLevel: Integration - """ - # Create product, repository and gpg key - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - gpg_key = make_content_credential({'organization-id': module_org.id}) - #
Associate gpg key with a product - Product.update( - {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} - ) - # Verify gpg key was associated both with product and its repository - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) - assert product['gpg']['gpg-key'] == gpg_key['name'] - assert repo['gpg-key'].get('name') == gpg_key['name'] - # Delete the gpg key - ContentCredential.delete({'name': gpg_key['name'], 'organization-id': module_org.id}) - # Verify gpg key was actually deleted - with pytest.raises(CLIReturnCodeError): - ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key was disassociated from the product and its repository - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) - assert product['gpg']['gpg-key'] != gpg_key['name'] - assert repo['gpg-key'].get('name') != gpg_key['name'] - - -@pytest.mark.tier2 -def test_positive_delete_key_for_product_with_repos(module_org): - """Create gpg key with valid name and valid gpg key via file - import then associate it with custom product that has more than one - repository then delete it - - :id: 3848441f-746a-424c-afc3-4d5a15888af8 - - :expectedresults: gpg key is associated with product and its - repositories during creation but removed from the product and the - repositories after deletion - - :CaseLevel: Integration - """ - # Create product, repositories and gpg key - product = make_product({'organization-id': module_org.id}) - repos = [make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5))] - gpg_key = make_content_credential({'organization-id': module_org.id}) - # Associate gpg key with a product - Product.update( - {'gpg-key-id': gpg_key['id'], 'id': product['id'], 'organization-id': module_org.id} - ) - # Verify gpg key was associated with product and its repositories - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - assert product['gpg']['gpg-key'] == gpg_key['name'] - for repo in repos: - repo = Repository.info({'id': repo['id']}) - assert repo['gpg-key'].get('name') == gpg_key['name'] - # Delete the gpg key - ContentCredential.delete({'name': gpg_key['name'], 'organization-id': module_org.id}) - # Verify gpg key was actually deleted - with pytest.raises(CLIReturnCodeError): - ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key was disassociated from the product and its - # repositories - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - assert product['gpg']['gpg-key'] != gpg_key['name'] - for repo in repos: - repo = Repository.info({'id': repo['id']}) - assert repo['gpg-key'].get('name') != gpg_key['name'] - - -@pytest.mark.tier2 -def test_positive_delete_key_for_repo_from_product_with_repo(module_org): - """Create gpg key with valid name and valid gpg key via file - import then associate it to repository from custom product that has - one repository then delete the key - - :id: 2555b08f-8cee-4e84-8f4d-9b46743f5758 - - :expectedresults: gpg key is associated with the single repository but - not the product during creation and was removed from repository - after deletion - - :CaseLevel: Integration - """ - # Create product, repository and gpg key - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - 
gpg_key = make_content_credential({'organization-id': module_org.id}) - # Associate gpg key with a repository - Repository.update( - {'gpg-key-id': gpg_key['id'], 'id': repo['id'], 'organization-id': module_org.id} - ) - # Verify gpg key was associated with the repository but not with the - # product - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - repo = Repository.info({'id': repo['id']}) - assert product['gpg']['gpg-key'] != gpg_key['name'] - assert repo['gpg-key'].get('name') == gpg_key['name'] - # Delete the gpg key - ContentCredential.delete({'name': gpg_key['name'], 'organization-id': module_org.id}) - # Verify gpg key was actually deleted - with pytest.raises(CLIReturnCodeError): - ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key was disassociated from the repository - repo = Repository.info({'id': repo['id']}) - assert repo['gpg-key'].get('name') != gpg_key['name'] - - -@pytest.mark.tier2 -def test_positive_delete_key_for_repo_from_product_with_repos(module_org): - """Create gpg key with valid name and valid gpg key via file - import then associate it to repository from custom product that has - more than one repository then delete the key - - :id: 7d6a278b-1063-4e72-bc32-ca60bd17bb84 - - :expectedresults: gpg key is associated with a single repository but - not the product during creation and removed from repository after - deletion - - :CaseLevel: Integration - """ - # Create product, repositories and gpg key - product = make_product({'organization-id': module_org.id}) - repos = [] - for _ in range(gen_integer(2, 5)): - repos.append(make_repository({'product-id': product['id']})) - gpg_key = make_content_credential({'organization-id': module_org.id}) - # Associate gpg key with a repository - Repository.update( - {'gpg-key-id': gpg_key['id'], 'id': repos[0]['id'], 'organization-id': module_org.id} - ) - # Verify gpg key was associated with the repository - repos[0] = Repository.info({'id': repos[0]['id']}) - assert repos[0]['gpg-key']['name'] == gpg_key['name'] - # Delete the gpg key - ContentCredential.delete({'name': gpg_key['name'], 'organization-id': module_org.id}) - # Verify gpg key was actually deleted - with pytest.raises(CLIReturnCodeError): - ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id}) - # Verify gpg key is not associated with any repository or the product - # itself - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) - assert product['gpg']['gpg-key'] != gpg_key['name'] - for repo in repos: - repo = Repository.info({'id': repo['id']}) - assert repo['gpg-key'].get('name') != gpg_key['name'] - - @pytest.mark.tier1 -def test_positive_list(module_org): +def test_positive_list(module_target_sat, module_org): """Create gpg key and list it :id: ca69e23b-ca96-43dd-89a6-55b0e4ea322d @@ -894,16 +718,21 @@ def test_positive_list(module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential( - {'key': VALID_GPG_KEY_FILE_PATH, 'organization-id': module_org.id} + + gpg_key = module_target_sat.cli_factory.make_content_credential( + {'path': VALID_GPG_KEY_FILE_PATH, 'organization-id': module_org.id} ) - gpg_keys_list = ContentCredential.list({'organization-id': module_org.id}) - assert gpg_key['id'] in [gpg['id'] for gpg in gpg_keys_list] + + gpg_key_list = module_target_sat.cli.ContentCredential.list( + {'organization-id': module_org.id, 'name': gpg_key['name']} + ) + + assert gpg_key['id'] in [gpg['id'] for gpg in 
gpg_key_list] @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_search(name, module_org): +def test_positive_search(target_sat, name, module_org): """Create gpg key and search for it :id: f72648f1-b468-4662-9653-3464e7d0c349 @@ -914,15 +743,15 @@ def test_positive_search(name, module_org): :CaseImportance: Critical """ - gpg_key = make_content_credential( + gpg_key = target_sat.cli_factory.make_content_credential( { - 'key': VALID_GPG_KEY_FILE_PATH, + 'path': VALID_GPG_KEY_FILE_PATH, 'name': name, 'organization-id': module_org.id, } ) # Can we find the new object? - result = ContentCredential.exists( + result = target_sat.cli.ContentCredential.exists( {'organization-id': module_org.id}, search=('name', gpg_key['name']) ) assert gpg_key['name'] == result['name'] diff --git a/tests/foreman/cli/test_contentview.py b/tests/foreman/cli/test_contentview.py index 78217af34a5..9b261f57218 100644 --- a/tests/foreman/cli/test_contentview.py +++ b/tests/foreman/cli/test_contentview.py @@ -4,55 +4,38 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random -import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string +from fauxfactory import gen_alphanumeric, gen_string from nailgun import entities +import pytest from wrapanapi.entities.vm import VmState from robottelo import constants -from robottelo.cli import factory as cli_factory -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.capsule import Capsule -from robottelo.cli.contentview import ContentView -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.location import Location -from robottelo.cli.module_stream import ModuleStream -from robottelo.cli.org import Org -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.role import Role -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE_NAME -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_names_list +from robottelo.constants import ( + FAKE_2_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE_NAME, + DataFile, +) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + generate_strings_list, + invalid_names_list, + parametrized, + valid_names_list, +) @pytest.fixture(scope='module') -def module_rhel_content(module_entitlement_manifest_org): +def module_rhel_content(module_entitlement_manifest_org, module_target_sat): """Returns RH repo after syncing it""" product = entities.Product( name=constants.PRDS['rhel'], organization=module_entitlement_manifest_org @@ -61,14 +44,14 @@ def module_rhel_content(module_entitlement_manifest_org): data = {'basearch': 'x86_64', 'releasever': '6Server', 'product_id': product.id} reposet.enable(data=data) - repo = Repository.info( + repo = module_target_sat.cli.Repository.info( { 'name': 
constants.REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, 'product': product.name, } ) - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': constants.REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, @@ -78,12 +61,12 @@ def module_rhel_content(module_entitlement_manifest_org): return repo -def _get_content_view_version_lce_names_set(content_view_id, version_id): +def _get_content_view_version_lce_names_set(content_view_id, version_id, sat): """returns the set of lifecycle environment names that the given content view version belongs under :rtype: set """ - lifecycle_environments = ContentView.version_info( + lifecycle_environments = sat.cli.ContentView.version_info( {'content-view-id': content_view_id, 'id': version_id} )['lifecycle-environments'] return {lce['name'] for lce in lifecycle_environments} @@ -94,7 +77,7 @@ class TestContentView: @pytest.mark.parametrize('name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_create_with_name(self, module_org, name): + def test_positive_create_with_name(self, module_org, module_target_sat, name): """create content views with different names :id: a154308c-3982-4cf1-a236-3051e740970e @@ -105,14 +88,14 @@ def test_positive_create_with_name(self, module_org, name): :parametrized: yes """ - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'name': name, 'organization-id': module_org.id} ) assert content_view['name'] == name.strip() @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, module_org, name): + def test_negative_create_with_invalid_name(self, module_org, module_target_sat, name): """create content views with invalid names :id: 83046271-76f9-4cda-b579-a2fe63493295 @@ -124,11 +107,13 @@ def test_negative_create_with_invalid_name(self, module_org, name): :parametrized: yes """ - with pytest.raises(cli_factory.CLIFactoryError): - cli_factory.make_content_view({'name': name, 'organization-id': module_org.id}) + with pytest.raises(CLIFactoryError): + module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.tier1 - def test_negative_create_with_org_name(self): + def test_negative_create_with_org_name(self, module_target_sat): """Create content view with invalid org name :id: f8b76e98-ccc8-41ac-af04-541650e8f5ba @@ -139,10 +124,10 @@ def test_negative_create_with_org_name(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.create({'organization-id': gen_string('alpha')}) + module_target_sat.cli.ContentView.create({'organization-id': gen_string('alpha')}) @pytest.mark.tier2 - def test_positive_create_with_repo_id(self, module_org, module_product): + def test_positive_create_with_repo_id(self, module_org, module_product, module_target_sat): """Create content view providing repository id :id: bb91affe-f8d4-4724-8b61-41f3cb898fd3 @@ -153,15 +138,17 @@ def test_positive_create_with_repo_id(self, module_org, module_product): :CaseImportance: High :BZ: 1213097 """ - repo = cli_factory.make_repository({'product-id': module_product.id}) - cv = cli_factory.make_content_view( + repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) + cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids':
[repo['id']]} ) assert cv['yum-repositories'][0]['id'] == repo['id'] @pytest.mark.parametrize('new_name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_update_name_by_id(self, module_org, new_name): + def test_positive_update_name_by_id(self, module_org, module_target_sat, new_name): """Find content view by its id and update its name afterwards :id: 35fccf2c-abc4-4ca8-a565-a7a6adaaf429 @@ -174,16 +161,16 @@ def test_positive_update_name_by_id(self, module_org, new_name): :parametrized: yes """ - cv = cli_factory.make_content_view( + cv = module_target_sat.cli_factory.make_content_view( {'name': gen_string('utf8'), 'organization-id': module_org.id} ) - ContentView.update({'id': cv['id'], 'new-name': new_name}) - cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.update({'id': cv['id'], 'new-name': new_name}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name.strip() @pytest.mark.parametrize('new_name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_update_name_by_name(self, module_org, new_name): + def test_positive_update_name_by_name(self, module_org, module_target_sat, new_name): """Find content view by its name and update it :id: aa9bced6-ee6c-4a18-90ac-874ab4979711 @@ -196,16 +183,16 @@ def test_positive_update_name_by_name(self, module_org, new_name): :parametrized: yes """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.update( + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.update( {'name': cv['name'], 'organization-label': module_org.label, 'new-name': new_name} ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name.strip() @pytest.mark.run_in_one_thread @pytest.mark.tier2 - def test_positive_update_filter(self, repo_setup): + def test_positive_update_filter(self, repo_setup, module_target_sat): """Edit content views for RH content, add a filter and update the filter :id: 4beab1e4-fc58-460e-af24-cdd2c3d283e6 :expectedresults: Edited content view save is successful and info is updated - :CaseLevel: Integration - :CaseImportance: High """ # Create CV - new_cv = cli_factory.make_content_view({'organization-id': repo_setup['org'].id}) + new_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': repo_setup['org'].id} + ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': repo_setup['org'].id, 'repository-id': repo_setup['repo'].id, } ) - Repository.synchronize({'id': repo_setup['repo'].id}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_setup['repo'].id}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == repo_setup['repo'].name - cvf = cli_factory.make_content_view_filter( + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'true', 'type': 'erratum'} ) - cvf_rule = cli_factory.make_content_view_filter_rule( + cvf_rule = module_target_sat.cli_factory.content_view_filter_rule( {'content-view-filter-id': cvf['filter-id'], 'types': ['bugfix', 'enhancement']} ) - cvf = ContentView.filter.info({'id': 
cvf['filter-id']}) + cvf = module_target_sat.cli.ContentView.filter.info({'id': cvf['filter-id']}) assert 'security' not in cvf['rules'][0]['types'] - ContentView.filter.rule.update( + module_target_sat.cli.ContentView.filter.rule.update( { 'id': cvf_rule['rule-id'], 'types': 'security', 'content-view-filter-id': cvf['filter-id'], } ) - cvf = ContentView.filter.info({'id': cvf['filter-id']}) - assert 'security' == cvf['rules'][0]['types'] + cvf = module_target_sat.cli.ContentView.filter.info({'id': cvf['filter-id']}) + assert cvf['rules'][0]['types'] == 'security' @pytest.mark.tier1 - def test_positive_delete_by_id(self, module_org): + def test_positive_delete_by_id(self, module_org, module_target_sat): """delete content view by its id :id: e96d6d47-8be4-4705-979f-e5c320eca293 @@ -258,13 +245,13 @@ def test_positive_delete_by_id(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.delete({'id': cv['id']}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.delete({'id': cv['id']}) with pytest.raises(CLIReturnCodeError): - ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.info({'id': cv['id']}) @pytest.mark.tier1 - def test_positive_delete_by_name(self, module_org): + def test_positive_delete_by_name(self, module_org, module_target_sat): """delete content view by its name :id: 014b85f3-003b-42d9-bbfe-21620e8eb84b @@ -275,13 +262,15 @@ def test_positive_delete_by_name(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.delete({'name': cv['name'], 'organization': module_org.name}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.delete( + {'name': cv['name'], 'organization': module_org.name} + ) with pytest.raises(CLIReturnCodeError): - ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.info({'id': cv['id']}) @pytest.mark.tier2 - def test_positive_delete_version_by_name(self, module_org): + def test_positive_delete_version_by_name(self, module_org, module_target_sat): """Create content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'remove-from-environment' command and delete content view version from @@ -293,30 +282,31 @@ def test_positive_delete_version_by_name(self, module_org): :CaseImportance: High - :CaseLevel: Integration """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cvv = content_view['versions'][0] env_id = content_view['lifecycle-environments'][0]['id'] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': content_view['id'], 'lifecycle-environment-id': env_id} ) - ContentView.version_delete( + module_target_sat.cli.ContentView.version_delete( { 'content-view': content_view['name'], 'organization': module_org.name, 'version': cvv['version'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 0 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_delete_version_by_id(self, module_org, module_product): + def test_positive_delete_version_by_id(self, module_org, module_product, module_target_sat): """Create content view and publish it. After that try to disassociate content view from 'Library' environment through 'remove-from-environment' command and delete content view version from @@ -330,12 +320,14 @@ def test_positive_delete_version_by_id(self, module_org, module_product): :CaseImportance: High """ # Create new organization, product and repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -343,25 +335,25 @@ def test_positive_delete_version_by_id(self, module_org, module_product): } ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Store the associated environment_id env_id = new_cv['lifecycle-environments'][0]['id'] # Store the version1 id version1_id = new_cv['versions'][0]['id'] # Remove the CV from selected environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env_id} ) # Delete the version - ContentView.version_delete({'id': version1_id}) - new_cv = ContentView.info({'id': 
new_cv['id']}) + module_target_sat.cli.ContentView.version_delete({'id': version1_id}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier2 - def test_negative_delete_version_by_id(self, module_org): + def test_negative_delete_version_by_id(self, module_org, module_target_sat): """Create content view and publish it. Try to delete content view version while content view is still associated with lifecycle environment @@ -372,22 +364,23 @@ def test_negative_delete_version_by_id(self, module_org): :CaseImportance: Critical - :CaseLevel: Integration """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cv = content_view['versions'][0] # Try to delete content view version while it is in environment Library with pytest.raises(CLIReturnCodeError): - ContentView.version_delete({'id': cv['id']}) + module_target_sat.cli.ContentView.version_delete({'id': cv['id']}) # Check that version was not actually removed from the cv - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 @pytest.mark.tier1 - def test_positive_remove_lce_by_id(self, module_org): + def test_positive_remove_lce_by_id(self, module_org, module_target_sat): """Remove content view from lifecycle environment :id: 1bf8a647-d82e-4145-b13b-f92bf6642532 @@ -396,16 +389,16 @@ def test_positive_remove_lce_by_id(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) env = new_cv['lifecycle-environments'][0] - ContentView.remove({'id': new_cv['id'], 'environment-ids': env['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove({'id': new_cv['id'], 'environment-ids': env['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['lifecycle-environments']) == 0 @pytest.mark.tier3 - def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): + def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org, module_target_sat): """Remove content view environment and re-assign activation key to another environment and content view @@ -415,26 +408,35 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): :CaseImportance: Medium - :CaseLevel: Integration """ env = [ - cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(2) ] - source_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': source_cv['id']}) - source_cv = 
ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) cvv = source_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']} + ) - destination_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': destination_cv['id']}) - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) cvv = destination_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']} + ) - ac_key = cli_factory.make_activation_key( + ac_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': source_cv['id'], 'lifecycle-environment-id': env[0]['id'], @@ -442,12 +444,12 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): 'organization-id': module_org.id, } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['activation-keys'][0] == ac_key['name'] - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert len(destination_cv['activation-keys']) == 0 - ContentView.remove( + module_target_sat.cli.ContentView.remove( { 'id': source_cv['id'], 'environment-ids': env[0]['id'], @@ -455,14 +457,14 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): 'key-environment-id': env[1]['id'], } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert len(source_cv['activation-keys']) == 0 - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['activation-keys'][0] == ac_key['name'] @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): + def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org, module_target_sat): """Remove content view environment and re-assign content host to another environment and content view @@ -472,29 +474,38 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): :CaseImportance: Low - :CaseLevel: Integration """ env = [ - cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(2) ] - source_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': source_cv['id']}) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli_factory.make_content_view( + 
{'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) cvv = source_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']} + ) - destination_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': destination_cv['id']}) - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) cvv = destination_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']} + ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': source_cv['id'], 'lifecycle-environment-id': env[0]['id'], @@ -503,12 +514,12 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '1' - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['content-host-count'] == '0' - ContentView.remove( + module_target_sat.cli.ContentView.remove( { 'environment-ids': env[0]['id'], 'id': source_cv['id'], @@ -516,13 +527,13 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): 'system-environment-id': env[1]['id'], } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '0' - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['content-host-count'] == '1' @pytest.mark.tier1 - def test_positive_remove_version_by_id(self, module_org): + def test_positive_remove_version_by_id(self, module_org, module_target_sat): """Delete content view version using 'remove' command by id :id: e8664353-6601-4566-8478-440be20a089d @@ -531,22 +542,24 @@ def test_positive_remove_version_by_id(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + 
module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-version-ids': cvv['id'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-version-ids': cvv['id'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier1 - def test_positive_remove_version_by_name(self, module_org): + def test_positive_remove_version_by_name(self, module_org, module_target_sat): """Delete content view version using 'remove' command by name :id: 2c838716-dcd3-4017-bffc-da53727c22a3 @@ -557,21 +570,23 @@ def test_positive_remove_version_by_name(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-versions': cvv['version'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-versions': cvv['version'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier1 - def test_positive_remove_repository_by_id(self, module_org, module_product): + def test_positive_remove_repository_by_id(self, module_org, module_product, module_target_sat): """Remove associated repository from content view by id :id: 90703181-b3f8-44f6-959a-b65c79b6b6ee @@ -582,20 +597,28 @@ def test_positive_remove_repository_by_id(self, module_org, module_product): :CaseImportance: Critical """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Remove repository from CV - ContentView.remove_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 @pytest.mark.tier1 - def test_positive_remove_repository_by_name(self, module_org, 
module_product): + def test_positive_remove_repository_by_name( + self, module_org, module_product, module_target_sat + ): """Remove associated repository from content view by name :id: dc952fe7-eb89-4760-889b-6a3fa17c3e75 @@ -606,20 +629,26 @@ def test_positive_remove_repository_by_name(self, module_org, module_product): :CaseImportance: Critical """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Remove repository from CV - ContentView.remove_repository({'id': new_cv['id'], 'repository': new_repo['name']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository': new_repo['name']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 @pytest.mark.tier2 - def test_positive_create_composite(self, module_org): + def test_positive_create_composite(self, module_org, module_target_sat): """create a composite content view :id: bded6acd-8da3-45ea-9e39-19bdc6c06341 @@ -628,36 +657,40 @@ def test_positive_create_composite(self, module_org): :expectedresults: Composite content views are created - :CaseLevel: Integration - :CaseImportance: High """ # Create REPO - new_product = cli_factory.make_product({'organization-id': module_org.id}) - new_repo = cli_factory.make_repository({'product-id': new_product['id']}) + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Create CV - con_view = cli_factory.make_content_view( + con_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': con_view['id']}) + module_target_sat.cli.ContentView.add_version( + 
{'content-view-version-id': version1_id, 'id': con_view['id']} + ) # Assert whether version was associated to composite CV - con_view = ContentView.info({'id': con_view['id']}) + con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']}) assert con_view['components'][0]['id'] == version1_id @pytest.mark.tier2 - def test_positive_create_composite_by_name(self, module_org): + def test_positive_create_composite_by_name(self, module_org, module_target_sat): """Create a composite content view and add non-composite content view by its name @@ -668,28 +701,32 @@ def test_positive_create_composite_by_name(self, module_org): :BZ: 1416857 - :CaseLevel: Integration - :CaseImportance: High """ - new_product = cli_factory.make_product({'organization-id': module_org.id}) + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) # Create REPO - new_repo = cli_factory.make_repository({'product-id': new_product['id']}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] # Create CV - cv = cli_factory.make_content_view({'composite': True, 'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) assert len(cv['components']) == 0 # Associate version to composite CV - ContentView.add_version( + module_target_sat.cli.ContentView.add_version( { 'content-view-version': cvv['version'], 'content-view': new_cv['name'], @@ -698,12 +735,14 @@ def test_positive_create_composite_by_name(self, module_org): } ) # Assert whether version was associated to composite CV - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['components']) == 1 assert cv['components'][0]['id'] == cvv['id'] @pytest.mark.tier2 - def test_positive_remove_version_by_id_from_composite(self, module_org, module_product): + def test_positive_remove_version_by_id_from_composite( + self, module_org, module_product, module_target_sat + ): """Create a composite content view and remove its content version by id :id: 0ff675d0-45d6-4f15-9e84-3b5ce98ce7de @@ -711,17 +750,17 @@ def test_positive_remove_version_by_id_from_composite(self, module_org, module_p :expectedresults: Composite content view info output does not contain any values - :CaseLevel: Integration - :CaseImportance: High """ # Create new repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + 
module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -729,30 +768,32 @@ def test_positive_remove_version_by_id_from_composite(self, module_org, module_p } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, 'component-ids': new_cv['versions'][0]['id'], } ) - ContentView.publish({'id': comp_cv['id']}) - new_cv = ContentView.info({'id': comp_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-version-ids': cvv['id'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-version-ids': cvv['id'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier2 - def test_positive_remove_component_by_name(self, module_org, module_product): + def test_positive_remove_component_by_name(self, module_org, module_product, module_target_sat): """Create a composite content view and remove component from it by name :id: 908f9cad-b985-4bae-96c0-037ea1d395a6 @@ -762,17 +803,17 @@ def test_positive_remove_component_by_name(self, module_org, module_product): :BZ: 1416857 - :CaseLevel: Integration - :CaseImportance: High """ # Create new repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -780,11 +821,11 @@ def test_positive_remove_component_by_name(self, module_org, module_product): } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = 
module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, @@ -792,7 +833,7 @@ def test_positive_remove_component_by_name(self, module_org, module_product): } ) assert len(comp_cv['components']) == 1 - ContentView.remove_version( + module_target_sat.cli.ContentView.remove_version( { 'content-view-version': new_cv['versions'][0]['version'], 'content-view': new_cv['name'], @@ -800,11 +841,11 @@ def test_positive_remove_component_by_name(self, module_org, module_product): 'name': comp_cv['name'], } ) - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert len(comp_cv['components']) == 0 @pytest.mark.tier3 - def test_positive_create_composite_with_component_ids(self, module_org): + def test_positive_create_composite_with_component_ids(self, module_org, module_target_sat): """Create a composite content view with a component_ids option whose ids are from different content views @@ -814,34 +855,32 @@ def test_positive_create_composite_with_component_ids(self, module_org): :BZ: 1487265 - :CaseLevel: Integration - :CaseImportance: High """ # Create first CV - cv1 = cli_factory.make_content_view({'organization-id': module_org.id}) + cv1 = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV - ContentView.publish({'id': cv1['id']}) - cv1 = ContentView.info({'id': cv1['id']}) + module_target_sat.cli.ContentView.publish({'id': cv1['id']}) + cv1 = module_target_sat.cli.ContentView.info({'id': cv1['id']}) # Create second CV - cv2 = cli_factory.make_content_view({'organization-id': module_org.id}) + cv2 = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV - ContentView.publish({'id': cv2['id']}) - cv2 = ContentView.info({'id': cv2['id']}) + module_target_sat.cli.ContentView.publish({'id': cv2['id']}) + cv2 = module_target_sat.cli.ContentView.info({'id': cv2['id']}) # Let us now store the version ids component_ids = [cv1['versions'][0]['id'], cv2['versions'][0]['id']] # Create CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id, 'component-ids': component_ids} ) # Assert whether the composite content view components IDs are equal # to the component_ids input values - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert {comp['id'] for comp in comp_cv['components']} == set(component_ids) @pytest.mark.tier3 - def test_negative_create_composite_with_component_ids(self, module_org): + def test_negative_create_composite_with_component_ids(self, module_org, module_target_sat): """Attempt to create a composite content view with a component_ids option whose ids are from the same content view @@ -851,21 +890,19 @@ def test_negative_create_composite_with_component_ids(self, module_org): :BZ: 1487265 - :CaseLevel: Integration - :CaseImportance: Low """ # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV twice for _ in range(2): - ContentView.publish({'id': new_cv['id']}) - new_cv = 
module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version ids component_ids = [version['id'] for version in new_cv['versions']] # Try create CV - with pytest.raises(cli_factory.CLIFactoryError) as context: - cli_factory.make_content_view( + with pytest.raises(CLIFactoryError) as context: + module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, @@ -875,7 +912,7 @@ def test_negative_create_composite_with_component_ids(self, module_org): assert 'Failed to create ContentView with data:' in str(context) @pytest.mark.tier3 - def test_positive_update_composite_with_component_ids(module_org): + def test_positive_update_composite_with_component_ids(module_org, module_target_sat): """Update a composite content view with a component_ids option :id: e6106ff6-c526-40f2-bdc0-ae291f7b267e @@ -883,31 +920,33 @@ def test_positive_update_composite_with_component_ids(module_org): :expectedresults: Composite content view component ids are similar to the nested content view versions ids - :CaseLevel: Integration - :CaseImportance: Low """ # Create a CV to add to the composite one - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of the CV - ContentView.publish({'id': cv['id']}) - new_cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) # Let us now store the version ids component_ids = new_cv['versions'][0]['id'] # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Update a composite content view with a component id version - ContentView.update({'id': comp_cv['id'], 'component-ids': component_ids}) + module_target_sat.cli.ContentView.update( + {'id': comp_cv['id'], 'component-ids': component_ids} + ) # Assert whether the composite content view components IDs are equal # to the component_ids input values - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert comp_cv['components'][0]['id'] == component_ids @pytest.mark.run_in_one_thread @pytest.mark.tier1 - def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, module_rhel_content): + def test_positive_add_rh_repo_by_id( + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat + ): """Associate Red Hat content to a content view :id: b31a85c3-aa56-461b-9e3a-f7754c742573 @@ -916,23 +955,21 @@ def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, modul :expectedresults: RH Content can be seen in the content view - :CaseLevel: Integration - :CaseImportance: Critical """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] @@ 
-940,7 +977,7 @@ def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, modul @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_add_rh_repo_by_id_and_create_filter( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Associate Red Hat content to a content view and create filter @@ -955,29 +992,27 @@ def test_positive_add_rh_repo_by_id_and_create_filter( :CaseImportance: Low - :CaseLevel: Integration - :BZ: 1359665 """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] name = gen_string('alphanumeric') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( {'content-view-id': new_cv['id'], 'inclusion': 'true', 'name': name, 'type': 'rpm'} ) - ContentView.filter.rule.create( + module_target_sat.cli.ContentView.filter.rule.create( {'content-view-filter': name, 'content-view-id': new_cv['id'], 'name': 'walgrind'} ) @@ -1000,7 +1035,6 @@ def test_positive_add_module_stream_filter_rule(self, module_org, target_sat): :CaseImportance: Low - :CaseLevel: Integration """ filter_name = gen_string('alpha') repo_name = gen_string('alpha') @@ -1013,10 +1047,10 @@ def test_positive_add_module_stream_filter_rule(self, module_org, target_sat): 0 ] content_view = entities.ContentView(organization=module_org.id, repository=[repo]).create() - walrus_stream = ModuleStream.list({'search': "name=walrus, stream=5.21"})[0] - content_view = ContentView.info({'id': content_view.id}) + walrus_stream = target_sat.cli.ModuleStream.list({'search': "name=walrus, stream=5.21"})[0] + content_view = target_sat.cli.ContentView.info({'id': content_view.id}) assert content_view['yum-repositories'][0]['name'] == repo.name - content_view_filter = ContentView.filter.create( + content_view_filter = target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -1024,18 +1058,20 @@ def test_positive_add_module_stream_filter_rule(self, module_org, target_sat): 'type': 'modulemd', } ) - content_view_filter_rule = ContentView.filter.rule.create( + content_view_filter_rule = target_sat.cli.ContentView.filter.rule.create( { 'content-view-filter': filter_name, 'content-view-id': content_view['id'], 'module-stream-ids': walrus_stream['id'], } ) - filter_info = ContentView.filter.info({'id': content_view_filter['filter-id']}) + filter_info = target_sat.cli.ContentView.filter.info( + {'id': content_view_filter['filter-id']} + ) assert filter_info['rules'][0]['id'] == content_view_filter_rule['rule-id'] @pytest.mark.tier2 - def test_positive_add_custom_repo_by_id(self, module_org, module_product): + def test_positive_add_custom_repo_by_id(self, module_org, module_product, module_target_sat): """Associate custom content to a Content view :id: b813b222-b984-47e0-8d9b-2daa43f9a221 @@ -1046,20 +1082,23 @@ def test_positive_add_custom_repo_by_id(self, module_org, module_product): :CaseImportance: High - :CaseLevel: 
Integration """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] @pytest.mark.tier1 - def test_positive_add_custom_repo_by_name(self, module_org, module_product): + def test_positive_add_custom_repo_by_name(self, module_org, module_product, module_target_sat): """Associate custom content to a content view with name :id: 62431e11-bec6-4444-abb0-e3758ba25fd8 @@ -1070,13 +1109,15 @@ def test_positive_add_custom_repo_by_name(self, module_org, module_product): :BZ: 1343006 """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV with names. 
- ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'name': new_cv['name'], 'organization': module_org.name, @@ -1084,11 +1125,13 @@ def test_positive_add_custom_repo_by_name(self, module_org, module_product): 'repository': new_repo['name'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] @pytest.mark.tier2 - def test_negative_add_component_in_non_composite_cv(self, module_org, module_product): + def test_negative_add_component_in_non_composite_cv( + self, module_org, module_product, module_target_sat + ): """attempt to associate components in a non-composite content view @@ -1098,28 +1141,33 @@ def test_negative_add_component_in_non_composite_cv(self, module_org, module_pro :CaseImportance: Low - :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create component CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Fetch version id - cv_version = ContentView.version_list({'content-view-id': new_cv['id']}) + cv_version = module_target_sat.cli.ContentView.version_list( + {'content-view-id': new_cv['id']} + ) # Create non-composite CV - with pytest.raises(cli_factory.CLIFactoryError): - cli_factory.make_content_view( + with pytest.raises(CLIFactoryError): + module_target_sat.cli_factory.make_content_view( {'component-ids': cv_version[0]['id'], 'organization-id': module_org.id} ) @pytest.mark.tier2 - def test_negative_add_same_yum_repo_twice(self, module_org, module_product): + def test_negative_add_same_yum_repo_twice(self, module_org, module_product, module_target_sat): """attempt to associate the same repo multiple times within a content view @@ -1129,27 +1177,32 @@ def test_negative_add_same_yum_repo_twice(self, module_org, module_product): :CaseImportance: Low - :CaseLevel: Integration """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': 
new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] repos_length = len(new_cv['yum-repositories']) # Re-associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == repos_length @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_positive_promote_rh_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to promote a content view containing RH content @@ -1161,32 +1214,33 @@ def test_positive_promote_rh_content( :CaseImportance: Critical - :CaseLevel: Integration """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) - env1 = cli_factory.make_lifecycle_environment( + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) + env1 = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': env1['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) environment = {'id': env1['id'], 'name': env1['name']} assert environment in new_cv['lifecycle-environments'] @pytest.mark.run_in_one_thread @pytest.mark.tier3 def test_positive_promote_rh_and_custom_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to promote a content view containing RH content and custom content using filters @@ -1199,29 +1253,33 @@ def test_positive_promote_rh_and_custom_content( :CaseImportance: Low - :CaseLevel: Integration """ # Create custom repo - new_repo = cli_factory.make_repository( + new_repo = module_target_sat.cli_factory.make_repository( { - 'product-id': cli_factory.make_product( + 'content-type': 'yum', + 'product-id': module_target_sat.cli_factory.make_product( {'organization-id': module_entitlement_manifest_org.id} - )['id'] + )['id'], } ) # Sync custom repo - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repos with CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - cvf = 
cli_factory.make_content_view_filter( + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'false', 'type': 'rpm'} ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'min-version': 5, @@ -1229,21 +1287,22 @@ def test_positive_promote_rh_and_custom_content( } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) - env1 = cli_factory.make_lifecycle_environment( + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) + env1 = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': env1['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) environment = {'id': env1['id'], 'name': env1['name']} assert environment in new_cv['lifecycle-environments'] + @pytest.mark.build_sanity @pytest.mark.tier2 - def test_positive_promote_custom_content(self, module_org, module_product): + def test_positive_promote_custom_content(self, module_org, module_product, module_target_sat): """attempt to promote a content view containing custom content :id: 64c2f1c2-7443-4836-a108-060b913ad2b1 @@ -1254,35 +1313,40 @@ def test_positive_promote_custom_content(self, module_org, module_product): :CaseImportance: High - :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': environment['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = 
module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': environment['id'], 'name': environment['name']} in new_cv[ 'lifecycle-environments' ] @pytest.mark.tier2 - def test_positive_promote_ccv(self, module_org, module_product): + def test_positive_promote_ccv(self, module_org, module_product, module_target_sat): """attempt to promote a content view containing custom content :id: 9d31113d-39ec-4524-854c-7f03b0f028fe @@ -1295,47 +1359,54 @@ def test_positive_promote_ccv(self, module_org, module_product): :CaseImportance: High - :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Create CV - con_view = cli_factory.make_content_view( + con_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': con_view['id']}) + module_target_sat.cli.ContentView.add_version( + {'content-view-version-id': version1_id, 'id': con_view['id']} + ) # Publish a new version of CV - ContentView.publish({'id': con_view['id']}) + module_target_sat.cli.ContentView.publish({'id': con_view['id']}) # As version info is populated after publishing only - con_view = ContentView.info({'id': con_view['id']}) + con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']}) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': con_view['versions'][0]['id'], 'to-lifecycle-environment-id': environment['id'], } ) - con_view = ContentView.info({'id': con_view['id']}) + con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']}) assert {'id': environment['id'], 'name': environment['name']} in con_view[ 'lifecycle-environments' ] @pytest.mark.tier2 - def test_negative_promote_default_cv(self, module_org): + def test_negative_promote_default_cv(self, module_org, module_target_sat): """attempt to promote a default content view :id: ef25a4d9-8852-4d2c-8355-e9b07eb0560b @@ -1344,21 +1415,26 @@ def test_negative_promote_default_cv(self, module_org): :CaseImportance: Low - :CaseLevel: Integration """ - environment = cli_factory.make_lifecycle_environment({'organization-id': 
module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) - result = ContentView.list({'organization-id': module_org.id}, per_page=False) + result = module_target_sat.cli.ContentView.list( + {'organization-id': module_org.id}, per_page=False + ) content_view = random.choice([cv for cv in result if cv['name'] == constants.DEFAULT_CV]) - cvv = ContentView.version_list({'content-view-id': content_view['content-view-id']})[0] + cvv = module_target_sat.cli.ContentView.version_list( + {'content-view-id': content_view['content-view-id']} + )[0] # Promote the Default CV to the next env with pytest.raises(CLIReturnCodeError): - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': cvv['id'], 'to-lifecycle-environment-id': environment['id']} ) @pytest.mark.tier2 - def test_negative_promote_with_invalid_lce(self, module_org, module_product): + def test_negative_promote_with_invalid_lce(self, module_org, module_product, module_target_sat): """attempt to promote a content view using an invalid environment @@ -1368,23 +1444,26 @@ def test_negative_promote_with_invalid_lce(self, module_org, module_product): :CaseImportance: Low - :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'product-id': module_product.id, 'content-type': 'yum'} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Promote the Published version of CV, # to the previous env which is Library with pytest.raises(CLIReturnCodeError): - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': new_cv['lifecycle-environments'][0]['id'], @@ -1397,7 +1476,7 @@ def test_negative_promote_with_invalid_lce(self, module_org, module_product): @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_positive_publish_rh_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to publish a content view containing RH content @@ -1409,17 +1488,18 @@ def test_positive_publish_rh_content( :CaseImportance: Critical - :CaseLevel: Integration """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': 
module_rhel_content['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] assert new_cv['versions'][0]['version'] == '1.0' @@ -1428,7 +1508,7 @@ def test_positive_publish_rh_content( @pytest.mark.pit_server @pytest.mark.tier3 def test_positive_publish_rh_and_custom_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to publish a content view containing a RH and custom repos and has filters @@ -1441,29 +1521,33 @@ def test_positive_publish_rh_and_custom_content( :CaseImportance: High - :CaseLevel: Integration """ # Create custom repo - new_repo = cli_factory.make_repository( + new_repo = module_target_sat.cli_factory.make_repository( { - 'product-id': cli_factory.make_product( + 'content-type': 'yum', + 'product-id': module_target_sat.cli_factory.make_product( {'organization-id': module_entitlement_manifest_org.id} - )['id'] + )['id'], } ) # Sync custom repo - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repos with CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - cvf = cli_factory.make_content_view_filter( + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'false', 'type': 'rpm'} ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'min-version': 5, @@ -1471,15 +1555,15 @@ def test_positive_publish_rh_and_custom_content( } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {module_rhel_content['name'], new_repo['name']}.issubset( {repo['name'] for repo in new_cv['yum-repositories']} ) assert new_cv['versions'][0]['version'] == '1.0' @pytest.mark.tier2 - def test_positive_publish_custom_content(self, module_org, module_product): + def test_positive_publish_custom_content(self, module_org, module_product, module_target_sat): """attempt to publish a content view containing custom content :id: 84158023-3980-45c6-87d8-faacea3c942f @@ -1490,27 +1574,64 @@ def test_positive_publish_custom_content(self, module_org, module_product): :CaseImportance: Critical - :CaseLevel: Integration """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - 
Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] assert new_cv['versions'][0]['version'] == '1.0' + @pytest.mark.tier2 + def test_positive_publish_custom_major_minor_cv_version(self, module_target_sat): + """CV can be published with custom major and minor versions + + :id: 6697cd22-253a-4bdc-a108-7e0af22caaf4 + + :steps: + + 1. Create product and repository with custom contents + 2. Sync the repository + 3. Create CV with above repository + 4. Publish the CV with custom major and minor versions + + :expectedresults: + + 1. CV version with custom major and minor versions is created + + """ + org = module_target_sat.cli_factory.make_org() + major = random.randint(1, 1000) + minor = random.randint(1, 1000) + content_view = module_target_sat.cli_factory.make_content_view( + {'name': gen_string('alpha'), 'organization-id': org['id']} + ) + module_target_sat.cli.ContentView.publish( + {'id': content_view['id'], 'major': major, 'minor': minor} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + cvv = content_view['versions'][0]['version'] + assert cvv.split('.')[0] == str(major) + assert cvv.split('.')[1] == str(minor) @pytest.mark.upgrade @pytest.mark.tier3 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_publish_custom_content_module_stream(self, module_org, module_product): + def test_positive_publish_custom_content_module_stream( + self, module_org, module_product, module_target_sat + ): """attempt to publish a content view containing custom content module streams @@ -1523,9 +1644,8 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ :CaseImportance: Medium - :CaseLevel: Integration """ - software_repo = cli_factory.make_repository( + software_repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', @@ -1533,7 +1653,7 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ } ) - animal_repo = cli_factory.make_repository( + animal_repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', @@ -1542,26 +1662,40 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ ) # Sync REPO's - Repository.synchronize({'id': animal_repo['id']}) - Repository.synchronize({'id': software_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': animal_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': software_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': 
module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': animal_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': animal_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv_version_1 = ContentView.info({'id': new_cv['id']})['versions'][0] - module_streams = ModuleStream.list({'content-view-version-id': (new_cv_version_1['id'])}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv_version_1 = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][ + 0 + ] + module_streams = module_target_sat.cli.ModuleStream.list( + {'content-view-version-id': (new_cv_version_1['id'])} + ) assert len(module_streams) > 6 # Publish another new version of CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': software_repo['id']}) - ContentView.publish({'id': new_cv['id']}) - new_cv_version_2 = ContentView.info({'id': new_cv['id']})['versions'][1] - module_streams = ModuleStream.list({'content-view-version-id': new_cv_version_2['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': software_repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv_version_2 = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][ + 1 + ] + module_streams = module_target_sat.cli.ModuleStream.list( + {'content-view-version-id': new_cv_version_2['id']} + ) assert len(module_streams) > 13 @pytest.mark.tier2 - def test_positive_republish_after_content_removed(self, module_org, module_product): + def test_positive_republish_after_content_removed( + self, module_org, module_product, module_target_sat + ): """Attempt to re-publish content view after all associated content were removed from that CV @@ -1578,12 +1712,13 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ :customerscenario: true - :CaseLevel: Integration """ # Create new Yum repository - yum_repo = cli_factory.make_repository({'product-id': module_product.id}) + yum_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create new Docker repository - docker_repo = cli_factory.make_repository( + docker_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'quay/busybox', @@ -1593,9 +1728,9 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ } ) # Sync both repos for repo_id in [yum_repo['id'], docker_repo['id']]: - Repository.synchronize({'id': repo_id}) + module_target_sat.cli.Repository.synchronize({'id': repo_id}) # Create CV with different content types - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( { 'organization-id': module_org.id, 'repository-ids': [yum_repo['id'], docker_repo['id']], @@ -1608,27 +1743,29 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ ]: assert len(new_cv[repo_type]) == 1 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Remove content from CV for repo_id in [yum_repo['id'], docker_repo['id']]: - ContentView.remove_repository({'id': 
new_cv['id'], 'repository-id': repo_id}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository-id': repo_id} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) for repo_type in [ 'yum-repositories', 'container-image-repositories', ]: assert len(new_cv[repo_type]) == 0 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 2 @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_positive_republish_after_rh_content_removed( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to re-publish content view after all RH associated content was removed from that CV @@ -1642,42 +1779,40 @@ def test_positive_republish_after_rh_content_removed( :BZ: 1323751 - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium """ - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Remove content from CV - ContentView.remove_repository( + module_target_sat.cli.ContentView.remove_repository( {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 # Publish a new version of CV once more - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 2 @pytest.mark.tier2 - def test_positive_publish_ccv(self, module_org, module_product): + def test_positive_publish_ccv(self, module_org, module_product, module_target_sat): """attempt to publish a composite content view containing custom content @@ -1689,39 +1824,46 @@ def test_positive_publish_ccv(self, module_org, module_product): :CaseImportance: Critical - :CaseLevel: Integration """ - repository = cli_factory.make_repository({'product-id': module_product.id}) + repository = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': repository['id']}) + module_target_sat.cli.Repository.synchronize({'id': repository['id']}) # Create CV - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + content_view = 
module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) # Associate repo to CV - ContentView.add_repository({'id': content_view['id'], 'repository-id': repository['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repository['id']} + ) # Publish a new version of CV - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) # Let us now store the version1 id version1_id = content_view['versions'][0]['id'] # Create composite CV - composite_cv = cli_factory.make_content_view( + composite_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': composite_cv['id']}) + module_target_sat.cli.ContentView.add_version( + {'content-view-version-id': version1_id, 'id': composite_cv['id']} + ) # Assert whether version was associated to composite CV - composite_cv = ContentView.info({'id': composite_cv['id']}) + composite_cv = module_target_sat.cli.ContentView.info({'id': composite_cv['id']}) assert composite_cv['components'][0]['id'] == version1_id # Publish a new version of CV - ContentView.publish({'id': composite_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': composite_cv['id']}) # Assert whether Version1 was created and exists in Library Env. - composite_cv = ContentView.info({'id': composite_cv['id']}) + composite_cv = module_target_sat.cli.ContentView.info({'id': composite_cv['id']}) assert composite_cv['lifecycle-environments'][0]['name'] == constants.ENVIRONMENT assert composite_cv['versions'][0]['version'] == '1.0' @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_update_version_once(self, module_org, module_product): + def test_positive_update_version_once(self, module_org, module_product, module_target_sat): # Dev notes: # If Dev has version x, then when I promote version y into # Dev, version x goes away (ie when I promote version 1 to Dev, @@ -1742,59 +1884,63 @@ def test_positive_update_version_once(self, module_org, module_product): environment. 
- :CaseLevel: Integration - :CaseImportance: Critical """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version1 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Actual assert for this test happens HERE # Test whether the version1 now belongs to Library - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert constants.ENVIRONMENT in [env['label'] for env in version1['lifecycle-environments']] # Promotion of version1 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version1_id, 'to-lifecycle-environment-id': environment['id']} ) # The only way to validate whether env has the version is to # validate that version has the env. - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert environment['id'] in [env['id'] for env in version1['lifecycle-environments']] # Now Publish version2 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version2 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) new_cv['versions'].sort(key=lambda version: version['id']) # Let us now store the version2 id version2_id = new_cv['versions'][1]['id'] # Test whether the version2 now belongs to Library - version2 = ContentView.version_info({'id': version2_id}) + version2 = module_target_sat.cli.ContentView.version_info({'id': version2_id}) assert constants.ENVIRONMENT in [env['label'] for env in version2['lifecycle-environments']] # Promotion of version2 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version2_id, 'to-lifecycle-environment-id': environment['id']} ) # Actual assert for this test happens here. 
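# --- Editor's sketch (illustrative only, not part of this diff) ----------------
# Per the dev notes at the top of this test, a lifecycle environment holds at
# most one version of a given CV, so promoting version2 to Dev implicitly drops
# version1 from it. A complementary check of that invariant as a hypothetical
# helper, where `sat` stands in for the same `module_target_sat` fixture:
def _sketch_assert_displaced(sat, old_version_id, lce):
    """Hypothetical helper, not in the PR: after a newer version is promoted,
    the older version should no longer list that environment."""
    old = sat.cli.ContentView.version_info({'id': old_version_id})
    assert lce['id'] not in [env['id'] for env in old['lifecycle-environments']]
# --- end of editor's sketch -----------------------------------------------------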
# Test whether the version2 now belongs to next env - version2 = ContentView.version_info({'id': version2_id}) + version2 = module_target_sat.cli.ContentView.version_info({'id': version2_id}) assert environment['id'] in [env['id'] for env in version2['lifecycle-environments']] @pytest.mark.tier2 - def test_positive_update_version_multiple(self, module_org, module_product): + def test_positive_update_version_multiple(self, module_org, module_product, module_target_sat): # Dev notes: # Similarly when I publish version y, version x goes away from # Library (ie when I publish version 2, version 1 disappears) @@ -1815,64 +1961,71 @@ def test_positive_update_version_multiple(self, module_org, module_product): :CaseImportance: Low - :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version1 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Test whether the version1 now belongs to Library - version = ContentView.version_info({'id': version1_id}) + version = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert constants.ENVIRONMENT in [env['label'] for env in version['lifecycle-environments']] # Promotion of version1 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version1_id, 'to-lifecycle-environment-id': environment['id']} ) # The only way to validate whether env has the version is to # validate that version has the env. # Test whether the version1 now belongs to next env - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert environment['id'] in [env['id'] for env in version1['lifecycle-environments']] # Now Publish version2 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # As per Dev Notes: # Similarly when I publish version y, version x goes away from Library. # Actual assert for this test happens here. 
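# --- Editor's sketch (illustrative only, not part of this diff) ----------------
# The publish-side counterpart of the promotion rule: each publish lands the
# new version in Library and displaces the previous one there, which is what
# the next assertion verifies for version1. The same check as a hypothetical
# reusable helper (constants.ENVIRONMENT is the Library label used throughout):
def _sketch_assert_left_library(sat, old_version_id):
    """Hypothetical helper, not in the PR: an older version should not carry
    the Library environment label once a newer version is published."""
    old = sat.cli.ContentView.version_info({'id': old_version_id})
    assert constants.ENVIRONMENT not in [
        env['label'] for env in old['lifecycle-environments']
    ]
# --- end of editor's sketch -----------------------------------------------------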
# Test that version1 does not exist in Library after publishing v2 - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert len(version1['lifecycle-environments']) == 1 assert constants.ENVIRONMENT not in [ env['label'] for env in version1['lifecycle-environments'] ] # Only after we publish version2 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) new_cv['versions'].sort(key=lambda version: version['id']) # Let us now store the version2 id version2_id = new_cv['versions'][1]['id'] # Promotion of version2 to next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version2_id, 'to-lifecycle-environment-id': environment['id']} ) # Actual assert for this test happens here. # Test that version1 does not exist in any/next env after, # promoting version2 to next env - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert len(version1['lifecycle-environments']) == 0 @pytest.mark.tier2 - def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): + def test_positive_auto_update_composite_to_latest_cv_version( + self, module_org, module_target_sat + ): """Ensure that composite content view component is auto updated to the latest content view version. @@ -1894,19 +2047,19 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): :BZ: 1177766 - :CaseLevel: Integration - :CaseImportance: High """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 version_1_id = content_view['versions'][0]['id'] - composite_cv = cli_factory.make_content_view( + composite_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) - ContentView.component_add( + module_target_sat.cli.ContentView.component_add( { 'composite-content-view-id': composite_cv['id'], 'component-content-view-id': content_view['id'], @@ -1914,21 +2067,25 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): } ) # Ensure that version 1 is in composite content view components - components = ContentView.component_list({'composite-content-view-id': composite_cv['id']}) + components = module_target_sat.cli.ContentView.component_list( + {'composite-content-view-id': composite_cv['id']} + ) assert len(components) == 1 component_id = components[0]['content-view-id'] assert components[0]['version-id'] == f'{version_1_id} (Latest)' assert components[0]['current-version'] == '1.0' # Publish the content view a second time - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 2 content_view['versions'].sort(key=lambda version: 
version['id']) # Ensure that composite content view component has been updated to # version 2 version_2_id = content_view['versions'][1]['id'] assert version_1_id != version_2_id - components = ContentView.component_list({'composite-content-view-id': composite_cv['id']}) + components = module_target_sat.cli.ContentView.component_list( + {'composite-content-view-id': composite_cv['id']} + ) assert len(components) == 1 # Ensure that this is the same component that is updated assert component_id == components[0]['content-view-id'] @@ -1936,7 +2093,7 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): assert components[0]['current-version'] == '2.0' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id(self, module_org): + def test_positive_subscribe_chost_by_id(self, module_org, module_target_sat): """Attempt to subscribe content host to content view :id: db0bfd9d-3150-427e-9683-a68af33813e7 @@ -1945,17 +2102,22 @@ def test_positive_subscribe_chost_by_id(self, module_org): :CaseImportance: High - :CaseLevel: System """ - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -1963,13 +2125,13 @@ def test_positive_subscribe_chost_by_id(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.run_in_one_thread @pytest.mark.tier3 def test_positive_subscribe_chost_by_id_using_rh_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to subscribe content host to content view that has Red Hat repository assigned to it @@ -1979,32 +2141,32 @@ def test_positive_subscribe_chost_by_id_using_rh_content( :expectedresults: Content Host can be subscribed to content view with Red Hat repository - :CaseLevel: System - :CaseImportance: Medium """ - env = cli_factory.make_lifecycle_environment( + env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) - 
ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['yum-repositories'][0]['name'] == module_rhel_content['name'] - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2012,14 +2174,14 @@ def test_positive_subscribe_chost_by_id_using_rh_content( 'organization-id': module_entitlement_manifest_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.run_in_one_thread @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to subscribe content host to filtered content view that has Red Hat repository assigned to it @@ -2029,30 +2191,28 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( :expectedresults: Content Host can be subscribed to filtered content view with Red Hat repository - :CaseLevel: System - :BZ: 1359665 :CaseImportance: Low """ - env = cli_factory.make_lifecycle_environment( + env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['yum-repositories'][0]['name'] == module_rhel_content['name'] name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -2061,7 +2221,7 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( } ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter': name, 'content-view-id': content_view['id'], @@ -2069,15 +2229,17 @@ def 
test_positive_subscribe_chost_by_id_using_rh_content_and_filters( } ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2085,11 +2247,13 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( 'organization-id': module_entitlement_manifest_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): + def test_positive_subscribe_chost_by_id_using_custom_content( + self, module_org, module_target_sat + ): """Attempt to subscribe content host to content view that has custom repository assigned to it @@ -2098,16 +2262,20 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): :expectedresults: Content Host can be subscribed to content view with custom repository - :CaseLevel: System - :CaseImportance: High """ - new_product = cli_factory.make_product({'organization-id': module_org.id}) - new_repo = cli_factory.make_repository({'product-id': new_product['id']}) - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - Repository.synchronize({'id': new_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2115,15 +2283,17 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): } ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = 
module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2131,11 +2301,11 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): + def test_positive_subscribe_chost_by_id_using_ccv(self, module_org, module_target_sat): """Attempt to subscribe content host to composite content view :id: 4be340c0-9e58-4b96-ab37-d7e3b12c724f @@ -2145,21 +2315,24 @@ def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): :CaseImportance: High - :CaseLevel: System """ - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2167,7 +2340,7 @@ def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 @@ -2214,7 +2387,6 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( :parametrized: yes - :CaseLevel: System """ # Note: this test has been stubbed waiting for bug 1511481 resolution # prepare the user and the required permissions data @@ -2239,12 +2411,12 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( 'Architecture': ['view_architectures'], } # Create a location and organization - loc = cli_factory.make_location() - default_loc = Location.info({'name': constants.DEFAULT_LOC}) - org = cli_factory.make_org() - Org.add_location({'id': org['id'], 'location-id': loc['id']}) + loc = target_sat.cli_factory.make_location() + default_loc = target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) + org = target_sat.cli_factory.make_org() + target_sat.cli.Org.add_location({'id': org['id'], 'location-id': loc['id']}) # Create a non-admin user, for the moment without any permissions - user = 
cli_factory.make_user( + user = target_sat.cli_factory.user( { 'admin': False, 'default-organization-id': org['id'], @@ -2256,9 +2428,9 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( } ) # Create a new role - role = cli_factory.make_role() + role = target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -2278,40 +2450,42 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - cli_factory.make_filter({'role-id': role['id'], 'permissions': permission_names}) + target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names} + ) # Add the created and initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises(CLIReturnCodeError) as context: - Role.with_user(user_name, user_password).info({'id': role['id']}) + target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) assert 'Access denied' in str(context) # Create a lifecycle environment - env = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) # Create a product - product = cli_factory.make_product({'organization-id': org['id']}) + product = target_sat.cli_factory.make_product({'organization-id': org['id']}) # Create a yum repository and synchronize - repo = cli_factory.make_repository( - {'product-id': product['id'], 'url': settings.repos.yum_1.url} + repo = target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': product['id'], 'url': settings.repos.yum_1.url} ) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view, add the yum repository and publish - content_view = cli_factory.make_content_view({'organization-id': org['id']}) - ContentView.add_repository( + content_view = target_sat.cli_factory.make_content_view({'organization-id': org['id']}) + target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'organization-id': org['id'], 'repository-id': repo['id']} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) # assert that the content view has been published and has versions assert len(content_view['versions']) > 0 content_view_version = content_view['versions'][0] # Promote the content view version to the created environment - ContentView.version_promote( + target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': env['id']} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, 
user_password).info( + user_content_view = target_sat.cli.ContentView.with_user(user_name, user_password).info( {'id': content_view['id']} ) # assert that this is the same content view @@ -2326,7 +2500,7 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( ) assert rhel7_contenthost.subscribed # check that the client host exists in the system - org_hosts = Host.list({'organization-id': org['id']}) + org_hosts = target_sat.cli.Host.list({'organization-id': org['id']}) assert len(org_hosts) == 1 assert org_hosts[0]['name'] == rhel7_contenthost.hostname @@ -2369,7 +2543,6 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( :parametrized: yes - :CaseLevel: System """ # prepare the user and the required permissions data user_name = gen_alphanumeric() @@ -2393,11 +2566,11 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( 'Architecture': ['view_architectures'], } # Create organization - loc = Location.info({'name': constants.DEFAULT_LOC}) - org = cli_factory.make_org() - Org.add_location({'id': org['id'], 'location-id': loc['id']}) + loc = target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) + org = target_sat.cli_factory.make_org() + target_sat.cli.Org.add_location({'id': org['id'], 'location-id': loc['id']}) # Create a non-admin user, for the moment without any permissions - user = cli_factory.make_user( + user = target_sat.cli_factory.user( { 'admin': False, 'default-organization-id': org['id'], @@ -2409,9 +2582,9 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( } ) # Create a new role - role = cli_factory.make_role() + role = target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -2431,40 +2604,42 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - cli_factory.make_filter({'role-id': role['id'], 'permissions': permission_names}) + target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names} + ) # Add the created and initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises(CLIReturnCodeError) as context: - Role.with_user(user_name, user_password).info({'id': role['id']}) - assert '403 Forbidden' in str(context) + target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) + assert '403 Forbidden' in str(context) # Create a lifecycle environment - env = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) # Create a product - product = cli_factory.make_product({'organization-id': org['id']}) + product = target_sat.cli_factory.make_product({'organization-id': org['id']}) # Create a yum repository and synchronize - repo = cli_factory.make_repository( - {'product-id': product['id'], 'url': settings.repos.yum_1.url} + repo = 
target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': product['id'], 'url': settings.repos.yum_1.url} ) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view, add the yum repository and publish - content_view = cli_factory.make_content_view({'organization-id': org['id']}) - ContentView.add_repository( + content_view = target_sat.cli_factory.make_content_view({'organization-id': org['id']}) + target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'organization-id': org['id'], 'repository-id': repo['id']} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) # assert that the content view has been published and has versions assert len(content_view['versions']) > 0 content_view_version = content_view['versions'][0] # Promote the content view version to the created environment - ContentView.version_promote( + target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': env['id']} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, user_password).info( + user_content_view = target_sat.cli.ContentView.with_user(user_name, user_password).info( {'id': content_view['id']} ) # assert that this is the same content view @@ -2479,12 +2654,12 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( ) assert rhel7_contenthost.subscribed # check that the client host exists in the system - org_hosts = Host.list({'organization-id': org['id']}) + org_hosts = target_sat.cli.Host.list({'organization-id': org['id']}) assert len(org_hosts) == 1 assert org_hosts[0]['name'] == rhel7_contenthost.hostname @pytest.mark.tier1 - def test_positive_clone_by_id(self, module_org): + def test_positive_clone_by_id(self, module_org, module_target_sat): """Clone existing content view by id :id: e3b63e6e-0964-45fb-a765-e1885c0ecbdd @@ -2494,13 +2669,17 @@ def test_positive_clone_by_id(self, module_org): :CaseImportance: Critical """ cloned_cv_name = gen_string('alpha') - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - new_cv = ContentView.copy({'id': content_view['id'], 'new-name': cloned_cv_name})[0] - new_cv = ContentView.info({'id': new_cv['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['name'] == cloned_cv_name @pytest.mark.tier1 - def test_positive_clone_by_name(self, module_org): + def test_positive_clone_by_name(self, module_org, module_target_sat): """Clone existing content view by name :id: b4c94286-ebbe-4e4c-a1df-22cb7055984d @@ -2512,19 +2691,21 @@ def test_positive_clone_by_name(self, module_org): :CaseImportance: Critical """ cloned_cv_name = gen_string('alpha') - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - new_cv = ContentView.copy( + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli.ContentView.copy( { 'name': 
content_view['name'], 'organization-id': module_org.id, 'new-name': cloned_cv_name, } )[0] - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['name'] == cloned_cv_name @pytest.mark.tier2 - def test_positive_clone_within_same_env(self, module_org): + def test_positive_clone_within_same_env(self, module_org, module_target_sat): """Attempt to create, publish and promote new content view based on existing view within the same environment as the original content view @@ -2533,27 +2714,35 @@ def test_positive_clone_within_same_env(self, module_org): :expectedresults: Cloned content view can be published and promoted to the same environment as the original content view - :CaseLevel: Integration - :CaseImportance: High """ cloned_cv_name = gen_string('alpha') - lc_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + lc_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.copy({'id': content_view['id'], 'new-name': cloned_cv_name})[0] - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': lc_env['id'], 'name': lc_env['name']} in new_cv['lifecycle-environments'] @pytest.mark.tier2 - def test_positive_clone_with_diff_env(self, module_org): + def test_positive_clone_with_diff_env(self, module_org, module_target_sat): """Attempt to create, publish and promote new content view based on existing view but promoted to a different environment @@ -2564,24 +2753,33 @@ def test_positive_clone_with_diff_env(self, module_org): :CaseImportance: Low - :CaseLevel: Integration """ cloned_cv_name = gen_string('alpha') - lc_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lc_env_cloned = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + lc_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': 
module_org.id} + ) + lc_env_cloned = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.copy({'id': content_view['id'], 'new-name': cloned_cv_name})[0] - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env_cloned['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': lc_env_cloned['id'], 'name': lc_env_cloned['name']} in new_cv[ 'lifecycle-environments' ] @@ -2622,12 +2820,14 @@ def test_positive_restart_dynflow_publish(self): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): + def test_positive_remove_renamed_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove version of renamed content view from Library environment :id: aa9bbfda-72e8-45ec-b26d-fdf2691980cf - :Steps: + :steps: 1. Create a content view 2. 
Add a yum repo to the content view @@ -2638,42 +2838,46 @@ def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): :expectedresults: content view version is removed from Library environment - :CaseLevel: Integration - :CaseImportance: Low """ new_name = gen_string('alpha') - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': custom_yum_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) # ensure that the published content version is in Library environment - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ 'versions' ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] assert constants.ENVIRONMENT in _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) # rename the content view - ContentView.update({'id': content_view['id'], 'new-name': new_name}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.update({'id': content_view['id'], 'new-name': new_name}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert new_name == content_view['name'] # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2683,19 +2887,21 @@ def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): # ensure that the published content version is not in Library # environment assert constants.ENVIRONMENT not in _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove promoted content view version from Library environment :id: 6643837a-560a-47de-aa4d-90778914dcfa - :Steps: + :steps: 1. Create a content view 2. Add a yum repo to the content view @@ -2708,38 +2914,44 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): 1. Content view version exists only in DEV and not in Library 2. 
The yum repo exists in content view version - :CaseLevel: Integration - :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - custom_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + custom_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': custom_yum_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce_dev['id']} ) # ensure that the published content version is in Library and DEV # environments - content_view_version_info = ContentView.version_info( + content_view_version_info = module_target_sat.cli.ContentView.version_info( { 'organization-id': module_org.id, 'content-view-id': content_view['id'], @@ -2751,7 +2963,7 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): } assert {constants.ENVIRONMENT, lce_dev['name']} == content_view_version_lce_names # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2759,7 +2971,7 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): } ) # ensure content view version not in Library and only in DEV - content_view_version_info = ContentView.version_info( + content_view_version_info = module_target_sat.cli.ContentView.version_info( { 'organization-id': module_org.id, 'content-view-id': content_view['id'], @@ -2772,12 +2984,14 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): assert {lce_dev['name']} == content_view_version_lce_names @pytest.mark.tier2 - def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_qe_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove QE promoted content view version from Library environment :id: e286697f-4113-40a3-b8e8-9ca50647e6d5 - :Steps: + :steps: 1. Create a content view 2. 
Add docker repo(s) to it @@ -2789,16 +3003,18 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or :expectedresults: Content view version exists only in DEV, QE and not in Library - :CaseLevel: Integration - :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -2807,21 +3023,25 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': docker_repository['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ 'versions' ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV and QE @@ -2830,9 +3050,11 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or constants.ENVIRONMENT, lce_dev['name'], lce_qe['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2842,19 +3064,21 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or # ensure content view version is not in Library and only in DEV and QE # environments assert {lce_dev['name'], lce_qe['name']} == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def 
test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_prod_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove PROD promoted content view version from Library environment :id: ffe3d64e-c3d2-4889-9454-ccc6b10f4db7 - :Steps: + :steps: 1. Create a content view 2. Add yum repositories and docker repositories to CV @@ -2868,26 +3092,31 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ :CaseImportance: High - :CaseLevel: Integration """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -2896,22 +3125,26 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE and @@ -2921,9 +3154,11 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ 
lce_dev['name'], lce_qe['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2936,18 +3171,20 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ lce_dev['name'], lce_qe['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_cv_version_from_env(self, module_org): + def test_positive_remove_cv_version_from_env(self, module_org, module_target_sat): """Remove promoted content view version from environment :id: 577757ac-b184-4ece-9310-182dd5ceb718 - :Steps: + :steps: 1. Create a content view 2. Add a yum repo and a docker repo to the content view @@ -2962,31 +3199,35 @@ def test_positive_remove_cv_version_from_env(self, module_org): :expectedresults: Content view version exists in Library, DEV, QE, STAGE, PROD - :CaseLevel: Integration - :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -2995,22 +3236,26 @@ def test_positive_remove_cv_version_from_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + 
module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ 'versions' ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, @@ -3021,9 +3266,11 @@ def test_positive_remove_cv_version_from_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from PROD lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3037,9 +3284,11 @@ def test_positive_remove_cv_version_from_env(self, module_org): lce_dev['name'], lce_qe['name'], lce_stage['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # promote content view version to PROD environment again - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce_prod['id']} ) assert { @@ -3048,18 +3297,20 @@ def test_positive_remove_cv_version_from_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) @pytest.mark.tier3 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_cv_version_from_multi_env(self, module_org): + def test_positive_remove_cv_version_from_multi_env(self, module_org, module_target_sat): """Remove promoted content view version from multiple environments :id: 997cfd7d-9029-47e2-a41e-84f4370b5ce5 - :Steps: + :steps: 1. Create a content view 2. 
Add a yum repo and a docker repo to the content view @@ -3070,31 +3321,35 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): :expectedresults: Content view version exists only in Library, DEV - :CaseLevel: Integration - :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3103,22 +3358,26 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ 'versions' ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, @@ -3129,11 +3388,13 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], 
content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from QE, STAGE, PROD lifecycle # environments for lce in [lce_qe, lce_stage, lce_prod]: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3143,17 +3404,17 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): # ensure content view version is not in QE, STAGE and PROD and only in # Library and DEV environments assert {constants.ENVIRONMENT, lce_dev['name']} == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier3 - def test_positive_delete_cv_promoted_to_multi_env(self, module_org): + def test_positive_delete_cv_promoted_to_multi_env(self, module_org, module_target_sat): """Delete published content view with version promoted to multiple environments :id: 93dd7518-5901-4a71-a4c3-0f1215238b26 - :Steps: + :steps: 1. Create a content view 2. Add a yum repo and a docker repo to the content view @@ -3165,31 +3426,35 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): :expectedresults: The content view doesn't exist - :CaseLevel: Integration - :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3198,28 +3463,32 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + 
{'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ 'versions' ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, # STAGE and PROD environments promoted_lce_names_set = _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) assert { constants.ENVIRONMENT, @@ -3230,7 +3499,7 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): } == promoted_lce_names_set # remove from all promoted lifecycle environments for lce_name in promoted_lce_names_set: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3238,12 +3507,12 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): } ) # ensure content view is in content views list - content_views = ContentView.list({'organization-id': module_org.id}) + content_views = module_target_sat.cli.ContentView.list({'organization-id': module_org.id}) assert content_view['name'] in [cv['name'] for cv in content_views] # delete the content view - ContentView.delete({'id': content_view['id']}) + module_target_sat.cli.ContentView.delete({'id': content_view['id']}) # ensure the content view is not in content views list - content_views = ContentView.list({'organization-id': module_org.id}) + content_views = module_target_sat.cli.ContentView.list({'organization-id': module_org.id}) assert content_view['name'] not in [cv['name'] for cv in content_views] @pytest.mark.stubbed @@ -3255,7 +3524,7 @@ def test_positive_remove_cv_version_from_env_with_host_registered(self): :id: 001a2b76-a87b-4c11-8837-f5fe3c04a075 - :Steps: + :steps: 1. Create a content view cv1 2. Add a yum repo to the content view @@ -3282,7 +3551,6 @@ def test_positive_remove_cv_version_from_env_with_host_registered(self): :CaseAutomation: NotAutomated - :CaseLevel: System """ @pytest.mark.stubbed @@ -3294,7 +3562,7 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(self): :id: 82442d23-45b5-4d39-b867-c5d46bbcbbf9 - :Steps: + :steps: 1. Create two content views cv1 and cv2 2. 
Add a yum repo to both content views @@ -3322,7 +3590,6 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(self): :CaseAutomation: NotAutomated - :CaseLevel: System """ @pytest.mark.run_in_one_thread @@ -3332,14 +3599,14 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(self): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) def test_positive_remove_cv_version_from_multi_env_capsule_scenario( - self, module_org, capsule_configured + self, module_org, capsule_configured, module_target_sat ): """Remove promoted content view version from multiple environments, with satellite set up to use a capsule :id: 3725fef6-73a4-4dcb-a306-70e6ba826a3d - :Steps: + :steps: 1. Create a content view 2. Set up satellite to use a capsule and to sync all lifecycle @@ -3364,47 +3631,51 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( :CaseAutomation: Automated - :CaseLevel: System - :CaseImportance: High """ # Note: This test case requires complete external capsule # configuration. - dev_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - qe_env = cli_factory.make_lifecycle_environment( + dev_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + qe_env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': dev_env['name']} ) - prod_env = cli_factory.make_lifecycle_environment( + prod_env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': qe_env['name']} ) - capsule = Capsule().info({'name': capsule_configured.hostname}) + capsule = module_target_sat.cli.Capsule().info({'name': capsule_configured.hostname}) # Add all environments to capsule environments = {constants.ENVIRONMENT, dev_env['name'], qe_env['name'], prod_env['name']} for env_name in environments: - Capsule.content_add_lifecycle_environment( + module_target_sat.cli.Capsule.content_add_lifecycle_environment( { 'id': capsule['id'], 'organization-id': module_org.id, 'environment': env_name, } ) - capsule_environments = Capsule.content_lifecycle_environments( + capsule_environments = module_target_sat.cli.Capsule.content_lifecycle_environments( {'id': capsule['id'], 'organization-id': module_org.id} ) capsule_environments_names = {env['name'] for env in capsule_environments} assert environments == capsule_environments_names # Set up a yum repo - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( + { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3413,10 +3684,12 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( 'url': 
constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3424,11 +3697,13 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( } ) # Publish the content view - ContentView.publish({'id': content_view['id']}) - content_view_version = ContentView.info({'id': content_view['id']})['versions'][-1] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_version = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ][-1] # Promote the content view to DEV, QE, PROD for env in [dev_env, qe_env, prod_env]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': content_view_version['id'], 'organization-id': module_org.id, @@ -3436,8 +3711,10 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( } ) # Synchronize the capsule content - Capsule.content_synchronize({'id': capsule['id'], 'organization-id': module_org.id}) - capsule_content_info = Capsule.content_info( + module_target_sat.cli.Capsule.content_synchronize( + {'id': capsule['id'], 'organization-id': module_org.id} + ) + capsule_content_info = module_target_sat.cli.Capsule.content_info( {'id': capsule['id'], 'organization-id': module_org.id} ) # Ensure that all environments exist in capsule content @@ -3467,7 +3744,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( pytest.skip('Power control host workflow is not defined') # Remove the content view version from Library and DEV environments for lce_name in [constants.ENVIRONMENT, dev_env['name']]: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3478,7 +3755,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( # DEV and exists only in QE and PROD environments_with_cv = {qe_env['name'], prod_env['name']} assert environments_with_cv == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) # Resume the capsule with ensure True to ping the virtual machine try: @@ -3487,7 +3764,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( pytest.skip('Power control host workflow is not defined') # Assert that in capsule content the content view version # does not exist in Library and DEV and exists only in QE and PROD - capsule_content_info = Capsule.content_info( + capsule_content_info = module_target_sat.cli.Capsule.content_info( {'id': capsule['id'], 'organization-id': module_org.id} ) capsule_content_info_lces = capsule_content_info['lifecycle-environments'] @@ -3506,7 +3783,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( assert content_view['name'] not in capsule_content_info_lce_cvs_names @pytest.mark.tier2 - def test_negative_user_with_no_create_view_cv_permissions(self, module_org): + def test_negative_user_with_no_create_view_cv_permissions(self, module_org, 
module_target_sat): """Unauthorized users are not able to create/view content views :id: 17617893-27c2-4cb2-a2ed-47378ef90e7a @@ -3519,24 +3796,26 @@ def test_negative_user_with_no_create_view_cv_permissions(self, module_org): :CaseImportance: Critical """ password = gen_alphanumeric() - no_rights_user = cli_factory.make_user({'password': password}) + no_rights_user = module_target_sat.cli_factory.user({'password': password}) no_rights_user['password'] = password - org_id = cli_factory.make_org(cached=True)['id'] + org_id = module_target_sat.cli_factory.make_org(cached=True)['id'] for name in generate_strings_list(exclude_types=['cjk']): # test that user can't create with pytest.raises(CLIReturnCodeError): - ContentView.with_user(no_rights_user['login'], no_rights_user['password']).create( - {'name': name, 'organization-id': org_id} - ) + module_target_sat.cli.ContentView.with_user( + no_rights_user['login'], no_rights_user['password'] + ).create({'name': name, 'organization-id': org_id}) # test that user can't read - con_view = cli_factory.make_content_view({'name': name, 'organization-id': org_id}) + con_view = module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': org_id} + ) with pytest.raises(CLIReturnCodeError): - ContentView.with_user(no_rights_user['login'], no_rights_user['password']).info( - {'id': con_view['id']} - ) + module_target_sat.cli.ContentView.with_user( + no_rights_user['login'], no_rights_user['password'] + ).info({'id': con_view['id']}) @pytest.mark.tier2 - def test_negative_user_with_read_only_cv_permission(self, module_org): + def test_negative_user_with_read_only_cv_permission(self, module_org, module_target_sat): """Read-only user is able to view content view :id: 588f57b5-9855-4c14-80d0-64b617c6b6dc @@ -3549,16 +3828,16 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): :BZ: 1922134 - :CaseLevel: Integration - :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) password = gen_string('alphanumeric') - user = cli_factory.make_user({'password': password}) - role = cli_factory.make_role() - cli_factory.make_filter( + user = module_target_sat.cli_factory.user({'password': password}) + role = module_target_sat.cli_factory.make_role() + module_target_sat.cli_factory.make_filter( { 'organization-ids': module_org.id, 'permissions': 'view_content_views', @@ -3566,26 +3845,28 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): 'override': 1, } ) - User.add_role({'id': user['id'], 'role-id': role['id']}) - ContentView.with_user(user['login'], password).info({'id': cv['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).info({'id': cv['id']}) # Verify read-only user can't either edit CV with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).update( + module_target_sat.cli.ContentView.with_user(user['login'], password).update( {'id': cv['id'], 'new-name': gen_string('alphanumeric')} ) # or create a new one with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).create( + 
module_target_sat.cli.ContentView.with_user(user['login'], password).create( {'name': gen_string('alphanumeric'), 'organization-id': module_org.id} ) # or publish with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).publish({'id': cv['id']}) - ContentView.publish({'id': cv['id']}) - cvv = ContentView.info({'id': cv['id']})['versions'][-1] + module_target_sat.cli.ContentView.with_user(user['login'], password).publish( + {'id': cv['id']} + ) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + cvv = module_target_sat.cli.ContentView.info({'id': cv['id']})['versions'][-1] # or promote with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).version_promote( + module_target_sat.cli.ContentView.with_user(user['login'], password).version_promote( { 'id': cvv['id'], 'organization-id': module_org.id, @@ -3594,22 +3875,22 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): ) # or create a product with pytest.raises(CLIReturnCodeError): - Product.with_user(user['login'], password).create( + module_target_sat.cli.Product.with_user(user['login'], password).create( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) # cannot create activation key with pytest.raises(CLIReturnCodeError): - ActivationKey.with_user(user['login'], password).create( + module_target_sat.cli.ActivationKey.with_user(user['login'], password).create( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) # cannot create host collection with pytest.raises(CLIReturnCodeError): - HostCollection.with_user(user['login'], password).create( + module_target_sat.cli.HostCollection.with_user(user['login'], password).create( {'organization-id': module_org.id} ) @pytest.mark.tier2 - def test_positive_user_with_all_cv_permissions(self, module_org): + def test_positive_user_with_all_cv_permissions(self, module_org, module_target_sat): """A user with all content view permissions is able to create, read, modify, promote, publish content views @@ -3622,61 +3903,65 @@ def test_positive_user_with_all_cv_permissions(self, module_org): :BZ: 1464414 - :CaseLevel: Integration - :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) password = gen_string('alphanumeric') - user = cli_factory.make_user({'password': password, 'organization-ids': module_org.id}) - role = cli_factory.make_role({'organization-ids': module_org.id}) + user = module_target_sat.cli_factory.user( + {'password': password, 'organization-ids': module_org.id} + ) + role = module_target_sat.cli_factory.make_role({'organization-ids': module_org.id}) # note: the filters inherit role organizations - cli_factory.make_filter( + module_target_sat.cli_factory.make_filter( {'permissions': constants.PERMISSIONS['Katello::ContentView'], 'role-id': role['id']} ) - cli_factory.make_filter( + module_target_sat.cli_factory.make_filter( {'permissions': constants.PERMISSIONS['Katello::KTEnvironment'], 'role-id': role['id']} ) - User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # Make sure user is not admin and has only expected roles assigned - user = 
User.info({'id': user['id']}) + user = module_target_sat.cli.User.info({'id': user['id']}) assert user['admin'] == 'no' assert set(user['roles']) == {role['name']} # Verify user can either edit CV - ContentView.with_user(user['login'], password).info({'id': cv['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).info({'id': cv['id']}) new_name = gen_string('alphanumeric') - ContentView.with_user(user['login'], password).update( + module_target_sat.cli.ContentView.with_user(user['login'], password).update( {'id': cv['id'], 'new-name': new_name} ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name # or create a new one new_cv_name = gen_string('alphanumeric') - new_cv = ContentView.with_user(user['login'], password).create( + new_cv = module_target_sat.cli.ContentView.with_user(user['login'], password).create( {'name': new_cv_name, 'organization-id': module_org.id} ) assert new_cv['name'] == new_cv_name # or publish - ContentView.with_user(user['login'], password).publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).publish( + {'id': cv['id']} + ) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 1 # or promote - ContentView.with_user(user['login'], password).version_promote( + module_target_sat.cli.ContentView.with_user(user['login'], password).version_promote( { 'id': cv['versions'][-1]['id'], 'organization-id': module_org.id, 'to-lifecycle-environment-id': environment['id'], } ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert environment['id'] in [env['id'] for env in cv['lifecycle-environments']] @pytest.mark.tier3 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_inc_update_no_lce(self, module_org, module_product): + def test_positive_inc_update_no_lce(self, module_org, module_product, module_target_sat): """Publish incremental update without providing lifecycle environment for a content view version not promoted to any lifecycle environment @@ -3690,27 +3975,26 @@ def test_positive_inc_update_no_lce(self, module_org, module_product): :CaseImportance: Medium - :CaseLevel: Integration """ - repo = cli_factory.make_repository( + repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': repo['id']}) - content_view = cli_factory.make_content_view( + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': repo['id']} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - cvf = cli_factory.make_content_view_filter( + cvf = module_target_sat.cli_factory.make_content_view_filter( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -3718,27 +4002,69 @@ def test_positive_inc_update_no_lce(self, module_org, module_product): 'type': 'rpm', }, ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'name': FAKE_2_CUSTOM_PACKAGE_NAME, 'version': 5.21, } 
) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cvv = content_view['versions'][0] - result = ContentView.version_incremental_update( + result = module_target_sat.cli.ContentView.version_incremental_update( {'content-view-version-id': cvv['id'], 'errata-ids': settings.repos.yum_1.errata[1]} ) # Inc update output format is pretty weird - list of dicts where each # key's value is actual line from stdout result = [line.strip() for line_dict in result for line in line_dict.values()] assert FAKE_2_CUSTOM_PACKAGE not in [line.strip() for line in result] - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert '1.1' in [cvv_['version'] for cvv_ in content_view['versions']] + @pytest.mark.tier2 + def test_version_info_by_lce(self, module_org, module_target_sat): + """Hammer version info can be passed the lce id/name argument without error + + :id: 6ab0c46c-c62a-488b-a30f-5500d6c7ec96 + + :steps: + 1. Lookup CV version info passing the lce id as an argument + + :expectedresults: LCE is able to be passed to version info command without error + + :BZ: 2139834 + + :customerscenario: true + """ + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.version_promote( + {'id': content_view['id'], 'to-lifecycle-environment-id': lce['id']} + ) + content_view = module_target_sat.cli.ContentView.version_info( + { + 'id': content_view['id'], + 'lifecycle-environment-id': lce['id'], + 'organization-id': module_org.id, + } + ) + assert content_view['version'] == '1.0' + content_view = module_target_sat.cli.ContentView.version_info( + { + 'id': content_view['id'], + 'lifecycle-environment': lce['name'], + 'organization-id': module_org.id, + } + ) + assert content_view['version'] == '1.0' + class TestContentViewFileRepo: """Specific tests for Content Views with File Repositories containing @@ -3753,11 +4079,11 @@ def make_file_repository_upload_contents( if options is None: options = {'product-id': module_product.id, 'content-type': 'file'} if not options.get('content-type'): - raise cli_factory.CLIFactoryError('Please provide a valid Content Type.') - new_repo = cli_factory.make_repository(options) + raise CLIFactoryError('Please provide a valid Content Type.') + new_repo = satellite.cli_factory.make_repository(options) remote_path = f'/tmp/{constants.RPM_TO_UPLOAD}' satellite.put(DataFile.RPM_TO_UPLOAD, remote_path) - Repository.upload_content( + satellite.cli.Repository.upload_content( { 'name': new_repo['name'], 'organization-id': module_org.id, @@ -3765,13 +4091,15 @@ def make_file_repository_upload_contents( 'product-id': new_repo['product']['id'], } ) - new_repo = Repository.info({'id': new_repo['id']}) + new_repo = satellite.cli.Repository.info({'id': new_repo['id']}) assert int(new_repo['content-counts']['files']) > 0 return new_repo @pytest.mark.skip_if_open('BZ:1610309') @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, target_sat): + def 
test_positive_arbitrary_file_repo_addition( + self, module_org, module_product, module_target_sat + ): """Check a File Repository with Arbitrary File can be added to a Content View @@ -3782,25 +4110,23 @@ def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, 2. Upload an arbitrary file to it 3. Create a Content View (CV) - :Steps: + :steps: 1. Add the FR to the CV :expectedresults: Check FR is added to CV :CaseAutomation: Automated - :CaseLevel: Integration - :CaseImportance: High :BZ: 1610309, 1908465 """ repo = self.make_file_repository_upload_contents( - module_org, module_product, satellite=target_sat + module_org, module_product, satellite=module_target_sat ) - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV with names. - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'name': cv['name'], 'organization-id': module_org.id, @@ -3808,12 +4134,14 @@ def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, 'repository': repo['name'], } ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['file-repositories'][0]['name'] == repo['name'] @pytest.mark.skip_if_open('BZ:1908465') @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_removal(self, module_org, module_product, target_sat): + def test_positive_arbitrary_file_repo_removal( + self, module_org, module_product, module_target_sat + ): """Check a File Repository with Arbitrary File can be removed from a Content View @@ -3825,25 +4153,25 @@ def test_positive_arbitrary_file_repo_removal(self, module_org, module_product, 3. Create a Content View (CV) 4. Add the FR to the CV - :Steps: + :steps: 1. Remove the FR from the CV :expectedresults: Check FR is removed from CV :CaseAutomation: Automated - :CaseLevel: Integration - :BZ: 1908465 """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) repo = self.make_file_repository_upload_contents( - module_org, module_product, satellite=target_sat + module_org, module_product, satellite=module_target_sat ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': cv['id'], 'repository-id': repo['id'], 'organization-id': module_org.id} ) - ContentView.remove_repository({'id': cv['id'], 'repository-id': repo['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': cv['id'], 'repository-id': repo['id']} + ) assert cv['file-repositories'][0]['id'] != repo['id'] @pytest.mark.stubbed @@ -3863,18 +4191,19 @@ def test_positive_arbitrary_file_sync_over_capsule(self): 5. Create a Capsule 6. Connect the Capsule with Satellite/Foreman host - :Steps: + :steps: 1. Start synchronization :expectedresults: Check CV with FR is synced over Capsule :CaseAutomation: NotAutomated - :CaseLevel: System """ @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_promotion(self, module_org, module_product, target_sat): + def test_positive_arbitrary_file_repo_promotion( + self, module_org, module_product, module_target_sat + ): """Check arbitrary files availability for Content view version after content-view promotion. @@ -3887,7 +4216,7 @@ def test_positive_arbitrary_file_repo_promotion(self, module_org, module_product 4. Add the FR to the CV 5. 
Create an Environment - :Steps: + :steps: 1. Promote the CV to the Environment :expectedresults: Check arbitrary files from FR are available on @@ -3895,25 +4224,27 @@ def test_positive_arbitrary_file_repo_promotion(self, module_org, module_product :CaseAutomation: Automated - :CaseLevel: Integration - :CaseImportance: High """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) repo = self.make_file_repository_upload_contents( - module_product, module_product, satellite=target_sat + module_org, module_product, satellite=module_target_sat ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': cv['id'], 'repository-id': repo['id'], 'organization-id': module_org.id} ) - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - ContentView.publish({'id': cv['id']}) - content_view_info = ContentView.version_info({'content-view-id': cv['id'], 'version': 1}) - ContentView.version_promote( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view_info = module_target_sat.cli.ContentView.version_info( + {'content-view-id': cv['id'], 'version': 1} + ) + module_target_sat.cli.ContentView.version_promote( {'id': content_view_info['id'], 'to-lifecycle-environment-id': env['id']} ) - expected_repo = ContentView.version_info( + expected_repo = module_target_sat.cli.ContentView.version_info( { 'content-view-id': cv['id'], 'lifecycle-environment': env['name'], @@ -3945,7 +4276,7 @@ def test_positive_katello_repo_rpms_max_int(self, target_sat): assert 'id|bigint' in result.stdout.splitlines()[3].replace(' ', '') @pytest.mark.tier3 - def test_positive_inc_update_should_not_fail(self, module_org): + def test_positive_inc_update_should_not_fail(self, module_org, module_target_sat): """Incremental update after removing a package should not give a 400 error code :BZ: 2041497 @@ -3956,41 +4287,43 @@ def test_positive_inc_update_should_not_fail(self, module_org): :expectedresults: Incremental update is successful """ - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - repo = Repository.info({'id': custom_yum_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': custom_yum_repo['id']}) assert repo['content-counts']['packages'] == '32' # grab and remove the 'bear' package - package = Package.list({'repository-id': repo['id']})[0] - Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) - repo = Repository.info({'id': repo['id']}) + package = module_target_sat.cli.Package.list({'repository-id': repo['id']})[0] + module_target_sat.cli.Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '31' - content_view = cli_factory.make_content_view( + content_view = 
module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': repo['id']} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - Repository.synchronize({'id': custom_yum_repo['id']}) - repo = Repository.info({'id': custom_yum_repo['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': custom_yum_repo['id']}) assert repo['content-counts']['packages'] == '32' - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - result = ContentView.version_incremental_update( + result = module_target_sat.cli.ContentView.version_incremental_update( {'content-view-version-id': cvv['id'], 'errata-ids': settings.repos.yum_1.errata[0]} ) assert result[2] - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert '1.1' in [cvv_['version'] for cvv_ in content_view['versions']] diff --git a/tests/foreman/cli/test_contentviewfilter.py b/tests/foreman/cli/test_contentviewfilter.py index 4881b8ee28f..65ffeb940a5 100644 --- a/tests/foreman/cli/test_contentviewfilter.py +++ b/tests/foreman/cli/test_contentviewfilter.py @@ -4,43 +4,40 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest +import random + from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView from robottelo.cli.defaults import Defaults -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_repository -from robottelo.cli.repository import Repository from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) @pytest.fixture(scope='module') -def sync_repo(module_org, module_product): - repo = make_repository({'organization-id': module_org.id, 'product-id': module_product.id}) - Repository.synchronize({'id': repo['id']}) +def sync_repo(module_org, module_product, module_target_sat): + repo = module_target_sat.cli_factory.make_repository( + {'organization-id': module_org.id, 'product-id': module_product.id} + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) return repo @pytest.fixture -def content_view(module_org, sync_repo): - return make_content_view( +def content_view(module_org, sync_repo, module_target_sat): + return module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': [sync_repo['id']]} ) @@ -52,7 +49,7 @@ class TestContentViewFilter: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.parametrize('filter_content_type', ['rpm', 'package_group', 'erratum', 'modulemd']) def 
test_positive_create_with_name_by_cv_id( - self, name, filter_content_type, module_org, content_view + self, name, filter_content_type, module_org, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by id. Use different value types as a name and random filter @@ -67,7 +64,7 @@ def test_positive_create_with_name_by_cv_id( :CaseImportance: Critical """ - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -75,14 +72,16 @@ def test_positive_create_with_name_by_cv_id( 'type': filter_content_type, }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) assert cvf['name'] == name assert cvf['type'] == filter_content_type @pytest.mark.tier1 @pytest.mark.parametrize('filter_content_type', ['rpm', 'package_group', 'erratum', 'modulemd']) def test_positive_create_with_content_type_by_cv_id( - self, filter_content_type, module_org, content_view + self, filter_content_type, module_org, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by id. Use different content types as a parameter @@ -97,7 +96,7 @@ def test_positive_create_with_content_type_by_cv_id( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -105,12 +104,16 @@ def test_positive_create_with_content_type_by_cv_id( 'type': filter_content_type, }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['type'] == filter_content_type @pytest.mark.tier1 @pytest.mark.parametrize('inclusion', ['true', 'false']) - def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, content_view): + def test_positive_create_with_inclusion_by_cv_id( + self, inclusion, module_org, content_view, module_target_sat + ): """Create new content view filter and assign it to existing content view by id. Use different inclusions as a parameter @@ -124,7 +127,7 @@ def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, co :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': inclusion, @@ -133,11 +136,15 @@ def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, co 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == inclusion @pytest.mark.tier1 - def test_positive_create_with_description_by_cv_id(self, module_org, content_view): + def test_positive_create_with_description_by_cv_id( + self, module_org, content_view, module_target_sat + ): """Create new content view filter with description and assign it to existing content view. 
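The recurring pattern in these hunks replaces module-level CLI wrappers (ContentView, Repository, the cli_factory helpers) with attribute access on the satellite fixture. A minimal sketch of the resulting test shape, assuming a pytest fixture named module_target_sat that exposes .cli and .cli_factory exactly as in the hunks above; the test name and final assertion are illustrative only:

def test_example_fixture_based_cli(module_org, module_target_sat):
    # Factory helpers are reached through the fixture instead of robottelo.cli.factory.
    cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
    # CLI entities are reached through .cli instead of module-level imports.
    module_target_sat.cli.ContentView.publish({'id': cv['id']})
    cv_info = module_target_sat.cli.ContentView.info({'id': cv['id']})
    assert len(cv_info['versions']) == 1

Routing everything through the fixture lets each test target a specific Satellite instance instead of an implicitly configured global one.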
@@ -150,7 +157,7 @@ def test_positive_create_with_description_by_cv_ie """ description = gen_string('utf8') cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'description': description, @@ -159,13 +166,15 @@ def test_positive_create_with_description_by_cv_ie 'type': 'package_group', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['description'] == description @pytest.mark.run_in_one_thread @pytest.mark.tier1 def test_positive_create_with_default_taxonomies( - self, module_org, module_location, content_view + self, module_org, module_location, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by name. Use default organization and location to find necessary @@ -184,7 +193,7 @@ def test_positive_create_with_default_taxonomies( Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id}) Defaults.add({'param-name': 'location_id', 'param-value': module_location.id}) try: - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'name': name, @@ -192,14 +201,16 @@ def test_positive_create_with_default_taxonomies( 'inclusion': 'true', }, ) - cvf = ContentView.filter.info({'content-view': content_view['name'], 'name': name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view': content_view['name'], 'name': name} + ) assert cvf['name'] == name finally: Defaults.delete({'param-name': 'organization_id'}) Defaults.delete({'param-name': 'location_id'}) @pytest.mark.tier1 - def test_positive_list_by_name_and_org(self, module_org, content_view): + def test_positive_list_by_name_and_org(self, module_org, content_view, module_target_sat): """Create new content view filter and try to list it by its name and the organization it belongs to @@ -214,7 +225,7 @@ def test_positive_list_by_name_and_org(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -222,14 +233,14 @@ def test_positive_list_by_name_and_org(self, module_org, content_view): 'type': 'package_group', }, ) - cv_filters = ContentView.filter.list( + cv_filters = module_target_sat.cli.ContentView.filter.list( {'content-view': content_view['name'], 'organization': module_org.name} ) assert len(cv_filters) >= 1 assert cvf_name in [cvf['name'] for cvf in cv_filters] @pytest.mark.tier1 - def test_positive_create_by_cv_name(self, module_org, content_view): + def test_positive_create_by_cv_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. 
Use organization id for reference @@ -242,7 +253,7 @@ def test_positive_create_by_cv_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'true', @@ -251,10 +262,12 @@ def test_positive_create_by_cv_name(self, module_org, content_view): 'type': 'package_group', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_by_org_name(self, module_org, content_view): + def test_positive_create_by_org_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. Use organization name for reference @@ -265,7 +278,7 @@ def test_positive_create_by_org_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'false', @@ -274,10 +287,12 @@ def test_positive_create_by_org_name(self, module_org, content_view): 'type': 'erratum', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_by_org_label(self, module_org, content_view): + def test_positive_create_by_org_label(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. Use organization label for reference @@ -288,7 +303,7 @@ def test_positive_create_by_org_label(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'true', @@ -297,10 +312,14 @@ def test_positive_create_by_org_label(self, module_org, content_view): 'type': 'erratum', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_view): + def test_positive_create_with_repo_by_id( + self, module_org, sync_repo, content_view, module_target_sat + ): """Create new content view filter and assign it to existing content view that has repository assigned to it. Use that repository id for proper filter assignment. 
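All of these filter tests drive hammer through the same thin wrapper: each key in the options dict is rendered as a corresponding command-line flag. A sketch of that correspondence, assuming robottelo's usual option rendering; the hammer invocation in the comment is illustrative, with placeholder values:

# Creating a filter through the wrapper...
module_target_sat.cli.ContentView.filter.create(
    {
        'content-view-id': content_view['id'],
        'name': cvf_name,
        'inclusion': 'true',
        'type': 'rpm',
    }
)
# ...roughly corresponds to running on the Satellite host:
#   hammer content-view filter create --content-view-id <ID> \
#       --name <CVF_NAME> --inclusion true --type rpm

This is why the tests can interchangeably reference the content view by 'content-view-id', 'content-view' (name), 'organization', or 'organization-label': they simply select different hammer lookup flags.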
@@ -313,7 +332,7 @@ def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_vi :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -323,14 +342,16 @@ def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_vi 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) # Check that only one, specified above, repository is displayed assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier1 def test_positive_create_with_repo_by_name( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view that has repository assigned to it. Use that repository name for @@ -346,7 +367,7 @@ def test_positive_create_with_repo_by_name( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'false', @@ -357,13 +378,15 @@ def test_positive_create_with_repo_by_name( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) # Check that only one, specified above, repository is displayed assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier1 - def test_positive_create_with_original_pkgs(self, sync_repo, content_view): + def test_positive_create_with_original_pkgs(self, sync_repo, content_view, module_target_sat): """Create new content view filter and assign it to existing content view that has repository assigned to it. Enable 'original packages' option for that filter @@ -376,7 +399,7 @@ def test_positive_create_with_original_pkgs(self, sync_repo, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -386,12 +409,14 @@ def test_positive_create_with_original_pkgs(self, sync_repo, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier2 def test_positive_create_with_repos_yum_and_docker( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new docker repository and add to content view that has yum repo already assigned to it. 
Create new content view filter and assign @@ -403,7 +428,7 @@ def test_positive_create_with_repos_yum_and_docker( :expectedresults: Content view filter created successfully and has both repositories affected (yum and docker) """ - docker_repository = make_repository( + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'busybox', @@ -413,12 +438,12 @@ def test_positive_create_with_repos_yum_and_docker( }, ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'repository-id': docker_repository['id']} ) repos = [sync_repo['id'], docker_repository['id']] cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -428,14 +453,18 @@ def test_positive_create_with_repos_yum_and_docker( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 2 for repo in cvf['repositories']: assert repo['id'] in repos @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name, module_org, content_view): + def test_negative_create_with_invalid_name( + self, name, module_org, content_view, module_target_sat + ): """Try to create content view filter using invalid names only :id: f3497a23-6e34-4fee-9964-f95762fc737c @@ -447,7 +476,7 @@ def test_negative_create_with_invalid_name(self, name, module_org, content_view) :CaseImportance: Low """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -457,7 +486,7 @@ def test_negative_create_with_invalid_name(self, name, module_org, content_view) ) @pytest.mark.tier1 - def test_negative_create_with_same_name(self, module_org, content_view): + def test_negative_create_with_same_name(self, module_org, content_view, module_target_sat): """Try to create content view filter using same name twice :id: 7e7444f4-e2b5-406d-a210-49b4008c88d9 @@ -467,7 +496,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): :CaseImportance: Low """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -476,7 +505,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -486,7 +515,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_create_without_type(self, module_org, content_view): + def test_negative_create_without_type(self, module_org, content_view, module_target_sat): """Try to create content view filter without providing required parameter 'type' @@ -497,7 +526,7 @@ def test_negative_create_without_type(self, module_org, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': 
gen_string('utf8'), @@ -506,7 +535,7 @@ def test_negative_create_without_type(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_create_without_cv(self): + def test_negative_create_without_cv(self, module_target_sat): """Try to create content view filter without providing content view information which should be used as basis for filter @@ -517,10 +546,14 @@ def test_negative_create_without_cv(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create({'name': gen_string('utf8'), 'type': 'rpm'}) + module_target_sat.cli.ContentView.filter.create( + {'name': gen_string('utf8'), 'type': 'rpm'} + ) @pytest.mark.tier1 - def test_negative_create_with_invalid_repo_id(self, module_org, content_view): + def test_negative_create_with_invalid_repo_id( + self, module_org, content_view, module_target_sat + ): """Try to create content view filter using incorrect repository :id: 21fdbeca-ad0a-4e29-93dc-f850b5639f4f @@ -530,7 +563,7 @@ def test_negative_create_with_invalid_repo_id(self, module_org, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': gen_string('utf8'), @@ -542,7 +575,7 @@ def test_negative_create_with_invalid_repo_id(self, module_org, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) - def test_positive_update_name(self, new_name, module_org, content_view): + def test_positive_update_name(self, new_name, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. Try to update that filter using different value types as a name @@ -554,12 +587,10 @@ def test_positive_update_name(self, new_name, module_org, content_view): :expectedresults: Content view filter updated successfully and has proper and expected name - :CaseLevel: Integration - :CaseImportance: Critical """ cvf_name = gen_string('utf8') - cvf = ContentView.filter.create( + cvf = module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -567,19 +598,21 @@ def test_positive_update_name(self, new_name, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'id': cvf['filter-id'], 'new-name': new_name, } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': new_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': new_name} + ) assert cvf['name'] == new_name @pytest.mark.tier2 def test_positive_update_repo_with_same_type( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and apply it to existing content view that has repository assigned to it. 
Try to update that filter and @@ -590,12 +623,10 @@ def test_positive_update_repo_with_same_type( :expectedresults: Content view filter updated successfully and has new repository affected - :CaseLevel: Integration - :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -603,16 +634,20 @@ def test_positive_update_repo_with_same_type( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] - new_repo = make_repository( + new_repo = module_target_sat.cli_factory.make_repository( {'organization-id': module_org.id, 'product-id': module_product.id}, ) - ContentView.add_repository({'id': content_view['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': new_repo['id']} + ) - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -620,7 +655,9 @@ def test_positive_update_repo_with_same_type( } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] != sync_repo['name'] assert cvf['repositories'][0]['name'] == new_repo['name'] @@ -628,7 +665,7 @@ def test_positive_update_repo_with_same_type( @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_update_repo_with_different_type( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and apply it to existing content view that has repository assigned to it. 
Try to update that filter and @@ -640,10 +677,9 @@ def test_positive_update_repo_with_different_type( :expectedresults: Content view filter updated successfully and has new repository affected - :CaseLevel: Integration """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -651,10 +687,12 @@ def test_positive_update_repo_with_different_type( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] - docker_repo = make_repository( + docker_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'busybox', @@ -663,21 +701,25 @@ def test_positive_update_repo_with_different_type( 'url': CONTAINER_REGISTRY_HUB, }, ) - ContentView.add_repository({'id': content_view['id'], 'repository-id': docker_repo['id']}) - ContentView.filter.update( + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': docker_repo['id']} + ) + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, 'repository-ids': docker_repo['id'], } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] != sync_repo['name'] assert cvf['repositories'][0]['name'] == docker_repo['name'] @pytest.mark.tier2 - def test_positive_update_inclusion(self, module_org, content_view): + def test_positive_update_inclusion(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. 
Try to update that filter and assign opposite inclusion value for it @@ -687,10 +729,9 @@ def test_positive_update_inclusion(self, module_org, content_view): :expectedresults: Content view filter updated successfully and has correct and expected value for inclusion parameter - :CaseLevel: Integration """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -699,21 +740,25 @@ def test_positive_update_inclusion(self, module_org, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'true' - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, 'inclusion': 'false', } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'false' @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_with_name(self, new_name, content_view): + def test_negative_update_with_name(self, new_name, content_view, module_target_sat): """Try to update content view filter using invalid names only :id: 6c40e452-f786-4e28-9f03-b1935b55b33a @@ -727,11 +772,11 @@ def test_negative_update_with_name(self, new_name, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( {'content-view-id': content_view['id'], 'name': cvf_name, 'type': 'rpm'} ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -739,10 +784,12 @@ def test_negative_update_with_name(self, new_name, content_view): } ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': new_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': new_name} + ) @pytest.mark.tier1 - def test_negative_update_with_same_name(self, module_org, content_view): + def test_negative_update_with_same_name(self, module_org, content_view, module_target_sat): """Try to update content view filter using name of already existing entity @@ -753,7 +800,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -762,7 +809,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): }, ) new_name = gen_string('alpha', 100) - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': new_name, @@ -771,7 +818,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': new_name, @@ -780,7 +827,7 @@ def 
test_negative_update_with_same_name(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_update_inclusion(self, module_org, content_view): + def test_negative_update_inclusion(self, module_org, content_view, module_target_sat): """Try to update content view filter and assign incorrect inclusion value for it @@ -791,7 +838,7 @@ def test_negative_update_inclusion(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -801,18 +848,22 @@ def test_negative_update_inclusion(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'inclusion': 'wrong_value', 'name': cvf_name, } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'true' @pytest.mark.tier1 - def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view): + def test_negative_update_with_non_existent_repo_id( + self, sync_repo, content_view, module_target_sat + ): """Try to update content view filter using non-existing repository ID :id: 457af8c2-fb32-4164-9e19-98676f4ea063 @@ -822,7 +873,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -831,7 +882,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -841,7 +892,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view @pytest.mark.tier1 def test_negative_update_with_invalid_repo_id( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Try to update filter and assign repository which does not belong to filter content view @@ -853,7 +904,7 @@ def test_negative_update_with_invalid_repo_id( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -861,11 +912,11 @@ def test_negative_update_with_invalid_repo_id( 'type': 'rpm', }, ) - new_repo = make_repository( + new_repo = module_target_sat.cli_factory.make_repository( {'organization-id': module_org.id, 'product-id': module_product.id}, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -875,7 +926,7 @@ def test_negative_update_with_invalid_repo_id( @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_delete_by_name(self, name, module_org, content_view): + def test_positive_delete_by_name(self, name, module_org, content_view, module_target_sat): """Create new content view filter and assign it to 
existing content view by id. Try to delete that filter using different value types as a name @@ -888,7 +939,7 @@ def test_positive_delete_by_name(self, name, module_org, content_view): :CaseImportance: Critical """ - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -896,14 +947,20 @@ def test_positive_delete_by_name(self, name, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) - ContentView.filter.delete({'content-view-id': content_view['id'], 'name': name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) + module_target_sat.cli.ContentView.filter.delete( + {'content-view-id': content_view['id'], 'name': name} + ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_delete_by_id(self, module_org, content_view): + def test_positive_delete_by_id(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. Try to delete that filter using its id as a parameter @@ -914,7 +971,7 @@ def test_positive_delete_by_id(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -922,13 +979,17 @@ def test_positive_delete_by_id(self, module_org, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) - ContentView.filter.delete({'id': cvf['filter-id']}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) + module_target_sat.cli.ContentView.filter.delete({'id': cvf['filter-id']}) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_delete_by_org_name(self, module_org, content_view): + def test_positive_delete_by_org_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. 
Try to delete that filter using organization and content view names where that filter was applied @@ -940,7 +1001,7 @@ def test_positive_delete_by_org_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -948,8 +1009,10 @@ def test_positive_delete_by_org_name(self, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) - ContentView.filter.delete( + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) + module_target_sat.cli.ContentView.filter.delete( { 'content-view': content_view['name'], 'name': cvf_name, @@ -957,10 +1020,12 @@ def test_positive_delete_by_org_name(self, module_org, content_view): } ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_negative_delete_by_name(self, content_view): + def test_negative_delete_by_name(self, content_view, module_target_sat): """Try to delete non-existent filter using generated name :id: 84509061-6652-4594-b68a-4566c04bc289 @@ -970,6 +1035,89 @@ def test_negative_delete_by_name(self, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.delete( + module_target_sat.cli.ContentView.filter.delete( {'content-view-id': content_view['id'], 'name': gen_string('utf8')} ) + + @pytest.mark.tier2 + def test_positive_check_filters_applied(self, target_sat, module_org, content_view): + """Ensure the applied filters are indicated and listed correctly in the CVV info. + + :id: ab72af3f-6bee-4aa8-a74a-637fe9b7e34a + + :steps: + 1. Publish first CVV with no filters applied, assert no applied filters are indicated + nor listed. + 2. Randomly add filters to the CV, publish new CVVs and assert that the applied filters + are indicated and the correct filters are listed. + 3. Randomly remove filters from the CV, publish new CVVs and assert that the applied + filters are indicated when at least one filter exists and the correct filters were + removed. + + :expectedresults: + 1. Hammer shows correctly if a CVV has filter(s) applied, no matter the filter type, + count, nor order. + 2. Hammer lists correctly the applied filter(s), no matter the filter type, count + nor order. 
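Distilled from the calls above, the satellite-scoped hammer invocation this file converges on; a minimal sketch assuming a `module_target_sat` Satellite fixture (the helper name is illustrative, not part of the patch):

    def _toggle_filter_inclusion(module_target_sat, content_view, cvf_name, inclusion):
        # Flip a content-view filter's inclusion flag; hammer expects 'true'/'false' strings.
        module_target_sat.cli.ContentView.filter.update(
            {'content-view-id': content_view['id'], 'name': cvf_name, 'inclusion': inclusion}
        )
        # Re-read the filter so assertions run against fresh hammer output.
        return module_target_sat.cli.ContentView.filter.info(
            {'content-view-id': content_view['id'], 'name': cvf_name}
        )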
+ """ + f_types = ['rpm', 'package_group', 'erratum', 'modulemd', 'docker'] + filters_applied = [] + + # Publish first CVV with no filters applied + target_sat.cli.ContentView.publish({'id': content_view['id']}) + cvv = target_sat.cli.ContentView.info({'id': content_view['id']})['versions'][0] + cvv_info = target_sat.cli.ContentView.version_info( + {'id': cvv['id'], 'include-applied-filters': 'true'} + ) + assert cvv_info['has-applied-filters'] == 'no' + assert 'applied-filters' not in cvv_info + + # Randomly add filters to the CV, assert correct CVV info values + for f_type in random.choices(f_types, k=random.randint(1, 5)): + cvf = target_sat.cli.ContentView.filter.create( + { + 'content-view-id': content_view['id'], + 'name': gen_string('alpha'), + 'organization-id': module_org.id, + 'type': f_type, + 'inclusion': random.choice(['true', 'false']), + }, + ) + filters_applied.append(cvf) + + target_sat.cli.ContentView.publish({'id': content_view['id']}) + cvv = max( + target_sat.cli.ContentView.info({'id': content_view['id']})['versions'], + key=lambda x: int(x['id']), + ) + cvv_info = target_sat.cli.ContentView.version_info( + {'id': cvv['id'], 'include-applied-filters': 'true'} + ) + assert cvv_info['has-applied-filters'] == 'yes' + assert len(cvv_info['applied-filters']) == len(filters_applied) + f_listed = [f for f in cvv_info['applied-filters'] if f['id'] == cvf['filter-id']] + assert len(f_listed) == 1 + assert f_listed[0]['name'] == cvf['name'] + + # Randomly remove filters from the CV, assert correct CVV info values + random.shuffle(filters_applied) + for _ in range(len(filters_applied)): + cvf = filters_applied.pop() + target_sat.cli.ContentView.filter.delete({'id': cvf['filter-id']}) + + target_sat.cli.ContentView.publish({'id': content_view['id']}) + cvv = max( + target_sat.cli.ContentView.info({'id': content_view['id']})['versions'], + key=lambda x: int(x['id']), + ) + cvv_info = target_sat.cli.ContentView.version_info( + {'id': cvv['id'], 'include-applied-filters': 'true'} + ) + if len(filters_applied) > 0: + assert cvv_info['has-applied-filters'] == 'yes' + assert len(cvv_info['applied-filters']) == len(filters_applied) + assert cvf['filter-id'] not in [f['id'] for f in cvv_info['applied-filters']] + assert cvf['name'] not in [f['name'] for f in cvv_info['applied-filters']] + else: + assert cvv_info['has-applied-filters'] == 'no' + assert 'applied-filters' not in cvv_info diff --git a/tests/foreman/cli/test_discoveredhost.py b/tests/foreman/cli/test_discoveredhost.py index ac138e66858..e578f35e1ac 100644 --- a/tests/foreman/cli/test_discoveredhost.py +++ b/tests/foreman/cli/test_discoveredhost.py @@ -8,56 +8,137 @@ :Team: Rocket -:TestType: Functional - -:CaseLevel: System - -:Upstream: No """ -from time import sleep - import pytest - -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.discoveredhost import DiscoveredHost +from wait_for import wait_for pytestmark = [pytest.mark.run_in_one_thread] -def _assertdiscoveredhost(hostname): - """Check if host is discovered and information about it can be - retrieved back - - Introduced a delay of 300secs by polling every 10 secs to get expected - host - """ - for _ in range(30): - try: - discovered_host = DiscoveredHost.info({'name': hostname}) - except CLIReturnCodeError: - sleep(10) - continue - return discovered_host - +@pytest.mark.e2e +@pytest.mark.on_premises_provisioning +@pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) +@pytest.mark.parametrize('pxe_loader', ['bios', 
'uefi'], indirect=True) +@pytest.mark.rhel_ver_match('7') +def test_rhel_pxe_discovery_provisioning( + module_provisioning_rhel_content, + module_discovery_sat, + provisioning_host, + provisioning_hostgroup, + request, +): + """Provision a PXE-based discovered host -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_pxe_based_discovery(): - """Discover a host via PXE boot by setting "proxy.type=proxy" in - PXE default + :id: b32a3b05-86bc-4ba6-ab6c-22b2f81e4315 - :id: 25e935fe-18f4-477e-b791-7ea5a395b4f6 + :parametrized: yes - :Setup: Provisioning should be configured + :Setup: Satellite with Provisioning and Discovery features configured - :Steps: PXE boot a host/VM + :steps: + 1. Boot up the host to discover + 2. Provision the host - :expectedresults: Host should be successfully discovered + :expectedresults: Host should be successfully discovered and provisioned :CaseImportance: Critical :BZ: 1731112 """ + sat = module_discovery_sat.sat + provisioning_host.power_control(ensure=False) + mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] + + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=1500, + delay=40, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + result = sat.cli.DiscoveredHost.provision( + { + 'id': discovered_host.id, + 'hostgroup-id': discovered_host.hostgroup.id, + 'organization-id': discovered_host.organization.id, + 'location-id': discovered_host.location.id, + } + ) + + assert 'Host created' in result[0]['message'] + host = sat.api.Host().search(query={"search": f'id={discovered_host.id}'})[0] + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + assert host + + wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + assert host.read().build_status_label == 'Installed' + assert not sat.api.DiscoveredHost().search(query={'mac': mac}) + + +@pytest.mark.e2e +@pytest.mark.on_premises_provisioning +@pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.rhel_ver_match('7') +def test_rhel_pxeless_discovery_provisioning( + module_discovery_sat, + pxeless_discovery_host, + module_provisioning_rhel_content, + provisioning_hostgroup, + request, +): + """Provision a PXE-less discovered host + + :id: e75ee13a-9edc-4182-b02a-6b106a459751 + + :Setup: Provisioning should be configured and a host should be + discovered via cli + + :expectedresults: Host should be provisioned successfully + + :CaseImportance: Critical + """ + sat = module_discovery_sat.sat + pxeless_discovery_host.power_control(ensure=False) + mac = pxeless_discovery_host._broker_args['provisioning_nic_mac_addr'] + + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=1500, + delay=40, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + result = sat.cli.DiscoveredHost.provision( + { + 'id': discovered_host.id, + 'hostgroup-id': 
discovered_host.hostgroup.id, + 'organization-id': discovered_host.organization.id, + 'location-id': discovered_host.location.id, + } + ) + assert 'Host created' in result[0]['message'] + host = sat.api.Host().search(query={"search": f'id={discovered_host.id}'})[0] + request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + assert host + + wait_for( + lambda: host.read().build_status_label != 'Pending installation', + timeout=1500, + delay=10, + ) + assert host.read().build_status_label == 'Installed' + assert not sat.api.DiscoveredHost().search(query={'mac': mac}) @pytest.mark.stubbed @@ -71,7 +152,7 @@ def test_positive_provision_pxeless_bios_syslinux(): :Setup: 1. Craft the FDI with remaster the image to have ssh enabled - :Steps: + :steps: 1. Create a BIOS VM and set it to boot from the FDI 2. Run assertion steps #1-2 3. Provision the discovered host using PXELinux loader @@ -149,57 +230,6 @@ def test_positive_provision_pxe_host_with_bios_syslinux(): """ -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_provision_pxe_host_with_uefi_grub2(): - """Provision the pxe-based UEFI discovered host from cli using PXEGRUB2 - loader - - :id: 0002af1b-6f4b-40e2-8f2f-343387be6f72 - - :Setup: - 1. Create an UEFI VM and set it to boot from a network - 2. Synchronize RHEL7 kickstart repo (rhel6 kernel too old for GRUB) - 3. for getting more detailed info from FDI, remaster the image to - have ssh enabled - - :steps: - 1. Build a default PXE template - 2. Run assertion step #1 - 3. Boot the VM (from NW) - 4. Run assertion steps #2-4 - 5. Provision the discovered host - 6. Run assertion steps #5-9 - - :expectedresults: Host should be provisioned successfully - 1. Ensure the tftpboot files are updated - - 1.1 Ensure fdi-image files have been placed under tftpboot/boot/ - 1.2 Ensure the 'default' pxelinux config has been placed under - tftpboot/pxelinux.cfg/ - 1.3 Ensure the discovery section exists inside pxelinux config, - it leads to the FDI kernel and the ONTIMEOUT is set to discovery - - 2. Ensure PXE handoff goes as expected (tcpdump -p tftp) - 3. Ensure FDI loaded and successfully sent out facts - - 3.1 ping vm - 3.2 ssh to the VM and read the logs (if ssh enabled) - 3.3 optionally sniff the HTTP traffic coming from the host - - 4. Ensure host appeared in Discovered Hosts on satellite - 5. Ensure the tftpboot files are updated for the hosts mac - 6. Ensure PXE handoff goes as expected (tcpdump -p tftp) - 7. Optionally ensure anaconda loaded and the installation finished - 8. Ensure the host is provisioned with correct attributes - 9. Ensure the entry from discovered host list disappeared - - :CaseAutomation: NotAutomated - - :CaseImportance: High - """ - - @pytest.mark.stubbed @pytest.mark.tier3 def test_positive_delete(): @@ -373,7 +403,7 @@ def test_positive_list_facts(): :Setup: 1. 
Provisioning is configured and Host is already discovered - :Steps: Validate specified builtin and custom facts + :steps: Validate specified builtin and custom facts :expectedresults: All checked facts should be displayed correctly diff --git a/tests/foreman/cli/test_discoveryrule.py b/tests/foreman/cli/test_discoveryrule.py index 66ffc5a49e6..2d9059f196b 100644 --- a/tests/foreman/cli/test_discoveryrule.py +++ b/tests/foreman/cli/test_discoveryrule.py @@ -4,49 +4,37 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: DiscoveryPlugin :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import random from functools import partial +import random -import pytest from box import Box -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_string -from nailgun.entities import Role as RoleEntity -from nailgun.entities import User as UserEntity +from fauxfactory import gen_choice, gen_integer, gen_string +from nailgun.entities import Role as RoleEntity, User as UserEntity +import pytest from requests import HTTPError -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.discoveryrule import DiscoveryRule -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_discoveryrule -from robottelo.cli.factory import make_hostgroup -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.logging import logger -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + filtered_datapoint, + invalid_values_list, + parametrized, + valid_data_list, +) @filtered_datapoint def invalid_hostnames_list(): """Generates a list of invalid host names. - :return: Returns the invalid host names list + :return: Returns the invalid host names list. 
""" return { 'cjk': gen_string('cjk'), @@ -67,8 +55,8 @@ def gen_int32(min_value=1): class TestDiscoveryRule: """Implements Foreman discovery Rules tests in CLI.""" - @pytest.fixture(scope='function') - def discoveryrule_factory(self, class_org, class_location, class_hostgroup): + @pytest.fixture + def discoveryrule_factory(self, class_org, class_location, class_hostgroup, target_sat): def _create_discoveryrule(org, loc, hostgroup, options=None): """Makes a new discovery rule and asserts its success""" options = options or {} @@ -95,7 +83,7 @@ def _create_discoveryrule(org, loc, hostgroup, options=None): # create a simple object from the dictionary that the CLI factory provides # This allows for consistent attributized access of all fixture entities in the tests - return Box(make_discoveryrule(options)) + return Box(target_sat.cli_factory.discoveryrule(options)) return partial( _create_discoveryrule, org=class_org, loc=class_location, hostgroup=class_hostgroup @@ -103,20 +91,22 @@ def _create_discoveryrule(org, loc, hostgroup, options=None): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_name(self, name, discoveryrule_factory, request, target_sat): + def test_positive_create_with_name(self, name, discoveryrule_factory, target_sat): """Create Discovery Rule using different names :id: 066e66bc-c572-4ae9-b458-90daf83bab54 - :expectedresults: Rule should be successfully created + :expectedresults: Rule should be successfully created. :CaseImportance: Critical :parametrized: yes """ rule = discoveryrule_factory(options={'name': name, 'priority': gen_int32()}) - request.addfinalizer(target_sat.api.DiscoveryRule(id=rule.id).delete) assert rule.name == name + target_sat.cli.DiscoveryRule.delete({'id': rule.id}) + with pytest.raises(CLIReturnCodeError): + target_sat.cli.DiscoveryRule.info({'id': rule.id}) @pytest.mark.tier1 def test_positive_create_with_search(self, discoveryrule_factory): @@ -132,6 +122,9 @@ def test_positive_create_with_search(self, discoveryrule_factory): search_query = 'cpu_count = 2' rule = discoveryrule_factory(options={'search': search_query}) assert rule.search == search_query + custom_query = 'processor = x86' + rule = discoveryrule_factory(options={'search': custom_query}) + assert rule.search == custom_query @pytest.mark.tier2 def test_positive_create_with_hostname(self, discoveryrule_factory): @@ -142,21 +135,20 @@ def test_positive_create_with_hostname(self, discoveryrule_factory): :expectedresults: Rule should be successfully created and has expected hostname value - :CaseLevel: Component """ host_name = 'myhost' rule = discoveryrule_factory(options={'hostname': host_name}) assert rule['hostname-template'] == host_name @pytest.mark.tier1 - def test_positive_create_with_org_loc_id( - self, discoveryrule_factory, class_org, class_location, class_hostgroup + def test_positive_create_and_update_with_org_loc_id( + self, discoveryrule_factory, class_org, class_location, class_hostgroup, target_sat ): - """Create discovery rule by associating org and location ids + """Create discovery rule by associating org and location ids and update :id: bdb4c581-d27a-4d1a-920b-89689e68a57f - :expectedresults: Rule was created and with given org & location names. + :expectedresults: Rule was created with given org & location ids and updated. 
:BZ: 1377990, 1523221 @@ -172,15 +164,33 @@ def test_positive_create_with_org_loc_id( assert class_org.name in rule.organizations assert class_location.name in rule.locations + new_org = target_sat.cli_factory.make_org() + new_loc = target_sat.cli_factory.make_location() + new_hostgroup = target_sat.cli_factory.hostgroup( + {'organization-ids': new_org.id, 'location-ids': new_loc.id} + ) + target_sat.cli.DiscoveryRule.update( + { + 'id': rule.id, + 'organization-ids': new_org.id, + 'location-ids': new_loc.id, + 'hostgroup-id': new_hostgroup.id, + } + ) + rule = target_sat.cli.DiscoveryRule.info({'id': rule.id}, output_format='json') + assert new_org.name == rule['organizations'][0]['name'] + assert new_loc.name == rule['locations'][0]['name'] + assert new_hostgroup.name == rule['host-group']['name'] + @pytest.mark.tier2 - def test_positive_create_with_org_loc_name( - self, discoveryrule_factory, class_org, class_location, class_hostgroup + def test_positive_create_and_update_with_org_loc_name( + self, discoveryrule_factory, class_org, class_location, class_hostgroup, target_sat ): - """Create discovery rule by associating org and location names + """Create discovery rule by associating org and location names and update :id: f0d550ae-16d8-48ec-817e-d2e5b7405b46 - :expectedresults: Rule was created and with given org & location names. + :expectedresults: Rule was created with given org & location names and updated. :BZ: 1377990 """ @@ -194,6 +204,25 @@ def test_positive_create_with_org_loc_name( assert class_org.name in rule.organizations assert class_location.name in rule.locations + new_org = target_sat.cli_factory.make_org() + new_loc = target_sat.cli_factory.make_location() + new_hostgroup = target_sat.cli_factory.hostgroup( + {'organization-ids': new_org.id, 'location-ids': new_loc.id} + ) + + target_sat.cli.DiscoveryRule.update( + { + 'id': rule.id, + 'organizations': new_org.name, + 'locations': new_loc.name, + 'hostgroup-id': new_hostgroup.id, + } + ) + rule = target_sat.cli.DiscoveryRule.info({'id': rule.id}, output_format='json') + assert new_org.name == rule['organizations'][0]['name'] + assert new_loc.name == rule['locations'][0]['name'] + assert new_hostgroup.name == rule['host-group']['name'] + @pytest.mark.tier2 def test_positive_create_with_hosts_limit(self, discoveryrule_factory): """Create Discovery Rule providing any number from range 1..100 for @@ -204,28 +233,33 @@ def test_positive_create_with_hosts_limit(self, discoveryrule_factory): :expectedresults: Rule should be successfully created and has expected hosts limit value - :CaseLevel: Component """ hosts_limit = '5' rule = discoveryrule_factory(options={'hosts-limit': hosts_limit}) assert rule['hosts-limit'] == hosts_limit @pytest.mark.tier1 - def test_positive_create_with_priority(self, discoveryrule_factory): + def test_positive_create_and_update_with_priority(self, discoveryrule_factory, target_sat): """Create Discovery Rule providing any number from range 1..100 for - priority option + priority option and update :id: 8ef58279-0ad3-41a4-b8dd-65594afdb655 - :expectedresults: Rule should be successfully created and has expected + :expectedresults: Rule should be successfully created/updated and has expected priority value :CaseImportance: Critical """ - available = set(range(1, 1000)) - {int(Box(r).priority) for r in DiscoveryRule.list()} + available = set(range(1, 1000)) - { + int(Box(r).priority) for r in target_sat.cli.DiscoveryRule.list() + } rule_priority = random.sample(sorted(available), 1) rule = 
discoveryrule_factory(options={'priority': rule_priority[0]}) assert rule.priority == str(rule_priority[0]) + # Update + target_sat.cli.DiscoveryRule.update({'id': rule.id, 'priority': rule_priority[0]}) + rule = Box(target_sat.cli.DiscoveryRule.info({'id': rule.id})) + assert rule.priority == str(rule_priority[0]) @pytest.mark.tier2 def test_positive_create_disabled_rule(self, discoveryrule_factory): @@ -235,7 +269,6 @@ def test_positive_create_disabled_rule(self, discoveryrule_factory): :expectedresults: Disabled rule should be successfully created - :CaseLevel: Component """ rule = discoveryrule_factory(options={'enabled': 'false'}) assert rule.enabled == 'false' @@ -251,8 +284,6 @@ def test_negative_create_with_invalid_name(self, name, discoveryrule_factory): :CaseImportance: Medium - :CaseLevel: Component - :parametrized: yes """ with pytest.raises(CLIFactoryError): @@ -269,8 +300,6 @@ def test_negative_create_with_invalid_hostname(self, name, discoveryrule_factory :CaseImportance: Medium - :CaseLevel: Component - :BZ: 1378427 :parametrized: yes @@ -307,189 +336,43 @@ def test_negative_create_with_same_name(self, discoveryrule_factory): with pytest.raises(CLIFactoryError): discoveryrule_factory(options={'name': name}) - @pytest.mark.tier1 - def test_positive_delete(self, discoveryrule_factory): - """Delete existing Discovery Rule - - :id: c9b88a94-13c4-496f-a5c1-c088187250dc - - :expectedresults: Rule should be successfully deleted - - :CaseImportance: Critical - """ - rule = discoveryrule_factory() - DiscoveryRule.delete({'id': rule.id}) - with pytest.raises(CLIReturnCodeError): - DiscoveryRule.info({'id': rule.id}) - @pytest.mark.tier3 - def test_positive_update_name(self, discoveryrule_factory): - """Update discovery rule name + def test_positive_update_discovery_params(self, discoveryrule_factory, class_org, target_sat): + """Update discovery rule parameters :id: 1045e2c4-e1f7-42c9-95f7-488fc79bf70b - :expectedresults: Rule name is updated - - :CaseLevel: Component + :expectedresults: Rule params are updated :CaseImportance: Medium """ - rule = discoveryrule_factory() - new_name = gen_string('numeric') - DiscoveryRule.update({'id': rule.id, 'name': new_name}) - rule = Box(DiscoveryRule.info({'id': rule.id})) - assert rule.name == new_name - - @pytest.mark.tier2 - def test_positive_update_org_loc_by_id(self, discoveryrule_factory): - """Update org and location of selected discovery rule using org/loc ids - - :id: 26da79aa-30e5-4052-98ae-141de071a68a - - :expectedresults: Rule was updated and with given org & location. - - :BZ: 1377990 - - :CaseLevel: Component - """ - new_org = Box(make_org()) - new_loc = Box(make_location()) - new_hostgroup = Box( - make_hostgroup({'organization-ids': new_org.id, 'location-ids': new_loc.id}) - ) - rule = discoveryrule_factory() - DiscoveryRule.update( - { - 'id': rule.id, - 'organization-ids': new_org.id, - 'location-ids': new_loc.id, - 'hostgroup-id': new_hostgroup.id, - } - ) - rule = Box(DiscoveryRule.info({'id': rule.id})) - assert new_org.name in rule.organizations - assert new_loc.name in rule.locations - - @pytest.mark.tier3 - def test_positive_update_org_loc_by_name(self, discoveryrule_factory): - """Update org and location of selected discovery rule using org/loc - names - - :id: 7a5d61ac-6a2d-48f6-a00d-df437a7dc3c4 - - :expectedresults: Rule was updated and with given org & location. 
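The priority tests pick a value no existing rule already holds via set subtraction over the `DiscoveryRule.list()` output; a distilled, stand-alone sketch of that selection (the existing-rules list is synthetic):

    import random

    existing = [{'priority': '1'}, {'priority': '42'}]  # stand-in for DiscoveryRule.list()
    available = set(range(1, 1000)) - {int(r['priority']) for r in existing}
    rule_priority = random.sample(sorted(available), 1)[0]
    assert rule_priority not in {int(r['priority']) for r in existing}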
- - :BZ: 1377990 - - :CaseLevel: Component + rule = discoveryrule_factory(options={'hosts-limit': '5'}) + new_name = gen_string('alpha') + new_query = 'model = KVM' + new_hostname = gen_string('alpha') + new_limit = '10' + new_hostgroup = target_sat.cli_factory.hostgroup({'organization-ids': class_org.id}) - :CaseImportance: Medium - """ - new_org = Box(make_org()) - new_loc = Box(make_location()) - new_hostgroup = Box( - make_hostgroup({'organization-ids': new_org.id, 'location-ids': new_loc.id}) - ) - rule = discoveryrule_factory() - DiscoveryRule.update( + target_sat.cli.DiscoveryRule.update( { 'id': rule.id, - 'organizations': new_org.name, - 'locations': new_loc.name, - 'hostgroup-id': new_hostgroup.id, + 'name': new_name, + 'search': new_query, + 'hostgroup': new_hostgroup.name, + 'hostname': new_hostname, + 'hosts-limit': new_limit, } ) - rule = Box(DiscoveryRule.info({'id': rule.id})) - assert new_org.name in rule.organizations - assert new_loc.name in rule.locations - - @pytest.mark.tier2 - def test_positive_update_query(self, discoveryrule_factory): - """Update discovery rule search query - - :id: 86943095-acc5-40ff-8e3c-88c76b36333d - - :expectedresults: Rule search field is updated - :CaseLevel: Component - """ - rule = discoveryrule_factory() - new_query = 'model = KVM' - DiscoveryRule.update({'id': rule.id, 'search': new_query}) - rule = Box(DiscoveryRule.info({'id': rule.id})) + rule = Box(target_sat.cli.DiscoveryRule.info({'id': rule.id})) + assert rule.name == new_name assert rule.search == new_query - - @pytest.mark.tier2 - def test_positive_update_hostgroup(self, discoveryrule_factory, class_org): - """Update discovery rule host group - - :id: 07992a3f-2aa9-4e45-b2e8-ef3d2f255292 - - :expectedresults: Rule host group is updated - - :CaseLevel: Component - """ - new_hostgroup = Box(make_hostgroup({'organization-ids': class_org.id})) - rule = discoveryrule_factory() - DiscoveryRule.update({'id': rule.id, 'hostgroup': new_hostgroup.name}) - rule = DiscoveryRule.info({'id': rule.id}) assert rule['host-group'] == new_hostgroup.name - - @pytest.mark.tier2 - def test_positive_update_hostname(self, discoveryrule_factory): - """Update discovery rule hostname value - - - :id: 4c123488-92df-42f6-afe3-8a88cd90ffc2 - - :expectedresults: Rule host name is updated - - :CaseLevel: Component - """ - new_hostname = gen_string('alpha') - rule = discoveryrule_factory() - DiscoveryRule.update({'id': rule.id, 'hostname': new_hostname}) - rule = Box(DiscoveryRule.info({'id': rule.id})) assert rule['hostname-template'] == new_hostname - - @pytest.mark.tier2 - def test_positive_update_limit(self, discoveryrule_factory): - """Update discovery rule limit value - - :id: efa6f5bc-4d56-4449-90f5-330affbcfb09 - - :expectedresults: Rule host limit field is updated - - :CaseLevel: Component - """ - rule = discoveryrule_factory(options={'hosts-limit': '5'}) - new_limit = '10' - DiscoveryRule.update({'id': rule.id, 'hosts-limit': new_limit}) - rule = Box(DiscoveryRule.info({'id': rule.id})) assert rule['hosts-limit'] == new_limit @pytest.mark.tier1 - def test_positive_update_priority(self, discoveryrule_factory): - """Update discovery rule priority value - - :id: 0543cc73-c692-4bbf-818b-37353ec98986 - - :expectedresults: Rule priority is updated - - :CaseImportance: Critical - """ - available = set(range(1, 1000)) - {int(Box(r).priority) for r in DiscoveryRule.list()} - rule_priority = random.sample(sorted(available), 1) - rule = discoveryrule_factory(options={'priority': rule_priority[0]}) - assert 
rule.priority == str(rule_priority[0]) - available = set(range(1, 1000)) - {int(Box(r).priority) for r in DiscoveryRule.list()} - rule_priority = random.sample(sorted(available), 1) - DiscoveryRule.update({'id': rule.id, 'priority': rule_priority[0]}) - rule = Box(DiscoveryRule.info({'id': rule.id})) - assert rule.priority == str(rule_priority[0]) - - @pytest.mark.tier1 - def test_positive_update_disable_enable(self, discoveryrule_factory): + def test_positive_update_disable_enable(self, discoveryrule_factory, target_sat): """Update discovery rule enabled state. (Disabled->Enabled) :id: 64e8b21b-2ab0-49c3-a12d-02dbdb36647a @@ -500,78 +383,55 @@ def test_positive_update_disable_enable(self, discoveryrule_factory): """ rule = discoveryrule_factory(options={'enabled': 'false'}) assert rule.enabled == 'false' - DiscoveryRule.update({'id': rule.id, 'enabled': 'true'}) - rule = Box(DiscoveryRule.info({'id': rule.id})) + target_sat.cli.DiscoveryRule.update({'id': rule.id, 'enabled': 'true'}) + rule = Box(target_sat.cli.DiscoveryRule.info({'id': rule.id})) assert rule.enabled == 'true' @pytest.mark.tier3 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, name, discoveryrule_factory): - """Update discovery rule name using invalid names only + def test_negative_update_discovery_params(self, name, discoveryrule_factory, target_sat): + """Update discovery rule using invalid parameter values :id: 8293cc6a-d983-460a-b76e-221ad02b54b7 - :expectedresults: Rule name is not updated - - :CaseLevel: Component + :expectedresults: Rule params are not updated :CaseImportance: Medium :parametrized: yes """ rule = discoveryrule_factory() - with pytest.raises(CLIReturnCodeError): - DiscoveryRule.update({'id': rule.id, 'name': name}) - - @pytest.mark.tier3 - def test_negative_update_hostname(self, discoveryrule_factory): - """Update discovery rule host name using number as a value - - :id: c382dbc7-9509-4060-9038-1617f7fef038 - - :expectedresults: Rule host name is not updated - - :CaseImportance: Medium - - :CaseLevel: Component - """ - rule = discoveryrule_factory() - with pytest.raises(CLIReturnCodeError): - DiscoveryRule.update({'id': rule.id, 'hostname': '$#@!*'}) - - @pytest.mark.tier3 - def test_negative_update_limit(self, discoveryrule_factory): - """Update discovery rule host limit using invalid values - - :id: e3257d8a-91b9-406f-bd74-0fd1fb05bb77 - - :expectedresults: Rule host limit is not updated - - :CaseLevel: Component - - :CaseImportance: Medium - """ - rule = discoveryrule_factory() + priority = gen_string('alpha') host_limit = gen_string('alpha') + params = { + 'name': name, + 'hostname': '$#@!*', + 'hosts-limit': host_limit, + 'priority': priority, + } + key = random.choice(list(params.keys())) with pytest.raises(CLIReturnCodeError): - DiscoveryRule.update({'id': rule.id, 'hosts-limit': host_limit}) - - @pytest.mark.tier3 - def test_negative_update_priority(self, discoveryrule_factory): - """Update discovery rule priority using invalid values + target_sat.cli.DiscoveryRule.update( + { + 'id': rule.id, + key: params[key], + } + ) - :id: 0778dd00-aa19-4062-bdf3-752e1b546ec2 + @pytest.mark.tier1 + def test_positive_delete(self, discoveryrule_factory, target_sat): + """Delete existing Discovery Rule - :expectedresults: Rule priority is not updated + :id: c9b88a94-13c4-496f-a5c1-c088187250dc - :CaseLevel: Component + :expectedresults: Rule should be successfully deleted - :CaseImportance: Medium + :CaseImportance: Critical """ rule = 
discoveryrule_factory() - priority = gen_string('alpha') + target_sat.cli.DiscoveryRule.delete({'id': rule.id}) with pytest.raises(CLIReturnCodeError): - DiscoveryRule.update({'id': rule.id, 'priority': priority}) + target_sat.cli.DiscoveryRule.info({'id': rule.id}) class TestDiscoveryRuleRole: @@ -592,8 +452,9 @@ def class_user_manager(self, class_user_password, class_org, class_location): yield user try: user.delete() - except HTTPError: - logger.exception('Exception while deleting class scope user entity in teardown') + except HTTPError as err: + logger.exception(err) + logger.error('Exception while deleting class scope user entity in teardown') @pytest.fixture(scope='class') def class_user_reader(self, class_user_password, class_org, class_location): @@ -610,24 +471,33 @@ def class_user_reader(self, class_user_password, class_org, class_location): yield user try: user.delete() - except HTTPError: - logger.exception('Exception while deleting class scope user entity in teardown') + except HTTPError as err: + logger.exception(err) + logger.error('Exception while deleting class scope user entity in teardown') @pytest.mark.tier2 - def test_positive_create_rule_with_non_admin_user( - self, class_org, class_location, class_user_password, class_user_manager, class_hostgroup + def test_positive_crud_with_non_admin_user( + self, + class_org, + class_location, + class_user_password, + class_user_manager, + class_hostgroup, + target_sat, ): - """Create rule with non-admin user by associating discovery_manager role + """Create, update and delete rule with non-admin user by associating discovery_manager role :id: 056535aa-3338-4c1e-8a4b-ebfc8bd6e456 - :expectedresults: Rule should be created successfully. + :expectedresults: Rule should be created, updated and deleted successfully. - :CaseLevel: Integration """ rule_name = gen_string('alpha') + new_name = gen_string('alpha') rule = Box( - DiscoveryRule.with_user(class_user_manager.login, class_user_password).create( + target_sat.cli.DiscoveryRule.with_user( + class_user_manager.login, class_user_password + ).create( { 'name': rule_name, 'search': 'cpu_count = 5', @@ -638,90 +508,37 @@ def test_positive_create_rule_with_non_admin_user( ) ) rule = Box( - DiscoveryRule.with_user(class_user_manager.login, class_user_password).info( - {'id': rule.id} - ) + target_sat.cli.DiscoveryRule.with_user( + class_user_manager.login, class_user_password + ).info({'id': rule.id}) ) assert rule.name == rule_name - @pytest.mark.tier2 - def test_positive_delete_rule_with_non_admin_user( - self, class_org, class_location, class_user_manager, class_hostgroup, class_user_password - ): - """Delete rule with non-admin user by associating discovery_manager role - - :id: 87ab969b-7d92-478d-a5c0-1c0d50e9bdd6 - - :expectedresults: Rule should be deleted successfully. 
- - :CaseLevel: Integration - """ - rule_name = gen_string('alpha') - rule = Box( - DiscoveryRule.with_user(class_user_manager.login, class_user_password).create( - { - 'name': rule_name, - 'search': 'cpu_count = 5', - 'organizations': class_org.name, - 'locations': class_location.name, - 'hostgroup-id': class_hostgroup.id, - } - ) - ) - rule = Box( - DiscoveryRule.with_user(class_user_manager.login, class_user_password).info( - {'id': rule.id} - ) - ) - - DiscoveryRule.with_user(class_user_manager.login, class_user_password).delete( - {'id': rule.id} + target_sat.cli.DiscoveryRule.update( + { + 'id': rule.id, + 'name': new_name, + } ) - with pytest.raises(CLIReturnCodeError): - DiscoveryRule.info({'id': rule.id}) - - @pytest.mark.tier2 - def test_positive_view_existing_rule_with_non_admin_user( - self, class_org, class_location, class_user_password, class_user_reader, class_hostgroup - ): - """Existing rule should be viewed to non-admin user by associating - discovery_reader role. - - :id: 7b1d90b9-fc2d-4ccb-93d3-605c2da876f7 - :Steps: - - 1. create a rule with admin user - 2. create a non-admin user and assign 'Discovery Reader' role - 3. Login with non-admin user - - :expectedresults: Rule should be visible to non-admin user. + rule = Box(target_sat.cli.DiscoveryRule.info({'id': rule.id})) + assert rule.name == new_name - :CaseLevel: Integration - """ - rule_name = gen_string('alpha') - rule = Box( - make_discoveryrule( - { - 'name': rule_name, - 'enabled': 'false', - 'search': "last_report = Today", - 'organizations': class_org.name, - 'locations': class_location.name, - 'hostgroup-id': class_hostgroup.id, - } - ) - ) - rule = Box( - DiscoveryRule.with_user(class_user_reader.login, class_user_password).info( - {'id': rule.id} - ) - ) - assert rule.name == rule_name + target_sat.cli.DiscoveryRule.with_user( + class_user_manager.login, class_user_password + ).delete({'id': rule.id}) + with pytest.raises(CLIReturnCodeError): + target_sat.cli.DiscoveryRule.info({'id': rule.id}) @pytest.mark.tier2 def test_negative_delete_rule_with_non_admin_user( - self, class_org, class_location, class_user_password, class_user_reader, class_hostgroup + self, + class_org, + class_location, + class_user_password, + class_user_reader, + class_hostgroup, + target_sat, ): """Delete rule with non-admin user by associating discovery_reader role @@ -730,25 +547,23 @@ def test_negative_delete_rule_with_non_admin_user( :expectedresults: User should validation error and rule should not be deleted successfully. 
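These RBAC tests route every hammer call through `with_user`, which binds a login to the subsequent CLI invocation so role permissions are actually exercised; a minimal sketch of the negative path, assuming a discovery_reader user (the helper name and ids are illustrative):

    import pytest

    from robottelo.exceptions import CLIReturnCodeError

    def assert_reader_cannot_delete(target_sat, login, password, rule_id):
        reader_cli = target_sat.cli.DiscoveryRule.with_user(login, password)
        # discovery_reader lacks delete permission, so hammer should exit non-zero.
        with pytest.raises(CLIReturnCodeError):
            reader_cli.delete({'id': rule_id})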
- :CaseLevel: Integration """ - rule = Box( - make_discoveryrule( - { - 'enabled': 'false', - 'search': "last_report = Today", - 'organizations': class_org.name, - 'locations': class_location.name, - 'hostgroup-id': class_hostgroup.id, - } - ) + rule = target_sat.cli_factory.make_discoveryrule( + { + 'enabled': 'false', + 'search': "last_report = Today", + 'organizations': class_org.name, + 'locations': class_location.name, + 'hostgroup-id': class_hostgroup.id, + } ) + rule = Box( - DiscoveryRule.with_user(class_user_reader.login, class_user_password).info( - {'id': rule.id} - ) + target_sat.cli.DiscoveryRule.with_user( + class_user_reader.login, class_user_password + ).info({'id': rule.id}) ) with pytest.raises(CLIReturnCodeError): - DiscoveryRule.with_user(class_user_reader.login, class_user_password).delete( - {'id': rule.id} - ) + target_sat.cli.DiscoveryRule.with_user( + class_user_reader.login, class_user_password + ).delete({'id': rule.id}) diff --git a/tests/foreman/cli/test_docker.py b/tests/foreman/cli/test_docker.py index 91ac3446a17..10b1d1dd4fc 100644 --- a/tests/foreman/cli/test_docker.py +++ b/tests/foreman/cli/test_docker.py @@ -4,45 +4,31 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from random import choice -from random import randint +from random import choice, randint +from fauxfactory import gen_string, gen_url import pytest -from fauxfactory import gen_string -from fauxfactory import gen_url - -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.docker import Docker -from robottelo.cli.factory import make_activation_key -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_product_wait -from robottelo.cli.factory import make_repository -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_RH_REGISTRY_UPSTREAM_NAME -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import REPO_TYPE -from robottelo.utils.datafactory import invalid_docker_upstream_names -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_docker_repository_names -from robottelo.utils.datafactory import valid_docker_upstream_names - -def _repo(product_id, name=None, upstream_name=None, url=None): +from robottelo.config import settings +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_RH_REGISTRY_UPSTREAM_NAME, + CONTAINER_UPSTREAM_NAME, + REPO_TYPE, +) +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_docker_upstream_names, + parametrized, + valid_docker_repository_names, + valid_docker_upstream_names, +) + + +def _repo(sat, product_id, name=None, upstream_name=None, url=None): """Creates a Docker-based repository. :param product_id: ID of the ``Product``. @@ -54,7 +40,7 @@ def _repo(product_id, name=None, upstream_name=None, url=None): CONTAINER_REGISTRY_HUB constant. :return: A ``Repository`` object. 
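Example call, illustrative only (a live Satellite object and an existing product are assumed; the repository name is made up):

    repo = _repo(target_sat, module_product.id, name='busybox-mirror')
    assert repo['content-type'] == REPO_TYPE['docker']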
""" - return make_repository( + return sat.cli_factory.make_repository( { 'content-type': REPO_TYPE['docker'], 'docker-upstream-name': upstream_name or CONTAINER_UPSTREAM_NAME, @@ -65,39 +51,41 @@ def _repo(product_id, name=None, upstream_name=None, url=None): ) -def _content_view(repo_id, org_id): +def _content_view(sat, repo_id, org_id): """Create a content view and link it to the given repository.""" - content_view = make_content_view({'composite': False, 'organization-id': org_id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo_id}) - return ContentView.info({'id': content_view['id']}) + content_view = sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': org_id} + ) + sat.cli.ContentView.add_repository({'id': content_view['id'], 'repository-id': repo_id}) + return sat.cli.ContentView.info({'id': content_view['id']}) @pytest.fixture -def repo(module_product): - return _repo(module_product.id) +def repo(module_product, target_sat): + return _repo(target_sat, module_product.id) @pytest.fixture -def content_view(module_org, repo): - return _content_view(repo['id'], module_org.id) +def content_view(module_org, target_sat, repo): + return _content_view(target_sat, repo['id'], module_org.id) @pytest.fixture -def content_view_publish(content_view): - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - return ContentView.version_info({'id': content_view['versions'][0]['id']}) +def content_view_publish(content_view, target_sat): + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + return target_sat.cli.ContentView.version_info({'id': content_view['versions'][0]['id']}) @pytest.fixture -def content_view_promote(content_view_publish, module_lce): - ContentView.version_promote( +def content_view_promote(content_view_publish, module_lce, target_sat): + target_sat.cli.ContentView.version_promote( { 'id': content_view_publish['id'], 'to-lifecycle-environment-id': module_lce.id, } ) - return ContentView.version_info({'id': content_view_publish['id']}) + return target_sat.cli.ContentView.version_info({'id': content_view_publish['id']}) class TestDockerManifest: @@ -109,7 +97,7 @@ class TestDockerManifest: """ @pytest.mark.tier2 - def test_positive_read_docker_tags(self, repo): + def test_positive_read_docker_tags(self, repo, module_target_sat): """docker manifest displays tags information for a docker manifest :id: 59b605b5-ac2d-46e3-a85e-a259e78a07a8 @@ -121,18 +109,18 @@ def test_positive_read_docker_tags(self, repo): :BZ: 1658274 """ - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Grab all available manifests related to repository - manifests_list = Docker.manifest.list({'repository-id': repo['id']}) + manifests_list = module_target_sat.cli.Docker.manifest.list({'repository-id': repo['id']}) # Some manifests do not have tags associated with it, ignore those # because we want to check the tag information - manifests = [m_iter for m_iter in manifests_list if not m_iter['tags'] == ''] + manifests = [m_iter for m_iter in manifests_list if m_iter['tags'] != ''] assert manifests - tags_list = Docker.tag.list({'repository-id': repo['id']}) + tags_list = module_target_sat.cli.Docker.tag.list({'repository-id': repo['id']}) # Extract tag names for the repository out of docker tag list repo_tag_names = [tag['tag'] for tag in tags_list] for 
manifest in manifests: - manifest_info = Docker.manifest.info({'id': manifest['id']}) + manifest_info = module_target_sat.cli.Docker.manifest.info({'id': manifest['id']}) # Check that manifest's tag is listed in tags for the repository for t_iter in manifest_info['tags']: assert t_iter['name'] in repo_tag_names @@ -148,7 +136,7 @@ class TestDockerRepository: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_docker_repository_names())) - def test_positive_create_with_name(self, module_org, module_product, name): + def test_positive_create_with_name(self, module_product, name, module_target_sat): """Create one Docker-type repository :id: e82a36c8-3265-4c10-bafe-c7e07db3be78 @@ -160,13 +148,15 @@ def test_positive_create_with_name(self, module_org, module_product, name): :CaseImportance: Critical """ - repo = _repo(module_product.id, name) + repo = _repo(module_target_sat, module_product.id, name) assert repo['name'] == name assert repo['upstream-repository-name'] == CONTAINER_UPSTREAM_NAME assert repo['content-type'] == REPO_TYPE['docker'] @pytest.mark.tier2 - def test_positive_create_repos_using_same_product(self, module_org, module_product): + def test_positive_create_repos_using_same_product( + self, module_org, module_product, module_target_sat + ): """Create multiple Docker-type repositories :id: 6dd25cf4-f8b6-4958-976a-c116daf27b44 @@ -174,17 +164,18 @@ def test_positive_create_repos_using_same_product(self, module_org, module_produ :expectedresults: Multiple docker repositories are created with a Docker upstream repository and they all belong to the same product. - :CaseLevel: Integration """ repo_names = set() for _ in range(randint(2, 5)): - repo = _repo(module_product.id) + repo = _repo(module_target_sat, module_product.id) repo_names.add(repo['name']) - product = Product.info({'id': module_product.id, 'organization-id': module_org.id}) + product = module_target_sat.cli.Product.info( + {'id': module_product.id, 'organization-id': module_org.id} + ) assert repo_names.issubset({repo_['repo-name'] for repo_ in product['content']}) @pytest.mark.tier2 - def test_positive_create_repos_using_multiple_products(self, module_org): + def test_positive_create_repos_using_multiple_products(self, module_org, module_target_sat): """Create multiple Docker-type repositories on multiple products. @@ -194,19 +185,22 @@ def test_positive_create_repos_using_multiple_products(self, module_org): Docker upstream repository and they all belong to their respective products. 
- :CaseLevel: Integration """ for _ in range(randint(2, 5)): - product = make_product_wait({'organization-id': module_org.id}) + product = module_target_sat.cli_factory.make_product_wait( + {'organization-id': module_org.id} + ) repo_names = set() for _ in range(randint(2, 3)): - repo = _repo(product['id']) + repo = _repo(module_target_sat, product['id']) repo_names.add(repo['name']) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = module_target_sat.cli.Product.info( + {'id': product['id'], 'organization-id': module_org.id} + ) assert repo_names == {repo_['repo-name'] for repo_ in product['content']} @pytest.mark.tier1 - def test_positive_sync(self, repo): + def test_positive_sync(self, repo, module_target_sat): """Create and sync a Docker-type repository :id: bff1d40e-181b-48b2-8141-8c86e0db62a2 @@ -217,13 +211,13 @@ def test_positive_sync(self, repo): :CaseImportance: Critical """ assert int(repo['content-counts']['container-image-manifests']) == 0 - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['container-image-manifests']) > 0 @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_docker_repository_names())) - def test_positive_update_name(self, repo, new_name): + def test_positive_update_name(self, repo, new_name, module_target_sat): """Create a Docker-type repository and update its name. :id: 8b3a8496-e9bd-44f1-916f-6763a76b9b1b @@ -235,13 +229,15 @@ def test_positive_update_name(self, repo, new_name): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'new-name': new_name, 'url': repo['url']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'new-name': new_name, 'url': repo['url']} + ) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['name'] == new_name @pytest.mark.tier1 @pytest.mark.parametrize('new_upstream_name', **parametrized(valid_docker_upstream_names())) - def test_positive_update_upstream_name(self, repo, new_upstream_name): + def test_positive_update_upstream_name(self, repo, new_upstream_name, module_target_sat): """Create a Docker-type repository and update its upstream name. :id: 1a6985ed-43ec-4ea6-ba27-e3870457ac56 @@ -253,19 +249,19 @@ def test_positive_update_upstream_name(self, repo, new_upstream_name): :CaseImportance: Critical """ - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': new_upstream_name, 'id': repo['id'], 'url': repo['url'], } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['upstream-repository-name'] == new_upstream_name @pytest.mark.tier1 @pytest.mark.parametrize('new_upstream_name', **parametrized(invalid_docker_upstream_names())) - def test_negative_update_upstream_name(self, repo, new_upstream_name): + def test_negative_update_upstream_name(self, repo, new_upstream_name, module_target_sat): """Attempt to update upstream name for a Docker-type repository. 
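The negative case below pins the failure to the exact hammer error via the `match` argument of `pytest.raises`; a stand-alone illustration of the idiom (the raised exception here is synthetic):

    import pytest

    def test_match_idiom():
        # match= is a regex searched against str(excinfo.value)
        with pytest.raises(ValueError, match='Validation failed'):
            raise ValueError('Validation failed: Docker upstream name')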
:id: 798651af-28b2-4907-b3a7-7c560bf66c7c @@ -279,7 +275,7 @@ def test_negative_update_upstream_name(self, repo, new_upstream_name): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError, match='Validation failed: Docker upstream name'): - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': new_upstream_name, 'id': repo['id'], @@ -289,7 +285,7 @@ def test_negative_update_upstream_name(self, repo, new_upstream_name): @pytest.mark.skip_if_not_set('docker') @pytest.mark.tier1 - def test_positive_create_with_long_upstream_name(self, module_product): + def test_positive_create_with_long_upstream_name(self, module_product, module_target_sat): """Create a docker repository with upstream name longer than 30 characters @@ -304,6 +300,7 @@ def test_positive_create_with_long_upstream_name(self, module_product): :CaseImportance: Critical """ repo = _repo( + module_target_sat, module_product.id, upstream_name=CONTAINER_RH_REGISTRY_UPSTREAM_NAME, url=settings.docker.external_registry_1, @@ -312,7 +309,7 @@ def test_positive_create_with_long_upstream_name(self, module_product): @pytest.mark.skip_if_not_set('docker') @pytest.mark.tier1 - def test_positive_update_with_long_upstream_name(self, repo): + def test_positive_update_with_long_upstream_name(self, repo, module_target_sat): """Create a docker repository and update its upstream name with longer than 30 characters value @@ -324,18 +321,18 @@ def test_positive_update_with_long_upstream_name(self, repo): :CaseImportance: Critical """ - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': CONTAINER_RH_REGISTRY_UPSTREAM_NAME, 'id': repo['id'], 'url': settings.docker.external_registry_1, } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['upstream-repository-name'] == CONTAINER_RH_REGISTRY_UPSTREAM_NAME @pytest.mark.tier2 - def test_positive_update_url(self, repo): + def test_positive_update_url(self, repo, module_target_sat): """Create a Docker-type repository and update its URL. :id: 73caacd4-7f17-42a7-8d93-3dee8b9341fa @@ -344,12 +341,12 @@ def test_positive_update_url(self, repo): repository and that its URL can be updated. """ new_url = gen_url() - Repository.update({'id': repo['id'], 'url': new_url}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'url': new_url}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['url'] == new_url @pytest.mark.tier1 - def test_positive_delete_by_id(self, repo): + def test_positive_delete_by_id(self, repo, module_target_sat): """Create and delete a Docker-type repository :id: ab1e8228-92a8-45dc-a863-7181711f2745 @@ -359,12 +356,12 @@ def test_positive_delete_by_id(self, repo): :CaseImportance: Critical """ - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier2 - def test_positive_delete_random_repo_by_id(self, module_org): + def test_positive_delete_random_repo_by_id(self, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. @@ -374,22 +371,23 @@ def test_positive_delete_random_repo_by_id(self, module_org): without altering the other products. 
""" products = [ - make_product_wait({'organization-id': module_org.id}) for _ in range(randint(2, 5)) + module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + for _ in range(randint(2, 5)) ] repos = [] for product in products: for _ in range(randint(2, 3)): - repos.append(_repo(product['id'])) + repos.append(_repo(module_target_sat, product['id'])) # Select random repository and delete it repo = choice(repos) repos.remove(repo) - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) # Verify other repositories were not touched product_ids = [product['id'] for product in products] for repo in repos: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['product']['id'] in product_ids @@ -399,12 +397,10 @@ class TestDockerContentView: :CaseComponent: ContentViews :team: Phoenix-content - - :CaseLevel: Integration """ @pytest.mark.tier2 - def test_positive_add_docker_repo_by_id(self, module_org, repo): + def test_positive_add_docker_repo_by_id(self, module_org, repo, module_target_sat): """Add one Docker-type repository to a non-composite content view :id: 87d6c7bb-92f8-4a32-8ad2-2a1af896500b @@ -412,13 +408,17 @@ def test_positive_add_docker_repo_by_id(self, module_org, repo): :expectedresults: A repository is created with a Docker repository and the product is added to a non-composite content view """ - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert repo['id'] in [repo_['id'] for repo_ in content_view['container-image-repositories']] @pytest.mark.tier2 - def test_positive_add_docker_repos_by_id(self, module_org, module_product): + def test_positive_add_docker_repos_by_id(self, module_org, module_product, module_target_sat): """Add multiple Docker-type repositories to a non-composite CV. :id: 2eb19e28-a633-4c21-9469-75a686c83b34 @@ -427,18 +427,22 @@ def test_positive_add_docker_repos_by_id(self, module_org, module_product): repositories and the product is added to a non-composite content view. 
""" - repos = [_repo(module_product.id) for _ in range(randint(2, 5))] - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + repos = [_repo(module_target_sat, module_product.id) for _ in range(randint(2, 5))] + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for repo in repos: - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert {repo['id'] for repo in repos} == { repo['id'] for repo in content_view['container-image-repositories'] } @pytest.mark.tier2 - def test_positive_add_synced_docker_repo_by_id(self, module_org, repo): + def test_positive_add_synced_docker_repo_by_id(self, module_org, repo, module_target_sat): """Create and sync a Docker-type repository :id: 6f51d268-ed23-48ab-9dea-cd3571daa647 @@ -446,17 +450,23 @@ def test_positive_add_synced_docker_repo_by_id(self, module_org, repo): :expectedresults: A repository is created with a Docker repository and it is synchronized. """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['container-image-manifests']) > 0 - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert repo['id'] in [repo_['id'] for repo_ in content_view['container-image-repositories']] @pytest.mark.tier2 - def test_positive_add_docker_repo_by_id_to_ccv(self, module_org, content_view): + def test_positive_add_docker_repo_by_id_to_ccv( + self, module_org, content_view, module_target_sat + ): """Add one Docker-type repository to a composite content view :id: 8e2ef5ba-3cdf-4ef9-a22a-f1701e20a5d5 @@ -467,23 +477,27 @@ def test_positive_add_docker_repo_by_id_to_ccv(self, module_org, content_view): :BZ: 1359665 """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'id': comp_content_view['id'], 'component-ids': content_view['versions'][0]['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) 
assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] @pytest.mark.tier2 - def test_positive_add_docker_repos_by_id_to_ccv(self, module_org, module_product): + def test_positive_add_docker_repos_by_id_to_ccv( + self, module_org, module_product, module_target_sat + ): """Add multiple Docker-type repositories to a composite content view. :id: b79cbc97-3dba-4059-907d-19316684d569 @@ -496,27 +510,33 @@ def test_positive_add_docker_repos_by_id_to_ccv(self, module_org, module_product """ cv_versions = [] for _ in range(randint(2, 5)): - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - repo = _repo(module_product.id) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + repo = _repo(module_target_sat, module_product.id) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cv_versions.append(content_view['versions'][0]) - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': [cv_version['id'] for cv_version in cv_versions], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) comp_ids = [component['id'] for component in comp_content_view['components']] for cv_version in cv_versions: assert cv_version['id'] in comp_ids @pytest.mark.tier2 - def test_positive_publish_with_docker_repo(self, content_view): + def test_positive_publish_with_docker_repo(self, content_view, module_target_sat): """Add Docker-type repository to content view and publish it once. :id: 28480de3-ffb5-4b8e-8174-fffffeef6af4 @@ -526,12 +546,14 @@ def test_positive_publish_with_docker_repo(self, content_view): published only once. """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 @pytest.mark.tier2 - def test_positive_publish_with_docker_repo_composite(self, content_view, module_org): + def test_positive_publish_with_docker_repo_composite( + self, content_view, module_org, module_target_sat + ): """Add Docker-type repository to composite CV and publish it once. 
:id: 2d75419b-73ed-4f29-ae0d-9af8d9624c87 @@ -545,28 +567,30 @@ def test_positive_publish_with_docker_repo_composite(self, content_view, module_ """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert len(comp_content_view['versions']) == 1 @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo(self, content_view): + def test_positive_publish_multiple_with_docker_repo(self, content_view, module_target_sat): """Add Docker-type repository to content view and publish it multiple times. @@ -580,12 +604,14 @@ def test_positive_publish_multiple_with_docker_repo(self, content_view): publish_amount = randint(2, 5) for _ in range(publish_amount): - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == publish_amount @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo_composite(self, module_org, content_view): + def test_positive_publish_multiple_with_docker_repo_composite( + self, module_org, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it multiple times. 
@@ -600,30 +626,34 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org, """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] publish_amount = randint(2, 5) for _ in range(publish_amount): - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert len(comp_content_view['versions']) == publish_amount @pytest.mark.tier2 - def test_positive_promote_with_docker_repo(self, module_org, module_lce, content_view): + def test_positive_promote_with_docker_repo( + self, module_org, module_lce, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then promote it to the next available lifecycle-environment. @@ -632,20 +662,28 @@ def test_positive_promote_with_docker_repo(self, module_org, module_lce, content :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - cvv = ContentView.version_info({'id': content_view['versions'][0]['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 1 - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': module_lce.id}) - cvv = ContentView.version_info({'id': content_view['versions'][0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': module_lce.id} + ) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 2 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_promote_multiple_with_docker_repo(self, module_org, content_view): + def test_positive_promote_multiple_with_docker_repo( + self, module_org, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then promote it to multiple available lifecycle-environments. 
@@ -654,26 +692,32 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org, content_vi :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - cvv = ContentView.version_info({'id': content_view['versions'][0]['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 1 lces = [ - make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(1, randint(3, 6)) ] for expected_lces, lce in enumerate(lces, start=2): - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}) - cvv = ContentView.version_info({'id': cvv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']} + ) + cvv = module_target_sat.cli.ContentView.version_info({'id': cvv['id']}) assert len(cvv['lifecycle-environments']) == expected_lces @pytest.mark.tier2 def test_positive_promote_with_docker_repo_composite( - self, module_org, module_lce, content_view + self, module_org, module_lce, content_view, module_target_sat ): """Add Docker-type repository to composite content view and publish it. Then promote it to the next available lifecycle-environment. @@ -685,39 +729,47 @@ def test_positive_promote_with_docker_repo_composite( :BZ: 1359665 """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 1 - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': comp_content_view['versions'][0]['id'], 'to-lifecycle-environment-id': module_lce.id, } ) - cvv = ContentView.version_info({'id': 
comp_content_view['versions'][0]['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 2 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_promote_multiple_with_docker_repo_composite(self, content_view, module_org): + def test_positive_promote_multiple_with_docker_repo_composite( + self, content_view, module_org, module_target_sat + ): """Add Docker-type repository to composite content view and publish it. Then promote it to the multiple available lifecycle-environments. @@ -728,45 +780,51 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, content_view :BZ: 1359665 """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 1 lces = [ - make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(1, randint(3, 6)) ] for expected_lces, lce in enumerate(lces, start=2): - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': cvv['id'], 'to-lifecycle-environment-id': lce['id'], } ) - cvv = ContentView.version_info({'id': cvv['id']}) + cvv = module_target_sat.cli.ContentView.version_info({'id': cvv['id']}) assert len(cvv['lifecycle-environments']) == expected_lces @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_name_pattern_change(self, module_org): + def test_positive_name_pattern_change(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change registry name pattern for that environment. Verify that repository name on product changed according to new pattern. @@ -776,7 +834,9 @@ def test_positive_name_pattern_change(self, module_org): :expectedresults: Container repository name is changed according to new pattern. 
""" - lce = make_lifecycle_environment({'organization-id': module_org.id}) + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) pattern_prefix = gen_string('alpha', 5) docker_upstream_name = 'hello-world' new_pattern = ( @@ -784,37 +844,49 @@ def test_positive_name_pattern_change(self, module_org): ) repo = _repo( - make_product_wait({'organization-id': module_org.id})['id'], + module_target_sat, + module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id})[ + 'id' + ], name=gen_string('alpha', 5), upstream_name=docker_upstream_name, ) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['registry-name-pattern'] == new_pattern - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( {'name': repo['name'], 'environment-id': lce['id'], 'organization-id': module_org.id} )[0] expected_name = f'{pattern_prefix}-{content_view["label"]}/{docker_upstream_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_positive_product_name_change_after_promotion(self, module_org): + def test_positive_product_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change product name. Verify that repository name on product changed according to new pattern. 
@@ -829,45 +901,63 @@ def test_positive_product_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = '<%= content_view.label %>/<%= product.name %>' - lce = make_lifecycle_environment({'organization-id': module_org.id}) - prod = make_product_wait({'organization-id': module_org.id, 'name': old_prod_name}) - repo = _repo(prod['id'], name=gen_string('alpha', 5), upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - LifecycleEnvironment.update( + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + prod = module_target_sat.cli_factory.make_product_wait( + {'organization-id': module_org.id, 'name': old_prod_name} + ) + repo = _repo( + module_target_sat, + prod['id'], + name=gen_string('alpha', 5), + upstream_name=docker_upstream_name, + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['registry-name-pattern'] == new_pattern - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - Product.update({'name': new_prod_name, 'id': prod['id']}) + module_target_sat.cli.Product.update({'name': new_prod_name, 'id': prod['id']}) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( {'name': repo['name'], 'environment-id': lce['id'], 'organization-id': module_org.id} )[0] expected_name = f'{content_view["label"]}/{old_prod_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( { 'id': content_view['versions'][-1]['id'], 'to-lifecycle-environment-id': lce['id'], } ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': repo['name'], 'environment-id': lce['id'], @@ -875,10 +965,13 @@ def test_positive_product_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{new_prod_name}'.lower() - assert Repository.info({'id': 
repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_positive_repo_name_change_after_promotion(self, module_org): + def test_positive_repo_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change repository name. Verify that Docker repository name on product changed according to new pattern. @@ -893,27 +986,37 @@ def test_positive_repo_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = '<%= content_view.label %>/<%= repository.name %>' - lce = make_lifecycle_environment({'organization-id': module_org.id}) - prod = make_product_wait({'organization-id': module_org.id}) - repo = _repo(prod['id'], name=old_repo_name, upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - LifecycleEnvironment.update( + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + prod = module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo( + module_target_sat, prod['id'], name=old_repo_name, upstream_name=docker_upstream_name + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - Repository.update({'name': new_repo_name, 'id': repo['id'], 'product-id': prod['id']}) + module_target_sat.cli.Repository.update( + {'name': new_repo_name, 'id': repo['id'], 'product-id': prod['id']} + ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': new_repo_name, 'environment-id': lce['id'], @@ -921,18 +1024,21 @@ def test_positive_repo_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{old_repo_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( { 'id': content_view['versions'][-1]['id'], 'to-lifecycle-environment-id': 
lce['id'], } ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': new_repo_name, 'environment-id': lce['id'], @@ -940,10 +1046,13 @@ def test_positive_repo_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{new_repo_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_negative_set_non_unique_name_pattern_and_promote(self, module_org): + def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_target_sat): """Set registry name pattern to one that does not guarantee uniqueness. Try to promote content view with multiple Docker repositories to lifecycle environment. Verify that content has not been promoted. @@ -955,26 +1064,32 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org): docker_upstream_names = ['hello-world', 'alpine'] new_pattern = '<%= organization.label %>' - lce = make_lifecycle_environment( + lce = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'registry-name-pattern': new_pattern} ) - prod = make_product_wait({'organization-id': module_org.id}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + prod = module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for docker_name in docker_upstream_names: - repo = _repo(prod['id'], upstream_name=docker_name) - Repository.synchronize({'id': repo['id']}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + repo = _repo(module_target_sat, prod['id'], upstream_name=docker_name) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) with pytest.raises(CLIReturnCodeError): - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) @pytest.mark.tier2 - def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, module_product): + def test_negative_promote_and_set_non_unique_name_pattern( + self, module_org, module_product, module_target_sat + ): """Promote content view with multiple Docker repositories to lifecycle environment. Set registry name pattern to one that does not guarantee uniqueness. 
Verify that pattern has not been @@ -987,20 +1102,26 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, modu docker_upstream_names = ['hello-world', 'alpine'] new_pattern = '<%= organization.label %>' - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for docker_name in docker_upstream_names: - repo = _repo(module_product.id, upstream_name=docker_name) - Repository.synchronize({'id': repo['id']}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - lce = make_lifecycle_environment({'organization-id': module_org.id}) - ContentView.version_promote( + repo = _repo(module_target_sat, module_product.id, upstream_name=docker_name) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) with pytest.raises(CLIReturnCodeError): - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], @@ -1015,12 +1136,12 @@ class TestDockerActivationKey: :CaseComponent: ActivationKeys :team: Phoenix-subscriptions - - :CaseLevel: Integration """ @pytest.mark.tier2 - def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_promote): + def test_positive_add_docker_repo_cv( + self, module_org, module_lce, content_view_promote, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then create an activation key and associate it with the Docker content view. @@ -1030,7 +1151,7 @@ def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_ :expectedresults: Docker-based content view can be added to activation key """ - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': content_view_promote['content-view-id'], 'lifecycle-environment-id': module_lce.id, @@ -1040,7 +1161,9 @@ def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_ assert activation_key['content-view'] == content_view_promote['content-view-name'] @pytest.mark.tier2 - def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_view_promote): + def test_positive_remove_docker_repo_cv( + self, module_org, module_lce, content_view_promote, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Create an activation key and associate it with the Docker content view. Then remove this content view from the activation @@ -1051,7 +1174,7 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi :expectedresults: Docker-based content view can be added and then removed from the activation key. 
""" - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': content_view_promote['content-view-id'], 'lifecycle-environment-id': module_lce.id, @@ -1061,14 +1184,16 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi assert activation_key['content-view'] == content_view_promote['content-view-name'] # Create another content view replace with - another_cv = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.publish({'id': another_cv['id']}) - another_cv = ContentView.info({'id': another_cv['id']}) - ContentView.version_promote( + another_cv = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': another_cv['id']}) + another_cv = module_target_sat.cli.ContentView.info({'id': another_cv['id']}) + module_target_sat.cli.ContentView.version_promote( {'id': another_cv['versions'][0]['id'], 'to-lifecycle-environment-id': module_lce.id} ) - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'id': activation_key['id'], 'organization-id': module_org.id, @@ -1076,11 +1201,13 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi 'lifecycle-environment-id': module_lce.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['content-view'] != content_view_promote['content-view-name'] @pytest.mark.tier2 - def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view_publish): + def test_positive_add_docker_repo_ccv( + self, module_org, module_lce, content_view_publish, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the @@ -1093,25 +1220,29 @@ def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view :BZ: 1359665 """ - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view_publish['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view_publish['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - comp_cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + comp_cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) + module_target_sat.cli.ContentView.version_promote( {'id': comp_cvv['id'], 'to-lifecycle-environment-id': module_lce.id} ) - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': comp_content_view['id'], 'lifecycle-environment-id': module_lce.id, @@ -1121,7 +1252,9 @@ def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view assert activation_key['content-view'] == comp_content_view['name'] @pytest.mark.tier2 - def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_view_publish): + def test_positive_remove_docker_repo_ccv( + self, module_org, module_lce, content_view_publish, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the @@ -1135,25 +1268,29 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v :BZ: 1359665 """ - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view_publish['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view_publish['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - comp_cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + comp_cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) + module_target_sat.cli.ContentView.version_promote( {'id': comp_cvv['id'], 'to-lifecycle-environment-id': module_lce.id} ) - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': comp_content_view['id'], 'lifecycle-environment-id': module_lce.id, @@ -1163,14 +1300,16 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v assert activation_key['content-view'] == comp_content_view['name'] # Create another content view replace with - another_cv = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.publish({'id': another_cv['id']}) - another_cv = ContentView.info({'id': another_cv['id']}) - ContentView.version_promote( + another_cv = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': another_cv['id']}) + another_cv = module_target_sat.cli.ContentView.info({'id': another_cv['id']}) + module_target_sat.cli.ContentView.version_promote( {'id': another_cv['versions'][0]['id'], 'to-lifecycle-environment-id': module_lce.id} ) - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'id': activation_key['id'], 'organization-id': module_org.id, @@ -1178,5 +1317,5 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v 'lifecycle-environment-id': module_lce.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['content-view'] != comp_content_view['name'] diff --git a/tests/foreman/cli/test_domain.py b/tests/foreman/cli/test_domain.py index 1da37871243..32f81369592 100644 --- a/tests/foreman/cli/test_domain.py +++ b/tests/foreman/cli/test_domain.py @@ -4,30 +4,22 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.domain 
import Domain -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_domain -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import invalid_id_list -from robottelo.utils.datafactory import parametrized +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + filtered_datapoint, + invalid_id_list, + parametrized, +) @filtered_datapoint @@ -50,8 +42,7 @@ def valid_create_params(): @filtered_datapoint def invalid_create_params(): """Returns a list of invalid domain create parameters""" - params = [{'name': gen_string(str_type='utf8', length=256)}] - return params + return [{'name': gen_string(str_type='utf8', length=256)}] @filtered_datapoint @@ -74,8 +65,7 @@ def valid_update_params(): @filtered_datapoint def invalid_update_params(): """Returns a list of invalid domain update parameters""" - params = [{'name': ''}, {'name': gen_string(str_type='utf8', length=256)}] - return params + return [{'name': ''}, {'name': gen_string(str_type='utf8', length=256)}] @filtered_datapoint @@ -114,7 +104,7 @@ def valid_delete_params(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_create_update_delete_domain(): +def test_positive_create_update_delete_domain(module_target_sat): """Create domain, update and delete domain and set parameters :id: 018740bf-1551-4162-b88e-4d4905af097b @@ -124,9 +114,9 @@ def test_positive_create_update_delete_domain(): :CaseImportance: Critical """ options = valid_create_params()[0] - location = make_location() - org = make_org() - domain = make_domain( + location = module_target_sat.cli_factory.make_location() + org = module_target_sat.cli_factory.make_org() + domain = module_target_sat.cli_factory.make_domain( { 'name': options['name'], 'description': options['description'], @@ -142,8 +132,8 @@ def test_positive_create_update_delete_domain(): # set parameter parameter_options = valid_set_params()[0] parameter_options['domain-id'] = domain['id'] - Domain.set_parameter(parameter_options) - domain = Domain.info({'id': domain['id']}) + module_target_sat.cli.Domain.set_parameter(parameter_options) + domain = module_target_sat.cli.Domain.info({'id': domain['id']}) parameter = { # Satellite applies lower to parameter's name parameter_options['name'].lower(): parameter_options['value'] @@ -152,27 +142,29 @@ def test_positive_create_update_delete_domain(): # update domain options = valid_update_params()[0] - Domain.update(dict(options, id=domain['id'])) + module_target_sat.cli.Domain.update(dict(options, id=domain['id'])) # check - domain updated - domain = Domain.info({'id': domain['id']}) + domain = module_target_sat.cli.Domain.info({'id': domain['id']}) for key, val in options.items(): assert domain[key] == val # delete parameter - Domain.delete_parameter({'name': parameter_options['name'], 'domain-id': domain['id']}) + module_target_sat.cli.Domain.delete_parameter( + {'name': parameter_options['name'], 'domain-id': domain['id']} + ) # check - parameter not set - domain = Domain.info({'name': domain['name']}) + domain = module_target_sat.cli.Domain.info({'name': domain['name']}) assert len(domain['parameters']) == 0 # delete domain - Domain.delete({'id': domain['id']}) + module_target_sat.cli.Domain.delete({'id': domain['id']}) with pytest.raises(CLIReturnCodeError): - Domain.info({'id': domain['id']}) + 
module_target_sat.cli.Domain.info({'id': domain['id']}) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_create_params())) -def test_negative_create(options): +def test_negative_create(options, module_target_sat): """Create domain with invalid values :id: 6d3aec19-75dc-41ca-89af-fef0ca37082d @@ -184,11 +176,11 @@ def test_negative_create(options): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_domain(options) + module_target_sat.cli_factory.make_domain(options) @pytest.mark.tier2 -def test_negative_create_with_invalid_dns_id(): +def test_negative_create_with_invalid_dns_id(module_target_sat): """Attempt to register a domain with invalid id :id: 4aa52167-368a-41ad-87b7-41d468ad41a8 @@ -197,12 +189,10 @@ def test_negative_create_with_invalid_dns_id(): :BZ: 1398392 - :CaseLevel: Integration - :CaseImportance: Medium """ with pytest.raises(CLIFactoryError) as context: - make_domain({'name': gen_string('alpha'), 'dns-id': -1}) + module_target_sat.cli_factory.make_domain({'name': gen_string('alpha'), 'dns-id': -1}) valid_messages = ['Invalid smart-proxy id', 'Invalid capsule id'] exception_string = str(context.value) messages = [message for message in valid_messages if message in exception_string] @@ -211,7 +201,7 @@ def test_negative_create_with_invalid_dns_id(): @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_update_params())) -def test_negative_update(module_domain, options): +def test_negative_update(module_domain, options, module_target_sat): """Update domain with invalid values :id: 9fc708dc-20f9-4d7c-af53-863826462981 @@ -223,16 +213,16 @@ def test_negative_update(module_domain, options): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Domain.update(dict(options, id=module_domain.id)) + module_target_sat.cli.Domain.update(dict(options, id=module_domain.id)) # check - domain not updated - result = Domain.info({'id': module_domain.id}) - for key in options.keys(): + result = module_target_sat.cli.Domain.info({'id': module_domain.id}) + for key in options: assert result[key] == getattr(module_domain, key) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_set_params())) -def test_negative_set_parameter(module_domain, options): +def test_negative_set_parameter(module_domain, options, module_target_sat): """Domain set-parameter with invalid values :id: 991fb849-83be-48f4-a12b-81eabb2bd8d3 @@ -246,15 +236,15 @@ def test_negative_set_parameter(module_domain, options): options['domain-id'] = module_domain.id # set parameter with pytest.raises(CLIReturnCodeError): - Domain.set_parameter(options) + module_target_sat.cli.Domain.set_parameter(options) # check - parameter not set - domain = Domain.info({'id': module_domain.id}) + domain = module_target_sat.cli.Domain.info({'id': module_domain.id}) assert len(domain['parameters']) == 0 @pytest.mark.tier2 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) -def test_negative_delete_by_id(entity_id): +def test_negative_delete_by_id(entity_id, module_target_sat): """Create Domain then delete it by wrong ID :id: 0e4ef107-f006-4433-abc3-f872613e0b91 @@ -266,4 +256,4 @@ def test_negative_delete_by_id(entity_id): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Domain.delete({'id': entity_id}) + module_target_sat.cli.Domain.delete({'id': entity_id}) diff --git a/tests/foreman/cli/test_environment.py b/tests/foreman/cli/test_environment.py index d033c952711..90e5b79e72a 100644 --- 
a/tests/foreman/cli/test_environment.py +++ b/tests/foreman/cli/test_environment.py @@ -4,29 +4,25 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ from random import choice +from fauxfactory import gen_alphanumeric, gen_string import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings -from robottelo.utils.datafactory import invalid_id_list -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_id_list, + invalid_values_list, + parametrized, +) @pytest.fixture(scope='module') @@ -48,8 +44,6 @@ def test_negative_list_with_parameters( :expectedresults: Server returns empty result as there is no environment associated with location - :CaseLevel: Integration - :BZ: 1337947 """ session_puppet_enabled_sat.cli.Environment.create( diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index 6e2c562ee3d..52eff37bda4 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -4,67 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ErrataManagement :team: Phoenix-content -:TestType: Functional - :CaseImportance: High - -:Upstream: No """ -from datetime import date -from datetime import datetime -from datetime import timedelta +from datetime import date, datetime, timedelta from operator import itemgetter -import pytest from broker import Broker -from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.contentview import ContentViewFilter -from robottelo.cli.erratum import Erratum -from robottelo.cli.factory import make_content_view_filter -from robottelo.cli.factory import make_content_view_filter_rule -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_host_collection -from robottelo.cli.factory import make_repository -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.factory import setup_org_for_a_custom_repo -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.org import Org -from robottelo.cli.package import Package -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_5_CUSTOM_PACKAGE -from robottelo.constants import PRDS -from robottelo.constants import 
REAL_0_ERRATA_ID -from robottelo.constants import REAL_4_ERRATA_CVES -from robottelo.constants import REAL_4_ERRATA_ID -from robottelo.constants import REAL_RHEL7_0_2_PACKAGE_NAME -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + DEFAULT_SUBSCRIPTION_NAME, + FAKE_1_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE_NAME, + FAKE_4_CUSTOM_PACKAGE, + FAKE_4_CUSTOM_PACKAGE_NAME, + FAKE_5_CUSTOM_PACKAGE, + PRDS, + REAL_4_ERRATA_CVES, + REAL_4_ERRATA_ID, + REPOS, + REPOSET, +) +from robottelo.exceptions import CLIReturnCodeError from robottelo.hosts import ContentHost PER_PAGE = 10 @@ -116,6 +85,7 @@ ) TIMESTAMP_FMT = '%Y-%m-%d %H:%M' +TIMESTAMP_FMT_S = '%Y-%m-%d %H:%M:%S' PSUTIL_RPM = 'python2-psutil-5.6.7-1.el7.x86_64.rpm' @@ -128,23 +98,23 @@ @pytest.fixture(scope='module') -def orgs(): +def orgs(module_target_sat): """Create and return a list of three orgs.""" - return [entities.Organization().create() for _ in range(3)] + return [module_target_sat.api.Organization().create() for _ in range(3)] @pytest.fixture(scope='module') -def products_with_repos(orgs): +def products_with_repos(orgs, module_target_sat): """Create and return a list of products. For each product, create and sync a single repo.""" products = [] # Create one product for each org, and a second product for the last org. - for org, params in zip(orgs + orgs[-1:], REPOS_WITH_ERRATA): - product = entities.Product(organization=org).create() + for org, params in zip(orgs + orgs[-1:], REPOS_WITH_ERRATA, strict=True): + product = module_target_sat.api.Product(organization=org).create() # Replace the organization entity returned by create(), which contains only the id, # with the one we already have. 
product.organization = org products.append(product) - repo = make_repository( + repo = module_target_sat.cli_factory.make_repository( { 'download-policy': 'immediate', 'organization-id': product.organization.id, @@ -152,20 +122,20 @@ def products_with_repos(orgs): 'url': params['url'], } ) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) return products @pytest.fixture(scope='module') -def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce): +def rh_repo(module_sca_manifest_org, module_lce, module_cv, module_ak_cv_lce, module_target_sat): """Add a subscription for the Satellite Tools repo to activation key.""" - setup_org_for_a_rh_repo( + module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], 'repository': REPOS['rhst7']['name'], - 'organization-id': module_entitlement_manifest_org.id, + 'organization-id': module_sca_manifest_org.id, 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'activationkey-id': module_ak_cv_lce.id, @@ -174,12 +144,14 @@ def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv @pytest.fixture(scope='module') -def custom_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce): +def custom_repo( + module_sca_manifest_org, module_lce, module_cv, module_ak_cv_lce, module_target_sat +): """Create custom repo and add a subscription to activation key.""" - setup_org_for_a_custom_repo( + module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': REPO_WITH_ERRATA['url'], - 'organization-id': module_entitlement_manifest_org.id, + 'organization-id': module_sca_manifest_org.id, 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'activationkey-id': module_ak_cv_lce.id, @@ -191,8 +163,8 @@ def custom_repo(module_entitlement_manifest_org, module_lce, module_cv, module_a def hosts(request): """Deploy hosts via broker.""" num_hosts = getattr(request, 'param', 2) - with Broker(nick='rhel7', host_class=ContentHost, _count=num_hosts) as hosts: - if type(hosts) is not list or len(hosts) != num_hosts: + with Broker(nick='rhel8', host_class=ContentHost, _count=num_hosts) as hosts: + if not isinstance(hosts, list) or len(hosts) != num_hosts: pytest.fail('Failed to provision the expected number of hosts.') yield hosts @@ -201,24 +173,39 @@ def hosts(request): def register_hosts( hosts, module_entitlement_manifest_org, - module_ak_cv_lce, - rh_repo, - custom_repo, + module_lce, + module_ak, module_target_sat, ): - """Register hosts to Satellite and install katello-agent rpm.""" + """Register hosts to Satellite""" for host in hosts: - host.install_katello_ca(module_target_sat) - host.register_contenthost(module_entitlement_manifest_org.name, module_ak_cv_lce.name) - host.enable_repo(REPOS['rhst7']['id']) - host.install_katello_agent() + module_target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': REPO_WITH_ERRATA['url'], + 'organization-id': module_entitlement_manifest_org.id, + 'lifecycle-environment-id': module_lce.id, + 'activationkey-id': module_ak.id, + } + ) + host.register( + activation_keys=module_ak.name, + target=module_target_sat, + org=module_entitlement_manifest_org, + loc=None, + ) + assert host.subscribed + return hosts @pytest.fixture -def errata_hosts(register_hosts): +def errata_hosts(register_hosts, target_sat): """Ensure that rpm is installed on host.""" for host in register_hosts: + # Enable all custom 
and rh repositories. + host.execute(r'subscription-manager repos --enable \*') + host.execute(r'yum-config-manager --enable \*') + host.add_rex_key(satellite=target_sat) # Remove all packages. for errata in REPO_WITH_ERRATA['errata']: # Remove package if present, old or new. @@ -232,22 +219,29 @@ result = host.execute(f'yum install -y {old_package}') if result.status != 0: pytest.fail(f'Failed to install {old_package}: {result.stdout} {result.stderr}') + return register_hosts @pytest.fixture(scope='module') -def host_collection(module_entitlement_manifest_org, module_ak_cv_lce, register_hosts): +def host_collection( + module_entitlement_manifest_org, module_ak_cv_lce, register_hosts, module_target_sat +): """Create and setup host collection.""" - host_collection = make_host_collection({'organization-id': module_entitlement_manifest_org.id}) - host_ids = [Host.info({'name': host.hostname})['id'] for host in register_hosts] - HostCollection.add_host( + host_collection = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_entitlement_manifest_org.id} + ) + host_ids = [ + module_target_sat.cli.Host.info({'name': host.hostname})['id'] for host in register_hosts + ] + module_target_sat.cli.HostCollection.add_host( { 'id': host_collection['id'], 'organization-id': module_entitlement_manifest_org.id, 'host-ids': host_ids, } ) - ActivationKey.add_host_collection( + module_target_sat.cli.ActivationKey.add_host_collection( { 'id': module_ak_cv_lce.id, 'host-collection-id': host_collection['id'], @@ -257,6 +251,42 @@ return host_collection +def start_and_wait_errata_recalculate(sat, host): + """Wait for any in-progress errata applicability task to complete. + If no task is in progress, schedule an errata recalculation and wait for it. + Finally, assert that at least one matching task completed successfully. + + :param sat: Satellite instance to check for task(s) + :param host: ContentHost instance to schedule errata recalculate + """ + # Find any in-progress task for this host + search = "label = Actions::Katello::Applicability::Hosts::BulkGenerate and result = pending" + applicability_task_running = sat.cli.Task.list_tasks({'search': search}) + # No task in progress, invoke errata recalculate + if len(applicability_task_running) == 0: + sat.cli.Host.errata_recalculate({'host-id': host.nailgun_host.id}) + host.run('subscription-manager repos') + # Note: the timestamp includes a 30s margin of safety for the later task searches + timestamp = (datetime.utcnow() - timedelta(seconds=30)).strftime(TIMESTAMP_FMT_S) + # Wait for upload profile event (in case Satellite system is slow) + sat.wait_for_tasks( + search_query=( + 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' + f' and started_at >= "{timestamp}"' + ), + search_rate=15, + max_tries=10, + ) + # Find the successfully finished task(s) + search = ( + "label = Actions::Katello::Applicability::Hosts::BulkGenerate" + " and result = success" + f" and started_at >= '{timestamp}'" + ) + applicability_task_success = sat.cli.Task.list_tasks({'search': search}) + assert applicability_task_success, f'No successful task found by search: {search}' + + def is_rpm_installed(host, rpm=None): """Return whether the specified rpm is installed. 
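The start_and_wait_errata_recalculate helper added above centralizes the errata_recalculate plus wait_for_tasks sequences that later hunks remove from individual tests. A minimal usage sketch, assuming only this helper and the target_sat and errata_hosts fixtures defined in this file; the test name itself is hypothetical:

    @pytest.mark.tier3
    def test_example_applicability_refresh(target_sat, errata_hosts):
        for host in errata_hosts:
            # Reuses any pending BulkGenerate task, otherwise schedules a new
            # recalculation, then blocks until one finishes successfully.
            start_and_wait_errata_recalculate(target_sat, host)
            # errata_hosts installs outdated packages, so errata should apply.
            assert host.applicable_errata_count > 0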
@@ -268,7 +298,7 @@ def is_rpm_installed(host, rpm=None): return not host.execute(f'rpm -q {rpm}').status -def get_sorted_errata_info_by_id(errata_ids, sort_by='issued', sort_reversed=False): +def get_sorted_errata_info_by_id(sat, errata_ids, sort_by='issued', sort_reversed=False): """Query hammer for erratum ids info :param errata_ids: a list of errata id @@ -284,14 +314,17 @@ def get_sorted_errata_info_by_id(errata_ids, sort_by='issued', sort_reversed=Fal if len(errata_ids) > PER_PAGE: raise Exception('Errata ids length exceeded') errata_info = [ - Erratum.info(options={'id': errata_id}, output_format='json') for errata_id in errata_ids + sat.cli.Erratum.info(options={'id': errata_id}, output_format='json') + for errata_id in errata_ids ] return sorted(errata_info, key=itemgetter(sort_by), reverse=sort_reversed) -def get_errata_ids(*params): +def get_errata_ids(sat, *params): """Return list of sets of errata ids corresponding to the provided params.""" - errata_ids = [{errata['errata-id'] for errata in Erratum.list(param)} for param in params] + errata_ids = [ + {errata['errata-id'] for errata in sat.cli.Erratum.list(param)} for param in params + ] return errata_ids[0] if len(errata_ids) == 1 else errata_ids @@ -301,12 +334,12 @@ def check_errata(errata_ids, by_org=False): :param errata_ids: a list containing a list of errata ids for each repo :type errata_ids: list[list] """ - for ids, repo_with_errata in zip(errata_ids, REPOS_WITH_ERRATA): + for ids, repo_with_errata in zip(errata_ids, REPOS_WITH_ERRATA, strict=True): assert len(ids) == repo_with_errata['org_errata_count' if by_org else 'errata_count'] assert repo_with_errata['errata_id'] in ids -def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): +def filter_sort_errata(sat, org, sort_by_date='issued', filter_by_org=None): """Compare the list of errata returned by `hammer erratum {list|info}` to the expected values, subject to the date sort and organization filter options. @@ -327,15 +360,15 @@ def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): elif filter_by_org == 'label': list_param['organization-label'] = org.label - sort_reversed = True if sort_order == 'DESC' else False + sort_reversed = sort_order == 'DESC' - errata_list = Erratum.list(list_param) + errata_list = sat.cli.Erratum.list(list_param) assert len(errata_list) > 0 # Build a sorted errata info list, which also contains the sort field. errata_internal_ids = [errata['id'] for errata in errata_list] sorted_errata_info = get_sorted_errata_info_by_id( - errata_internal_ids, sort_by=sort_by_date, sort_reversed=sort_reversed + sat, errata_internal_ids, sort_by=sort_by_date, sort_reversed=sort_reversed ) sort_field_values = [errata[sort_by_date] for errata in sorted_errata_info] @@ -346,7 +379,7 @@ def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): assert errata_ids == sorted_errata_ids -def cv_publish_promote(cv, org, lce): +def cv_publish_promote(sat, cv, org, lce, force=False): """Publish and promote a new version into the given lifecycle environment. 
:param cv: content view @@ -356,33 +389,36 @@ def cv_publish_promote(cv, org, lce): :param lce: lifecycle environment :type lce: entities.LifecycleEnvironment """ - ContentView.publish({'id': cv.id}) - cvv = ContentView.info({'id': cv.id})['versions'][-1] - ContentView.version_promote( + sat.cli.ContentView.publish({'id': cv.id, 'organization': org}) + # Sort CV Version results by --id, grab last version (latest) + cvv_id = sorted(cvv['id'] for cvv in sat.cli.ContentView.info({'id': cv.id})['versions'])[-1] + sat.cli.ContentView.version_promote( { - 'id': cvv['id'], - 'organization-id': org.id, + 'id': cvv_id, + 'organization': org, 'to-lifecycle-environment-id': lce.id, + 'force': force, } ) -def cv_filter_cleanup(filter_id, cv, org, lce): +def cv_filter_cleanup(sat, filter_id, cv, org, lce): """Delete the cv filter, then publish and promote an unfiltered version.""" - ContentViewFilter.delete( + sat.cli.ContentViewFilter.delete( { 'content-view-id': cv.id, 'id': filter_id, 'organization-id': org.id, } ) - cv_publish_promote(cv, org, lce) + cv = cv.read() + cv_publish_promote(sat, cv, org, lce) @pytest.mark.tier3 -@pytest.mark.parametrize('filter_by_hc', ('id', 'name'), ids=('hc_id', 'hc_name')) +@pytest.mark.parametrize('filter_by_hc', ['id', 'name'], ids=('hc_id', 'hc_name')) @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'title'), ids=('org_id', 'org_name', 'org_title') + 'filter_by_org', ['id', 'name', 'title'], ids=('org_id', 'org_name', 'org_title') ) @pytest.mark.no_containers def test_positive_install_by_host_collection_and_org( @@ -411,15 +447,10 @@ def test_positive_install_by_host_collection_and_org( :expectedresults: Erratum is installed. - :CaseLevel: System - :BZ: 1457977, 1983043 """ errata_id = REPO_WITH_ERRATA['errata'][0]['id'] - for host in errata_hosts: - host.add_rex_key(satellite=target_sat) - if filter_by_hc == 'id': host_collection_query = f'host_collection_id = {host_collection["id"]}' elif filter_by_hc == 'name': @@ -435,7 +466,7 @@ def test_positive_install_by_host_collection_and_org( organization_key = 'organization-title' organization_value = module_entitlement_manifest_org.title - JobInvocation.create( + target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': host_collection_query, @@ -450,7 +481,7 @@ def test_positive_install_by_host_collection_and_org( @pytest.mark.tier3 def test_negative_install_by_hc_id_without_errata_info( - module_entitlement_manifest_org, host_collection, errata_hosts + module_entitlement_manifest_org, host_collection, errata_hosts, target_sat ): """Attempt to install an erratum on a host collection by host collection id but no errata info specified. @@ -465,11 +496,9 @@ def test_negative_install_by_hc_id_without_errata_info( :expectedresults: Error message thrown. :CaseImportance: Low - - :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match="Error: Option '--errata' is required"): - HostCollection.erratum_install( + target_sat.cli.HostCollection.erratum_install( { 'id': host_collection['id'], 'organization-id': module_entitlement_manifest_org.id, @@ -479,7 +508,7 @@ def test_negative_install_by_hc_id_without_errata_info( @pytest.mark.tier3 def test_negative_install_by_hc_name_without_errata_info( - module_entitlement_manifest_org, host_collection, errata_hosts + module_entitlement_manifest_org, host_collection, errata_hosts, target_sat ): """Attempt to install an erratum on a host collection by host collection name but no errata info specified. 
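The negative cases around this hunk each omit exactly one required option of the erratum-install command. For contrast, a sketch of a complete invocation, assembled from the options the error messages name as required (host collection, organization, and errata); it is not a test from this diff:

    target_sat.cli.HostCollection.erratum_install(
        {
            'id': host_collection['id'],
            'organization-id': module_entitlement_manifest_org.id,
            'errata': [REPO_WITH_ERRATA['errata'][0]['id']],
        }
    )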
@@ -494,11 +523,9 @@ def test_negative_install_by_hc_name_without_errata_info( :expectedresults: Error message thrown. :CaseImportance: Low - - :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match="Error: Option '--errata' is required"): - HostCollection.erratum_install( + target_sat.cli.HostCollection.erratum_install( { 'name': host_collection['name'], 'organization-id': module_entitlement_manifest_org.id, @@ -507,7 +534,9 @@ def test_negative_install_by_hc_name_without_errata_info( @pytest.mark.tier3 -def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_collection): +def test_negative_install_without_hc_info( + module_entitlement_manifest_org, host_collection, module_target_sat +): """Attempt to install an erratum on a host collection without specifying host collection info. This test only works with two or more host collections (BZ#1928281). We have the one from the fixture, just need to create one more at the start of the test. @@ -524,12 +553,12 @@ def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_ :BZ: 1928281 :CaseImportance: Low - - :CaseLevel: System """ - make_host_collection({'organization-id': module_entitlement_manifest_org.id}) + module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_entitlement_manifest_org.id} + ) with pytest.raises(CLIReturnCodeError): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( { 'organization-id': module_entitlement_manifest_org.id, 'errata': [REPO_WITH_ERRATA['errata'][0]['id']], @@ -539,7 +568,7 @@ def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_ @pytest.mark.tier3 def test_negative_install_by_hc_id_without_org_info( - module_entitlement_manifest_org, host_collection + module_entitlement_manifest_org, host_collection, module_target_sat ): """Attempt to install an erratum on a host collection by host collection id but without specifying any org info. @@ -553,18 +582,16 @@ def test_negative_install_by_hc_id_without_org_info( :expectedresults: Error message thrown. :CaseImportance: Low - - :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match='Error: Could not find organization'): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( {'id': host_collection['id'], 'errata': [REPO_WITH_ERRATA['errata'][0]['id']]} ) @pytest.mark.tier3 def test_negative_install_by_hc_name_without_org_info( - module_entitlement_manifest_org, host_collection + module_entitlement_manifest_org, host_collection, module_target_sat ): """Attempt to install an erratum on a host collection by host collection name but without specifying any org info. @@ -579,17 +606,16 @@ def test_negative_install_by_hc_name_without_org_info( :CaseImportance: Low - :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match='Error: Could not find organization'): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( {'name': host_collection['name'], 'errata': [REPO_WITH_ERRATA['errata'][0]['id']]} ) @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_hosts): +def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_hosts, target_sat): """View a list of affected content hosts for an erratum. 
:id: 3b592253-52c0-4165-9a48-ba55287e9ee9 @@ -604,7 +630,7 @@ def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_h :CaseAutomation: Automated """ - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {REPO_WITH_ERRATA["errata"][0]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -650,19 +676,10 @@ def test_install_errata_to_one_host( # Remove the package on first host to remove need for errata. result = errata_hosts[0].execute(f'yum erase -y {errata["package_name"]}') assert result.status == 0, f'Failed to erase the rpm: {result.stdout}' - # Add ssh keys + for host in errata_hosts: - host.add_rex_key(satellite=target_sat) - Host.errata_recalculate({'host-id': host.nailgun_host.id}) - timestamp = (datetime.utcnow() - timedelta(minutes=1)).strftime(TIMESTAMP_FMT) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=15, - max_tries=10, - ) + start_and_wait_errata_recalculate(target_sat, host) + assert not is_rpm_installed( errata_hosts[0], rpm=errata['package_name'] ), 'Package should not be installed on host.' @@ -674,7 +691,12 @@ def test_install_errata_to_one_host( @pytest.mark.tier3 @pytest.mark.e2e def test_positive_list_affected_chosts_by_erratum_restrict_flag( - request, module_entitlement_manifest_org, module_cv, module_lce, errata_hosts + target_sat, + request, + module_entitlement_manifest_org, + module_cv, + module_lce, + errata_hosts, ): """View a list of affected content hosts for an erratum filtered with restrict flags. Applicability is calculated using the Library, @@ -706,37 +728,35 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( :CaseAutomation: Automated """ - # Uninstall package so that only the first errata applies. for host in errata_hosts: host.execute(f'yum erase -y {REPO_WITH_ERRATA["errata"][1]["package_name"]}') + start_and_wait_errata_recalculate(target_sat, host) # Create list of uninstallable errata. 
errata = REPO_WITH_ERRATA['errata'][0] uninstallable = REPO_WITH_ERRATA['errata_ids'].copy() uninstallable.remove(errata['id']) - # Check search for only installable errata param = { 'errata-restrict-installable': 1, - 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) + assert errata_ids, 'No installable errata found' assert errata['id'] in errata_ids, 'Errata not found in list of installable errata' assert not set(uninstallable) & set(errata_ids), 'Unexpected errata found' # Check search of errata is not affected by installable=0 restrict flag param = { 'errata-restrict-installable': 0, - 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) assert set(REPO_WITH_ERRATA['errata_ids']).issubset( errata_ids ), 'Errata not found in list of installable errata' @@ -747,7 +767,7 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) assert errata['id'] in errata_ids, 'Errata not found in list of applicable errata' # Check search of errata is not affected by applicable=0 restrict flag @@ -756,14 +776,14 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) assert set(REPO_WITH_ERRATA['errata_ids']).issubset( errata_ids ), 'Errata not found in list of applicable errata' # Apply a filter and rule to the CV to hide the RPM, thus making erratum not installable # Make RPM exclude filter - cv_filter = make_content_view_filter( + cv_filter = target_sat.cli_factory.make_content_view_filter( { 'content-view-id': module_cv.id, 'name': 'erratum_restrict_test', @@ -774,17 +794,18 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( } ) - @request.addfinalizer - def cleanup(): - cv_filter_cleanup( + request.addfinalizer( + lambda: cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_entitlement_manifest_org, module_lce, ) + ) # Make rule to hide the RPM that creates the need for the installable erratum - make_content_view_filter_rule( + target_sat.cli_factory.content_view_filter_rule( { 'content-view-id': module_cv.id, 'content-view-filter-id': cv_filter['filter-id'], @@ -793,7 +814,7 @@ def cleanup(): ) # Publish and promote a new version with the filter - cv_publish_promote(module_cv, module_entitlement_manifest_org, module_lce) + cv_publish_promote(target_sat, module_cv, module_entitlement_manifest_org, module_lce) # Check that the installable erratum is no longer present in the list param = { @@ -803,7 +824,7 @@ def cleanup(): 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) assert errata['id'] not in errata_ids, 'Errata not found in list of installable errata' # Check errata still applicable @@ -812,7 +833,7 @@ def cleanup(): 'organization-id': module_entitlement_manifest_org.id, 'per-page': 
PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) + errata_ids = get_errata_ids(target_sat, param) assert errata['id'] in errata_ids, 'Errata not found in list of applicable errata' @@ -822,7 +843,6 @@ def test_host_errata_search_commands( module_entitlement_manifest_org, module_cv, module_lce, - host_collection, errata_hosts, target_sat, ): @@ -849,11 +869,8 @@ def test_host_errata_search_commands( :expectedresults: The hosts are correctly listed for security and bugfix advisories. """ - # note time for later wait_for_tasks include 2 mins margin of safety. - timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) errata = REPO_WITH_ERRATA['errata'] - # Update package on first host so that the security advisory doesn't apply. result = errata_hosts[0].execute(f'yum update -y {errata[0]["new_package"]}') assert result.status == 0, 'Failed to install rpm' @@ -863,20 +880,10 @@ def test_host_errata_search_commands( assert result.status == 0, 'Failed to install rpm' for host in errata_hosts: - timestamp = (datetime.utcnow() - timedelta(minutes=1)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': host.nailgun_host.id}) - # Wait for upload profile event (in case Satellite system slow) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=20, - max_tries=10, - ) + start_and_wait_errata_recalculate(target_sat, host) # Step 1: Search for hosts that require bugfix advisories - result = Host.list( + result = target_sat.cli.Host.list( { 'search': 'errata_status = errata_needed', 'organization-id': module_entitlement_manifest_org.id, @@ -888,7 +895,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 2: Search for hosts that require security advisories - result = Host.list( + result = target_sat.cli.Host.list( { 'search': 'errata_status = security_needed', 'organization-id': module_entitlement_manifest_org.id, @@ -900,7 +907,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname in result # Step 3: Search for hosts that require the specified bugfix advisory - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {errata[1]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -912,7 +919,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 4: Search for hosts that require the specified security advisory - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {errata[0]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -924,7 +931,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname in result # Step 5: Search for hosts that require the specified bugfix package - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[1]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -936,7 +943,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 6: Search for hosts that require the specified security package - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[0]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -949,7 +956,7 @@ def test_host_errata_search_commands( # Step 7: Apply filter and rule to CV to hide RPM, thus making erratum not installable # Make 
RPM exclude filter - cv_filter = make_content_view_filter( + cv_filter = target_sat.cli_factory.make_content_view_filter( { 'content-view-id': module_cv.id, 'name': 'erratum_search_test', @@ -959,18 +966,17 @@ def test_host_errata_search_commands( 'inclusion': 'false', } ) - - @request.addfinalizer - def cleanup(): - cv_filter_cleanup( + request.addfinalizer( + lambda: cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_entitlement_manifest_org, module_lce, ) - + ) # Make rule to exclude the specified bugfix package - make_content_view_filter_rule( + target_sat.cli_factory.content_view_filter_rule( { 'content-view-id': module_cv.id, 'content-view-filter-id': cv_filter['filter-id'], @@ -979,11 +985,11 @@ def cleanup(): ) # Publish and promote a new version with the filter - cv_publish_promote(module_cv, module_entitlement_manifest_org, module_lce) + cv_publish_promote(target_sat, module_cv, module_entitlement_manifest_org, module_lce) # Step 8: Run tests again. Applicable should still be true, installable should now be false. # Search for hosts that require the bugfix package. - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[1]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -995,7 +1001,7 @@ def cleanup(): assert errata_hosts[1].hostname not in result # Search for hosts that require the specified bugfix advisory. - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'installable_errata = {errata[1]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -1008,14 +1014,14 @@ def cleanup(): @pytest.mark.tier3 -@pytest.mark.parametrize('sort_by_date', ('issued', 'updated'), ids=('issued_date', 'updated_date')) +@pytest.mark.parametrize('sort_by_date', ['issued', 'updated'], ids=('issued_date', 'updated_date')) @pytest.mark.parametrize( 'filter_by_org', - ('id', 'name', 'label', None), + ['id', 'name', 'label', None], ids=('org_id', 'org_name', 'org_label', 'no_org_filter'), ) def test_positive_list_filter_by_org_sort_by_date( - module_entitlement_manifest_org, rh_repo, custom_repo, filter_by_org, sort_by_date + module_sca_manifest_org, rh_repo, custom_repo, filter_by_org, sort_by_date, module_target_sat ): """Filter by organization and sort by date. @@ -1032,14 +1038,15 @@ def test_positive_list_filter_by_org_sort_by_date( :expectedresults: Errata are filtered by org and sorted by date. 
""" filter_sort_errata( - module_entitlement_manifest_org, + sat=module_target_sat, + org=module_sca_manifest_org, sort_by_date=sort_by_date, filter_by_org=filter_by_org, ) @pytest.mark.tier3 -def test_positive_list_filter_by_product_id(products_with_repos): +def test_positive_list_filter_by_product_id(target_sat, products_with_repos): """Filter errata by product id :id: 7d06950a-c058-48b3-a384-c3565cbd643f @@ -1053,17 +1060,17 @@ def test_positive_list_filter_by_product_id(products_with_repos): params = [ {'product-id': product.id, 'per-page': PER_PAGE_LARGE} for product in products_with_repos ] - errata_ids = get_errata_ids(*params) + errata_ids = get_errata_ids(target_sat, *params) check_errata(errata_ids) @pytest.mark.tier3 -@pytest.mark.parametrize('filter_by_product', ('id', 'name'), ids=('product_id', 'product_name')) +@pytest.mark.parametrize('filter_by_product', ['id', 'name'], ids=('product_id', 'product_name')) @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'label'), ids=('org_id', 'org_name', 'org_label') + 'filter_by_org', ['id', 'name', 'label'], ids=('org_id', 'org_name', 'org_label') ) def test_positive_list_filter_by_product_and_org( - products_with_repos, filter_by_product, filter_by_org + target_sat, products_with_repos, filter_by_product, filter_by_org ): """Filter errata by product id and Org id @@ -1096,12 +1103,12 @@ def test_positive_list_filter_by_product_and_org( params.append(param) - errata_ids = get_errata_ids(*params) + errata_ids = get_errata_ids(target_sat, *params) check_errata(errata_ids) @pytest.mark.tier3 -def test_negative_list_filter_by_product_name(products_with_repos): +def test_negative_list_filter_by_product_name(products_with_repos, module_target_sat): """Attempt to Filter errata by product name :id: c7a5988b-668f-4c48-bc1e-97cb968a2563 @@ -1115,18 +1122,18 @@ def test_negative_list_filter_by_product_name(products_with_repos): :expectedresults: Error must be returned. :CaseImportance: Low - - :CaseLevel: System """ with pytest.raises(CLIReturnCodeError): - Erratum.list({'product': products_with_repos[0].name, 'per-page': PER_PAGE_LARGE}) + module_target_sat.cli.Erratum.list( + {'product': products_with_repos[0].name, 'per-page': PER_PAGE_LARGE} + ) @pytest.mark.tier3 @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'label'), ids=('org_id', 'org_name', 'org_label') + 'filter_by_org', ['id', 'name', 'label'], ids=('org_id', 'org_name', 'org_label') ) -def test_positive_list_filter_by_org(products_with_repos, filter_by_org): +def test_positive_list_filter_by_org(target_sat, products_with_repos, filter_by_org): """Filter errata by org id, name, or label. :id: de7646be-7ac8-4dbe-8cc3-6959808d78fa @@ -1153,13 +1160,13 @@ def test_positive_list_filter_by_org(products_with_repos, filter_by_org): params.append(param) - errata_ids = get_errata_ids(*params) + errata_ids = get_errata_ids(target_sat, *params) check_errata(errata_ids, by_org=True) @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): +def test_positive_list_filter_by_cve(module_sca_manifest_org, rh_repo, target_sat): """Filter errata by CVE :id: 7791137c-95a7-4518-a56b-766a5680c5fb @@ -1169,146 +1176,44 @@ def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): :Steps: erratum list --cve :expectedresults: Errata is filtered by CVE. 
- """ - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], - 'organization-id': module_entitlement_manifest_org.id, + 'organization-id': module_sca_manifest_org.id, 'product': PRDS['rhel'], 'releasever': '6Server', 'basearch': 'x86_64', } ) - Repository.synchronize( + target_sat.cli.Repository.synchronize( { 'name': REPOS['rhva6']['name'], - 'organization-id': module_entitlement_manifest_org.id, + 'organization-id': module_sca_manifest_org.id, 'product': PRDS['rhel'], } ) - repository_info = Repository.info( + repository_info = target_sat.cli.Repository.info( { 'name': REPOS['rhva6']['name'], - 'organization-id': module_entitlement_manifest_org.id, + 'organization-id': module_sca_manifest_org.id, 'product': PRDS['rhel'], } ) assert REAL_4_ERRATA_ID in { - errata['errata-id'] for errata in Erratum.list({'repository-id': repository_info['id']}) + errata['errata-id'] + for errata in target_sat.cli.Erratum.list({'repository-id': repository_info['id']}) } for errata_cve in REAL_4_ERRATA_CVES: assert REAL_4_ERRATA_ID in { - errata['errata-id'] for errata in Erratum.list({'cve': errata_cve}) - } - - -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_positive_user_permission(products_with_repos): - """Show errata only if the User has permissions to view them - - :id: f350c13b-8cf9-4aa5-8c3a-1c48397ea514 - - :Setup: - - 1. Create two products with one repo each. Sync them. - 2. Make sure that they both have errata. - 3. Create a user with view access on one product and not on the - other. - - :Steps: erratum list --organization-id= - - :expectedresults: Check that the new user is able to see errata for one - product only. - - :BZ: 1403947 - """ - user_password = gen_string('alphanumeric') - user_name = gen_string('alphanumeric') - - product = products_with_repos[3] - org = product.organization - - # get the available permissions - permissions = Filter.available_permissions() - user_required_permissions_names = ['view_products'] - # get the user required permissions ids - user_required_permissions_ids = [ - permission['id'] - for permission in permissions - if permission['name'] in user_required_permissions_names - ] - assert len(user_required_permissions_ids) > 0 - - # create a role - role = make_role({'organization-ids': org.id}) - - # create a filter with the required permissions for role with product - # one only - make_filter( - { - 'permission-ids': user_required_permissions_ids, - 'role-id': role['id'], - 'search': f"name = {product.name}", - } - ) - - # create a new user and assign him the created role permissions - user = make_user( - { - 'admin': False, - 'login': user_name, - 'password': user_password, - 'organization-ids': [org.id], - 'default-organization-id': org.id, + errata['errata-id'] for errata in target_sat.cli.Erratum.list({'cve': errata_cve}) } - ) - User.add_role({'id': user['id'], 'role-id': role['id']}) - - # make sure the user is not admin and has only the permissions assigned - user = User.info({'id': user['id']}) - assert user['admin'] == 'no' - assert set(user['roles']) == {role['name']} - - # try to get organization info - # get the info as admin user first - org_info = Org.info({'id': org.id}) - assert str(org.id) == org_info['id'] - assert org.name == org_info['name'] - - # get the organization info as the created user - with pytest.raises(CLIReturnCodeError) as context: - Org.with_user(user_name, user_password).info({'id': org.id}) - assert 'Missing one of the required permissions: view_organizations' in context.value.stderr - - 
# try to get the erratum products list by organization id only - # ensure that all products erratum are accessible by admin user - admin_org_errata_ids = [ - errata['errata-id'] for errata in Erratum.list({'organization-id': org.id}) - ] - assert REPOS_WITH_ERRATA[2]['errata_id'] in admin_org_errata_ids - assert REPOS_WITH_ERRATA[3]['errata_id'] in admin_org_errata_ids - - assert len(admin_org_errata_ids) == ( - REPOS_WITH_ERRATA[2]['errata_count'] + REPOS_WITH_ERRATA[3]['errata_count'] - ) - - # ensure that the created user see only the erratum product that was - # assigned in permissions - user_org_errata_ids = [ - errata['errata-id'] - for errata in Erratum.with_user(user_name, user_password).list({'organization-id': org.id}) - ] - assert len(user_org_errata_ids) == REPOS_WITH_ERRATA[3]['errata_count'] - assert REPOS_WITH_ERRATA[3]['errata_id'] in user_org_errata_ids - assert REPOS_WITH_ERRATA[2]['errata_id'] not in user_org_errata_ids @pytest.mark.tier3 -def test_positive_check_errata_dates(module_entitlement_manifest_org): +def test_positive_check_errata_dates(module_entitlement_manifest_org, module_target_sat): """Check for errata dates in `hammer erratum list` :id: b19286ae-bdb4-4319-87d0-5d3ff06c5f38 @@ -1321,20 +1226,20 @@ def test_positive_check_errata_dates(module_entitlement_manifest_org): :BZ: 1695163 """ - product = entities.Product(organization=module_entitlement_manifest_org).create() - repo = make_repository( + product = module_target_sat.api.Product(organization=module_entitlement_manifest_org).create() + repo = module_target_sat.cli_factory.make_repository( {'content-type': 'yum', 'product-id': product.id, 'url': REPO_WITH_ERRATA['url']} ) # Synchronize custom repository - Repository.synchronize({'id': repo['id']}) - result = Erratum.list(options={'per-page': '5', 'fields': 'Issued'}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + result = module_target_sat.cli.Erratum.list(options={'per-page': '5', 'fields': 'Issued'}) assert 'issued' in result[0] # Verify any errata ISSUED date from stdout validate_issued_date = datetime.strptime(result[0]['issued'], '%Y-%m-%d').date() assert isinstance(validate_issued_date, date) - result = Erratum.list(options={'per-page': '5', 'fields': 'Updated'}) + result = module_target_sat.cli.Erratum.list(options={'per-page': '5', 'fields': 'Updated'}) assert 'updated' in result[0] # Verify any errata UPDATED date from stdout @@ -1355,7 +1260,7 @@ def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): releasever=None, ) # Sync step because repo is not synced by default - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @@ -1388,36 +1293,98 @@ def new_module_ak(module_entitlement_manifest_org, rh_repo_module_manifest, defa @pytest.fixture def errata_host( - module_entitlement_manifest_org, rhel7_contenthost_module, new_module_ak, target_sat + target_sat, + rhel8_contenthost, + module_entitlement_manifest_org, + module_lce_library, + new_module_ak, ): - """A RHEL77 Content Host that has applicable errata and registered to Library""" - # python-psutil is obsoleted by python2-psutil, so get older python2-psutil for errata test - rhel7_contenthost_module.run(f'rpm -Uvh {settings.repos.epel_repo.url}/{PSUTIL_RPM}') - rhel7_contenthost_module.install_katello_ca(target_sat) - rhel7_contenthost_module.register_contenthost( - module_entitlement_manifest_org.label, new_module_ak.name + """A RHEL8 Content Host 
that has applicable errata and is registered to Library, using Global Registration.""" + org = module_entitlement_manifest_org + cv = target_sat.api.ContentView( + organization=org, + environment=[module_lce_library.id], + ).create() + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': REPO_WITH_ERRATA['url'], + 'organization-id': org.id, + 'lifecycle-environment-id': module_lce_library.id, + 'activationkey-id': new_module_ak.id, + 'content-view-id': cv.id, + } ) - assert rhel7_contenthost_module.nailgun_host.read_json()['subscription_status'] == 0 - rhel7_contenthost_module.install_katello_host_tools() - rhel7_contenthost_module.add_rex_key(satellite=target_sat) - return rhel7_contenthost_module + rhel8_contenthost.register( + activation_keys=new_module_ak.name, + target=target_sat, + org=org, + loc=None, + ) + rhel8_contenthost.add_rex_key(satellite=target_sat) + assert rhel8_contenthost.subscribed + assert rhel8_contenthost.applicable_errata_count == 0 + + rhel8_contenthost.execute(r'yum-config-manager --enable \*') + rhel8_contenthost.execute(r'subscription-manager repos --enable \*') + for errata in REPO_WITH_ERRATA['errata']: + # Remove package if present, old or new. + package_name = errata['package_name'] + result = rhel8_contenthost.execute(f'yum erase -y {package_name}') + if result.status != 0: + pytest.fail(f'Failed to remove {package_name}: {result.stdout} {result.stderr}') + + # Install old package, so that errata apply. + old_package = errata['old_package'] + result = rhel8_contenthost.execute(f'yum install -y {old_package}') + if result.status != 0: + pytest.fail(f'Failed to install {old_package}: {result.stdout} {result.stderr}') + + start_and_wait_errata_recalculate(target_sat, rhel8_contenthost) + assert rhel8_contenthost.applicable_errata_count == 2 + + return rhel8_contenthost @pytest.fixture -def chost(module_entitlement_manifest_org, rhel7_contenthost_module, new_module_ak, target_sat): +def chost( + module_entitlement_manifest_org, + rhel7_contenthost, + module_lce_library, + new_module_ak, + target_sat, +): """A RHEL77 Content Host registered to Library that does not have applicable errata""" - rhel7_contenthost_module.install_katello_ca(target_sat) - rhel7_contenthost_module.register_contenthost( - module_entitlement_manifest_org.label, new_module_ak.name + module_cv = target_sat.api.ContentView( + organization=module_entitlement_manifest_org, + environment=[module_lce_library.id], + ).create() + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': REPO_WITH_ERRATA['url'], + 'organization-id': module_entitlement_manifest_org.id, + 'lifecycle-environment-id': module_lce_library.id, + 'activationkey-id': new_module_ak.id, + 'content-view-id': module_cv.id, + } ) - assert rhel7_contenthost_module.nailgun_host.read_json()['subscription_status'] == 0 - rhel7_contenthost_module.install_katello_host_tools() - return rhel7_contenthost_module + rhel7_contenthost.register( + activation_keys=new_module_ak.name, + target=target_sat, + org=module_entitlement_manifest_org, + loc=None, + ) + assert rhel7_contenthost.subscribed + assert rhel7_contenthost.applicable_errata_count == 0 + assert rhel7_contenthost.nailgun_host.read_json()['subscription_status'] == 2 + rhel7_contenthost.execute(r'subscription-manager repos --enable \*') + rhel7_contenthost.execute(r'yum-config-manager --enable \*') @pytest.mark.tier2 @pytest.mark.no_containers def 
test_apply_errata_using_default_content_view( + errata_host, module_entitlement_manifest_org, target_sat +): """Updating an applicable errata on a host attached to the default content view causes the errata to not be applicable. @@ -1435,33 +1402,30 @@ def test_apply_errata_using_default_content_view(errata_host, target_sat): :CaseImportance: High """ - # check that package errata is applicable - erratum = Host.errata_list({'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + assert errata_host.applicable_errata_count > 0 + # Check that package errata is applicable + erratum_id = REPO_WITH_ERRATA['errata'][0]['id'] + erratum = target_sat.cli.Host.errata_list( + {'host': errata_host.hostname, 'search': f'id = {erratum_id}'} + ) assert len(erratum) == 1 assert erratum[0]['installable'] == 'true' # Update errata from Library, i.e. Default CV - result = JobInvocation.create( + result = target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': f'name = {errata_host.hostname}', - 'inputs': f"errata='{REAL_0_ERRATA_ID}'", - 'organization-id': f'{errata_host.nailgun_host.organization.id}', + 'inputs': f'errata={erratum[0]["erratum-id"]}', + 'organization-id': f'{module_entitlement_manifest_org.id}', } )[1]['id'] assert 'success' in result - timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=15, - max_tries=10, - ) + start_and_wait_errata_recalculate(target_sat, errata_host) # Assert that the erratum is no longer applicable - erratum = Host.errata_list({'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + erratum = target_sat.cli.Host.errata_list( + {'host': errata_host.hostname, 'search': f'id = {erratum_id}'} + ) assert len(erratum) == 0 @@ -1487,56 +1451,35 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe :CaseImportance: High """ # check that package is applicable - applicable_packages = Package.list( + package_name = REPO_WITH_ERRATA['errata'][0]['package_name'] + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={package_name}', } ) - timestamp = (datetime.utcnow()).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=20, - max_tries=15, - ) + + start_and_wait_errata_recalculate(target_sat, errata_host) assert len(applicable_packages) == 1 - assert REAL_RHEL7_0_2_PACKAGE_NAME in applicable_packages[0]['filename'] + assert package_name in applicable_packages[0]['filename'] # Update package from Library, i.e. Default CV - result = JobInvocation.create( + result = target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': f'name = {errata_host.hostname}', - 'inputs': f"errata='{REAL_0_ERRATA_ID}'", + 'inputs': f'errata={REPO_WITH_ERRATA["errata"][0]["id"]}', 'organization-id': f'{errata_host.nailgun_host.organization.id}', } )[1]['id'] assert 'success' in result - # note time for later wait_for_tasks include 2 mins margin of safety. 
- timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - - # Wait for upload profile event (in case Satellite system slow) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=30, - max_tries=10, - ) - + start_and_wait_errata_recalculate(target_sat, errata_host) # Assert that the package is no longer applicable - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={package_name}', } ) assert len(applicable_packages) == 0 @@ -1565,51 +1508,42 @@ def test_downgrade_applicable_package_using_default_content_view(errata_host, ta :CaseImportance: High """ # Update package from Library, i.e. Default CV - errata_host.run(f'yum -y update {REAL_RHEL7_0_2_PACKAGE_NAME}') + errata_host.run(f'yum -y update {FAKE_2_CUSTOM_PACKAGE_NAME}') + start_and_wait_errata_recalculate(target_sat, errata_host) # Assert that the package is not applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={FAKE_2_CUSTOM_PACKAGE_NAME}', } ) assert len(applicable_packages) == 0 - # note time for later wait_for_tasks include 2 mins margin of safety. - # Downgrade package (we can't get it from Library, so get older one from EPEL) - errata_host.run(f'curl -O {settings.repos.epel_repo.url}/{PSUTIL_RPM}') - timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - errata_host.run(f'yum -y downgrade {PSUTIL_RPM}') - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - # Wait for upload profile event (in case Satellite system slow) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=15, - max_tries=10, - ) + + # Downgrade package + errata_host.run(f'yum -y downgrade {FAKE_1_CUSTOM_PACKAGE}') + start_and_wait_errata_recalculate(target_sat, errata_host) # check that package is applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={FAKE_2_CUSTOM_PACKAGE_NAME}', } ) assert len(applicable_packages) == 1 - assert REAL_RHEL7_0_2_PACKAGE_NAME in applicable_packages[0]['filename'] + assert FAKE_2_CUSTOM_PACKAGE_NAME in applicable_packages[0]['filename'] @pytest.mark.tier2 -def test_install_applicable_package_to_registerd_host(chost, target_sat): +def test_install_applicable_package_to_registerd_host(errata_host, target_sat): """Installing an older package to an already registered host should show the newer package and errata as applicable and installable. :id: 519bfe91-cf86-4d6e-94ef-aaf3e5d40a81 - :setup: Register a host to default org with Library + :setup: Register a host to default org with Library, + Remove the newer package version if present :steps: 1. 
Ensure package is not applicable @@ -1622,42 +1556,29 @@ def test_install_applicable_package_to_registerd_host(chost, target_sat): :CaseImportance: Medium """ + errata_host.run(f'yum -y remove {FAKE_2_CUSTOM_PACKAGE_NAME}') + start_and_wait_errata_recalculate(target_sat, errata_host) # Assert that the package is not applicable - Host.errata_recalculate({'host-id': chost.nailgun_host.id}) - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { - 'host-id': chost.nailgun_host.id, + 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={FAKE_2_CUSTOM_PACKAGE_NAME}', } ) assert len(applicable_packages) == 0 - # python-psutil is obsoleted by python2-psutil, so download older python2-psutil for this test - chost.run(f'curl -O {settings.repos.epel_repo.url}/{PSUTIL_RPM}') - # note time for later wait_for_tasks include 2 mins margin of safety. - timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - chost.run(f'yum -y install {PSUTIL_RPM}') - # Wait for upload profile event (in case Satellite system slow) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=15, - max_tries=10, - ) - Host.errata_recalculate({'host-id': chost.nailgun_host.id}) - + errata_host.run(f'yum -y install {FAKE_1_CUSTOM_PACKAGE}') + start_and_wait_errata_recalculate(target_sat, errata_host) # check that package is applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { - 'host-id': chost.nailgun_host.id, + 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={FAKE_2_CUSTOM_PACKAGE_NAME}', } ) assert len(applicable_packages) == 1 - assert REAL_RHEL7_0_2_PACKAGE_NAME in applicable_packages[0]['filename'] + assert FAKE_2_CUSTOM_PACKAGE_NAME in applicable_packages[0]['filename'] @pytest.mark.tier2 @@ -1685,45 +1606,29 @@ def test_downgrading_package_shows_errata_from_library( :CaseImportance: High """ # Update package from Library, i.e. Default CV - errata_host.run(f'yum -y update {REAL_RHEL7_0_2_PACKAGE_NAME}') - # Assert that the package is not applicable - applicable_packages = Package.list( + errata_host.run(f'yum -y install {FAKE_2_CUSTOM_PACKAGE}') + start_and_wait_errata_recalculate(target_sat, errata_host) + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', - 'search': f'name={REAL_RHEL7_0_2_PACKAGE_NAME}', + 'search': f'name={FAKE_2_CUSTOM_PACKAGE_NAME}', } ) assert len(applicable_packages) == 0 - # note time for later wait_for_tasks include 2 mins margin of safety. 
- timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - # Downgrade package (we can't get it from Library, so get older one from EPEL) - errata_host.run(f'curl -O {settings.repos.epel_repo.url}/{PSUTIL_RPM}') - errata_host.run(f'yum -y downgrade {PSUTIL_RPM}') - # Wait for upload profile event (in case Satellite system slow) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - # Wait for upload profile event (in case Satellite system slow) - target_sat.wait_for_tasks( - search_query=( - 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' - f' and started_at >= "{timestamp}"' - ), - search_rate=15, - max_tries=10, - ) - # check that errata is applicable + # Downgrade package + errata_host.run(f'yum -y downgrade {FAKE_1_CUSTOM_PACKAGE}') + start_and_wait_errata_recalculate(target_sat, errata_host) param = { 'errata-restrict-applicable': 1, - 'organization-id': module_manifest_org.id, 'per-page': PER_PAGE_LARGE, } - errata_ids = get_errata_ids(param) - assert REAL_0_ERRATA_ID in errata_ids + errata_ids = get_errata_ids(target_sat, param) + assert settings.repos.yum_6.errata[2] in errata_ids -@pytest.mark.skip_if_open('BZ:1785146') @pytest.mark.tier2 -def test_errata_list_by_contentview_filter(module_entitlement_manifest_org): +def test_errata_list_by_contentview_filter(module_entitlement_manifest_org, module_target_sat): """Hammer command to list errata should take filter ID into consideration. :id: e9355a92-8354-4853-a806-d388ed32d73e @@ -1743,33 +1648,35 @@ def test_errata_list_by_contentview_filter(module_entitlement_manifest_org): :BZ: 1785146 """ - product = entities.Product(organization=module_entitlement_manifest_org).create() - repo = make_repository( + product = module_target_sat.api.Product(organization=module_entitlement_manifest_org).create() + repo = module_target_sat.cli_factory.make_repository( {'content-type': 'yum', 'product-id': product.id, 'url': REPO_WITH_ERRATA['url']} ) - Repository.synchronize({'id': repo['id']}) - lce = entities.LifecycleEnvironment(organization=module_entitlement_manifest_org).create() - cv = entities.ContentView( + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + lce = module_target_sat.api.LifecycleEnvironment( + organization=module_entitlement_manifest_org + ).create() + cv = module_target_sat.api.ContentView( organization=module_entitlement_manifest_org, repository=[repo['id']] ).create() - cv_publish_promote(cv, module_entitlement_manifest_org, lce) + cv_publish_promote(module_target_sat, cv, module_entitlement_manifest_org, lce) errata_count = len( - Erratum.list( + module_target_sat.cli.Erratum.list( { 'organization-id': module_entitlement_manifest_org.id, 'content-view-id': cv.id, } ) ) - cvf = entities.ErratumContentViewFilter(content_view=cv, inclusion=True).create() - errata_id = entities.Errata().search( + cvf = module_target_sat.api.ErratumContentViewFilter(content_view=cv, inclusion=True).create() + errata_id = module_target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.yum_9.errata[0]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cvf, errata=errata_id).create() + module_target_sat.api.ContentViewFilterRule(content_view_filter=cvf, errata=errata_id).create() cv.publish() cv_version_info = cv.read().version[1].read() errata_count_cvf = len( - Erratum.list( + module_target_sat.cli.Erratum.list( { 'organization-id': module_entitlement_manifest_org.id, 'content-view-id': cv.id, diff --git a/tests/foreman/cli/test_fact.py 
b/tests/foreman/cli/test_fact.py index 8a484521ea0..ddaaa477944 100644 --- a/tests/foreman/cli/test_fact.py +++ b/tests/foreman/cli/test_fact.py @@ -4,23 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Fact :Team: Rocket -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ -import pytest from fauxfactory import gen_string - -from robottelo.cli.fact import Fact - +import pytest pytestmark = [pytest.mark.tier1] @@ -30,7 +22,7 @@ @pytest.mark.parametrize( 'fact', ['uptime', 'os::family', 'uptime_seconds', 'memorysize', 'ipaddress'] ) -def test_positive_list_by_name(fact): +def test_positive_list_by_name(fact, module_target_sat): """Test Fact List :id: 83794d97-d21b-4482-9522-9b41053e595f @@ -41,12 +33,12 @@ def test_positive_list_by_name(fact): :BZ: 2161294 """ - facts = Fact().list(options={'search': f'fact={fact}'}) + facts = module_target_sat.cli.Fact().list(options={'search': f'fact={fact}'}) assert facts[0]['fact'] == fact @pytest.mark.parametrize('fact', ['uptime_days', 'memoryfree']) -def test_negative_list_ignored_by_name(fact): +def test_negative_list_ignored_by_name(fact, module_target_sat): """Test Fact List :id: b6375f39-b8c3-4807-b04b-b0e43644441f :expectedresults: Ignored fact is not listed :parametrized: yes @@ -55,14 +47,16 @@ def test_negative_list_ignored_by_name(fact): """ - assert Fact().list(options={'search': f'fact={fact}'}) == [] + assert module_target_sat.cli.Fact().list(options={'search': f'fact={fact}'}) == [] -def test_negative_list_by_name(): +def test_negative_list_by_name(module_target_sat): """Test Fact List failure :id: bd56d27e-59c0-4f35-bd53-2999af7c6946 :expectedresults: Fact List is not displayed """ - assert Fact().list(options={'search': f'fact={gen_string("alpha")}'}) == [] + assert ( + module_target_sat.cli.Fact().list(options={'search': f'fact={gen_string("alpha")}'}) == [] + ) diff --git a/tests/foreman/cli/test_filter.py b/tests/foreman/cli/test_filter.py index ee873349d73..4108ce07d6b 100644 --- a/tests/foreman/cli/test_filter.py +++ b/tests/foreman/cli/test_filter.py @@ -4,47 +4,37 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_role -from robottelo.cli.filter import Filter -from robottelo.cli.role import Role +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='module') -def module_perms(): +def module_perms(module_target_sat): """Return the names of the permissions available for the User resource type.""" - perms = [ + return [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=User"}) + for permission in module_target_sat.cli.Filter.available_permissions( + {"search": "resource_type=User"} + ) ] - return perms -@pytest.fixture(scope='function') -def function_role(): +@pytest.fixture +def function_role(target_sat): """Create a role to which a filter can be assigned.""" - return make_role() + return target_sat.cli_factory.make_role() @pytest.mark.tier1 -def test_positive_create_with_permission(module_perms, function_role): +def test_positive_create_with_permission(module_perms, function_role, target_sat): """Create a filter and assign it some permissions. 
:id: 6da6c5d3-2727-4eb7-aa15-9f7b6f91d3b2 @@ -54,12 +44,14 @@ def test_positive_create_with_permission(module_perms, function_role): :CaseImportance: Critical """ # Assign filter to created role - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) assert set(filter_['permissions'].split(", ")) == set(module_perms) @pytest.mark.tier1 -def test_positive_create_with_org(module_perms, function_role): +def test_positive_create_with_org(module_perms, function_role, target_sat): """Create a filter and assign it some permissions. :id: f6308192-0e1f-427b-a296-b285f6684691 @@ -70,9 +62,9 @@ def test_positive_create_with_org(module_perms, function_role): :CaseImportance: Critical """ - org = make_org() + org = target_sat.cli_factory.make_org() # Assign filter to created role - filter_ = make_filter( + filter_ = target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -85,7 +77,7 @@ def test_positive_create_with_org(module_perms, function_role): @pytest.mark.tier1 -def test_positive_create_with_loc(module_perms, function_role): +def test_positive_create_with_loc(module_perms, function_role, module_target_sat): """Create a filter and assign it some permissions. :id: d7d1969a-cb30-4e97-a9a3-3a4aaf608795 @@ -96,9 +88,9 @@ def test_positive_create_with_loc(module_perms, function_role): :CaseImportance: Critical """ - loc = make_location() + loc = module_target_sat.cli_factory.make_location() # Assign filter to created role - filter_ = make_filter( + filter_ = module_target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -111,7 +103,7 @@ def test_positive_create_with_loc(module_perms, function_role): @pytest.mark.tier1 -def test_positive_delete(module_perms, function_role): +def test_positive_delete(module_perms, function_role, module_target_sat): """Create a filter and delete it afterwards. :id: 97d1093c-0d49-454b-86f6-f5be87b32775 @@ -120,15 +112,17 @@ def test_positive_delete(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) - Filter.delete({'id': filter_['id']}) + filter_ = module_target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) + module_target_sat.cli.Filter.delete({'id': filter_['id']}) with pytest.raises(CLIReturnCodeError): - Filter.info({'id': filter_['id']}) + module_target_sat.cli.Filter.info({'id': filter_['id']}) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_role(module_perms, function_role): +def test_positive_delete_role(module_perms, function_role, target_sat): """Create a filter and delete the role it points at. :id: e2adb6a4-e408-4912-a32d-2bf2c43187d9 @@ -137,19 +131,21 @@ def test_positive_delete_role(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) # A filter depends on a role. Deleting a role implicitly deletes the # filter pointing at it. 
- Role.delete({'id': function_role['id']}) + target_sat.cli.Role.delete({'id': function_role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': function_role['id']}) + target_sat.cli.Role.info({'id': function_role['id']}) with pytest.raises(CLIReturnCodeError): - Filter.info({'id': filter_['id']}) + target_sat.cli.Filter.info({'id': filter_['id']}) @pytest.mark.tier1 -def test_positive_update_permissions(module_perms, function_role): +def test_positive_update_permissions(module_perms, function_role, target_sat): """Create a filter and update its permissions. :id: 3d6a52d8-2f8f-4f97-a155-9b52888af16e @@ -158,18 +154,22 @@ def test_positive_update_permissions(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) new_perms = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=User"}) + for permission in target_sat.cli.Filter.available_permissions( + {"search": "resource_type=User"} + ) ] - Filter.update({'id': filter_['id'], 'permissions': new_perms}) - filter_ = Filter.info({'id': filter_['id']}) + target_sat.cli.Filter.update({'id': filter_['id'], 'permissions': new_perms}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) assert set(filter_['permissions'].split(", ")) == set(new_perms) @pytest.mark.tier1 -def test_positive_update_role(module_perms, function_role): +def test_positive_update_role(module_perms, function_role, target_sat): """Create a filter and assign it to another role. :id: 2950b3a1-2bce-447f-9df2-869b1d10eaf5 @@ -178,16 +178,18 @@ def test_positive_update_role(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) # Update with another role - new_role = make_role() - Filter.update({'id': filter_['id'], 'role-id': new_role['id']}) - filter_ = Filter.info({'id': filter_['id']}) + new_role = target_sat.cli_factory.make_role() + target_sat.cli.Filter.update({'id': filter_['id'], 'role-id': new_role['id']}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) assert filter_['role'] == new_role['name'] @pytest.mark.tier1 -def test_positive_update_org_loc(module_perms, function_role): +def test_positive_update_org_loc(module_perms, function_role, target_sat): """Create a filter and assign it to another organization and location. 
:id: 9bb59109-9701-4ef3-95c6-81f387d372da @@ -198,9 +200,9 @@ def test_positive_update_org_loc(module_perms, function_role): :CaseImportance: Critical """ - org = make_org() - loc = make_location() - filter_ = make_filter( + org = target_sat.cli_factory.make_org() + loc = target_sat.cli_factory.make_location() + filter_ = target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -210,9 +212,9 @@ def test_positive_update_org_loc(module_perms, function_role): } ) # Update org and loc - new_org = make_org() - new_loc = make_location() - Filter.update( + new_org = target_sat.cli_factory.make_org() + new_loc = target_sat.cli_factory.make_location() + target_sat.cli.Filter.update( { 'id': filter_['id'], 'permissions': module_perms, @@ -221,7 +223,7 @@ def test_positive_update_org_loc(module_perms, function_role): 'override': 1, } ) - filter_ = Filter.info({'id': filter_['id']}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) # We expect here only one organization and location assert filter_['organizations'][0] == new_org['name'] assert filter_['locations'][0] == new_loc['name'] diff --git a/tests/foreman/cli/test_foremantask.py b/tests/foreman/cli/test_foremantask.py index 7dcd90ac669..bdce6e63a97 100644 --- a/tests/foreman/cli/test_foremantask.py +++ b/tests/foreman/cli/test_foremantask.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: TasksPlugin :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -48,5 +43,4 @@ def test_positive_tasks_backup(): :CaseImportance: High :CaseAutomation: NotAutomated - """ diff --git a/tests/foreman/cli/test_globalparam.py b/tests/foreman/cli/test_globalparam.py index 4f42b83579c..41bb8ca769b 100644 --- a/tests/foreman/cli/test_globalparam.py +++ b/tests/foreman/cli/test_globalparam.py @@ -4,31 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Parameters :Team: Rocket -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ from functools import partial -import pytest from fauxfactory import gen_string - -from robottelo.cli.globalparam import GlobalParameter - +import pytest pytestmark = [pytest.mark.tier1] @pytest.mark.upgrade -def test_positive_list_delete_by_name(): +def test_positive_list_delete_by_name(module_target_sat): """Test Global Param List :id: 8dd6c4e8-4ec9-4bee-8a04-f5788960973a @@ -41,13 +33,13 @@ def test_positive_list_delete_by_name(): value = f'val-{alphastring()} {alphastring()}' # Create - GlobalParameter().set({'name': name, 'value': value}) + module_target_sat.cli.GlobalParameter().set({'name': name, 'value': value}) # List by name - result = GlobalParameter().list({'search': name}) + result = module_target_sat.cli.GlobalParameter().list({'search': name}) assert len(result) == 1 assert result[0]['value'] == value # Delete - GlobalParameter().delete({'name': name}) - assert len(GlobalParameter().list({'search': name})) == 0 + module_target_sat.cli.GlobalParameter().delete({'name': name}) + assert len(module_target_sat.cli.GlobalParameter().list({'search': name})) == 0 diff --git a/tests/foreman/cli/test_hammer.py b/tests/foreman/cli/test_hammer.py index 8fe12f17627..aa3bfcf4aeb 100644 --- a/tests/foreman/cli/test_hammer.py +++ b/tests/foreman/cli/test_hammer.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hammer :Team: Endeavour -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import io import json 
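A quick orientation note before the next hunks: the recurring conversion across these CLI test modules is to stop importing CLI wrappers and factory helpers at module level and instead resolve them through a Satellite fixture. A minimal before/after sketch, condensed from the test_fact.py hunks above (module_target_sat is the module-scoped Satellite fixture used throughout this diff):

    # before: Fact is bound at import time to one globally configured Satellite
    # from robottelo.cli.fact import Fact
    # facts = Fact().list(options={'search': 'fact=uptime'})

    # after: the entity is resolved from whichever Satellite the fixture provides
    def test_positive_list_by_name(module_target_sat):
        facts = module_target_sat.cli.Fact().list(options={'search': 'fact=uptime'})
        assert facts[0]['fact'] == 'uptime'

The same substitution covers the factory helpers: make_role() and friends become target_sat.cli_factory.make_role(), as the test_filter.py hunks above show.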
@@ -27,10 +22,9 @@ from robottelo.constants import DataFile from robottelo.logging import logger - HAMMER_COMMANDS = json.loads(DataFile.HAMMER_COMMANDS_JSON.read_text()) -pytestmark = [pytest.mark.tier1, pytest.mark.upgrade] +pytestmark = [pytest.mark.tier1] def fetch_command_info(command): @@ -127,6 +121,7 @@ def test_positive_all_options(target_sat): pytest.fail(format_commands_diff(differences)) +@pytest.mark.upgrade def test_positive_disable_hammer_defaults(request, function_product, target_sat): """Verify hammer disable defaults command. @@ -141,6 +136,13 @@ def test_positive_disable_hammer_defaults(request, function_product, target_sat) :BZ: 1640644, 1368173 """ + + @request.addfinalizer + def _finalize(): + target_sat.cli.Defaults.delete({'param-name': 'organization_id'}) + result = target_sat.execute('hammer defaults list') + assert str(function_product.organization.id) not in result.stdout + target_sat.cli.Defaults.add( {'param-name': 'organization_id', 'param-value': function_product.organization.id} ) @@ -159,13 +161,8 @@ def test_positive_disable_hammer_defaults(request, function_product, target_sat) assert result.status == 0 assert function_product.name in result.stdout - @request.addfinalizer - def _finalize(): - target_sat.cli.Defaults.delete({'param-name': 'organization_id'}) - result = target_sat.execute('hammer defaults list') - assert str(function_product.organization.id) not in result.stdout - +@pytest.mark.upgrade def test_positive_check_debug_log_levels(target_sat): """Enabling debug log level in candlepin via hammer logging @@ -181,9 +178,9 @@ def test_positive_check_debug_log_levels(target_sat): """ target_sat.cli.Admin.logging({'all': True, 'level-debug': True}) # Verify value of `log4j.logger.org.candlepin` as `DEBUG` - result = target_sat.execute('grep DEBUG /etc/candlepin/candlepin.conf') + result = target_sat.execute('grep log4j.logger.org.candlepin /etc/candlepin/candlepin.conf') assert result.status == 0 - assert 'log4j.logger.org.candlepin = DEBUG' in result.stdout + assert 'DEBUG' in result.stdout target_sat.cli.Admin.logging({"all": True, "level-production": True}) # Verify value of `log4j.logger.org.candlepin` as `WARN` @@ -193,6 +190,7 @@ def test_positive_check_debug_log_levels(target_sat): @pytest.mark.e2e +@pytest.mark.upgrade def test_positive_hammer_shell(target_sat): """Verify that hammer shell runs a command when input is provided via interactive/bash diff --git a/tests/foreman/cli/test_host.py b/tests/foreman/cli/test_host.py index 211c37b3f10..37864e0f763 100644 --- a/tests/foreman/cli/test_host.py +++ b/tests/foreman/cli/test_host.py @@ -4,60 +4,43 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import re from random import choice +import re +from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_mac, gen_string import pytest +from wait_for import TimedOutError, wait_for import yaml -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr -from fauxfactory import gen_mac -from fauxfactory import gen_string -from wait_for import TimedOutError -from wait_for import wait_for - -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import add_role_permissions -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_fake_host -from robottelo.cli.factory import 
make_host -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.cli.host import Host -from robottelo.cli.host import HostInterface -from robottelo.cli.host import HostTraces -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.package import Package -from robottelo.cli.user import User + from robottelo.config import settings -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_7_CUSTOM_PACKAGE -from robottelo.constants import FAKE_8_CUSTOM_PACKAGE -from robottelo.constants import FAKE_8_CUSTOM_PACKAGE_NAME -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.constants import SM_OVERALL_STATUS +from robottelo.constants import ( + DEFAULT_SUBSCRIPTION_NAME, + FAKE_1_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE_NAME, + FAKE_2_CUSTOM_PACKAGE, + FAKE_7_CUSTOM_PACKAGE, + FAKE_8_CUSTOM_PACKAGE, + FAKE_8_CUSTOM_PACKAGE_NAME, + PRDS, + REPOS, + REPOSET, + SM_OVERALL_STATUS, +) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHostError -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_hosts_list +from robottelo.logging import logger +from robottelo.utils.datafactory import ( + invalid_values_list, + valid_data_list, + valid_hosts_list, +) from robottelo.utils.issue_handlers import is_open @@ -67,12 +50,12 @@ def module_default_proxy(module_target_sat): return module_target_sat.cli.Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] -@pytest.fixture(scope="function") +@pytest.fixture def function_host(target_sat): host_template = target_sat.api.Host() host_template.create_missing() # using CLI to create host - host = make_host( + host = target_sat.cli_factory.make_host( { 'architecture-id': host_template.architecture.id, 'domain-id': host_template.domain.id, @@ -87,10 +70,10 @@ def function_host(target_sat): } ) yield host - Host.delete({'id': host['id']}) + target_sat.cli.Host.delete({'id': host['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_user(target_sat, function_host): """ Returns dict with user object and with password to this user @@ -116,7 +99,7 @@ def function_user(target_sat, function_host): user.delete() -@pytest.fixture(scope='function') +@pytest.fixture def tracer_host(katello_host_tools_tracer_host): # create a custom, rhel version-specific mock-service repo rhelver = katello_host_tools_tracer_host.os_version.major @@ -132,7 +115,7 @@ def tracer_host(katello_host_tools_tracer_host): ) katello_host_tools_tracer_host.execute(f'systemctl start {settings.repos["MOCK_SERVICE_RPM"]}') - yield katello_host_tools_tracer_host + return katello_host_tools_tracer_host def update_smart_proxy(sat, location, smart_proxy): @@ -249,7 +232,7 @@ def parse_cli_entity_list_help_message(help_message): name = name[:-1] # remove colon from name if 'Usage' in name: continue - elif 'Options' in name: + if 'Options' in name: # used together with previous_line when line (message) is appended to previous line options = parse_two_columns(content, options_start_with_dash=True) elif 'field sets' in name: @@ -261,7 +244,7 @@ def parse_cli_entity_list_help_message(help_message): return 
parsed_dict -def test_positive_search_all_field_sets(): +def test_positive_search_all_field_sets(module_target_sat): """All fields in predefined field sets from hammer host list --help message are shown when specified as --fields in hammer host list command Note: host was created, so there will always be at least 1 host @@ -283,12 +266,14 @@ :customerscenario: true """ - new_host = make_fake_host() - host_help_yaml = Host.list(options={'help': ''}, output_format='yaml') + new_host = module_target_sat.cli_factory.make_fake_host() + host_help_yaml = module_target_sat.cli.Host.list(options={'help': ''}, output_format='yaml') host_help = yaml.load(host_help_yaml, yaml.SafeLoader) parsed_dict = parse_cli_entity_list_help_message(host_help[':message']) help_field_sets = parsed_dict['Predefined field sets'] - output_field_sets = Host.list(options={'fields': ','.join(help_field_sets)}) + output_field_sets = module_target_sat.cli.Host.list( + options={'fields': ','.join(help_field_sets)} + ) # get list index of the created host in the output_field_sets [host_idx] = [idx for idx, host in enumerate(output_field_sets) if new_host['id'] == host['id']] @@ -324,7 +309,7 @@ def test_positive_create_and_delete(target_sat, module_lce_library, module_publi 'type=interface,mac={},identifier=eth0,name={},domain_id={},' 'ip={},primary=true,provision=true' ).format(host.mac, gen_string('alpha'), host.domain.id, gen_ipaddr()) - new_host = make_host( + new_host = target_sat.cli_factory.make_host( { 'architecture-id': host.architecture.id, 'content-view-id': module_published_cv.id, @@ -347,14 +332,14 @@ assert ( new_host['content-information']['lifecycle-environment']['name'] == module_lce_library.name ) - host_interface = HostInterface.info( + host_interface = target_sat.cli.HostInterface.info( {'host-id': new_host['id'], 'id': new_host['network-interfaces'][0]['id']} ) assert host_interface['domain'] == host.domain.read().name - Host.delete({'id': new_host['id']}) + target_sat.cli.Host.delete({'id': new_host['id']}) with pytest.raises(CLIReturnCodeError): - Host.info({'id': new_host['id']}) + target_sat.cli.Host.info({'id': new_host['id']}) @pytest.mark.e2e @@ -373,14 +358,14 @@ def test_positive_crud_interface_by_id(target_sat, default_location, default_org domain = target_sat.api.Domain(location=[default_location], organization=[default_org]).create() mac = gen_mac(multicast=False) - host = make_fake_host({'domain-id': domain.id}) - number_of_interfaces = len(HostInterface.list({'host-id': host['id']})) + host = target_sat.cli_factory.make_fake_host({'domain-id': domain.id}) + number_of_interfaces = len(target_sat.cli.HostInterface.list({'host-id': host['id']})) - HostInterface.create( + target_sat.cli.HostInterface.create( {'host-id': host['id'], 'domain-id': domain.id, 'mac': mac, 'type': 'interface'} ) - host = Host.info({'id': host['id']}) - host_interface = HostInterface.info( + host = target_sat.cli.Host.info({'id': host['id']}) + host_interface = target_sat.cli.HostInterface.info( { 'host-id': host['id'], 'id': [ni for ni in host['network-interfaces'] if ni['mac-address'] == mac][0]['id'], @@ -388,13 +373,15 @@ ) assert host_interface['domain'] == domain.name assert host_interface['mac-address'] == mac - assert len(HostInterface.list({'host-id': host['id']})) == number_of_interfaces + 1 + assert ( + 
len(target_sat.cli.HostInterface.list({'host-id': host['id']})) == number_of_interfaces + 1 + ) new_domain = target_sat.api.Domain( location=[default_location], organization=[default_org] ).create() new_mac = gen_mac(multicast=False) - HostInterface.update( + target_sat.cli.HostInterface.update( { 'host-id': host['id'], 'id': host_interface['id'], @@ -402,7 +389,7 @@ 'mac': new_mac, } ) - host_interface = HostInterface.info( + host_interface = target_sat.cli.HostInterface.info( { 'host-id': host['id'], 'id': [ni for ni in host['network-interfaces'] if ni['mac-address'] == mac][0]['id'], @@ -411,15 +398,17 @@ ) assert host_interface['domain'] == new_domain.name assert host_interface['mac-address'] == new_mac - HostInterface.delete({'host-id': host['id'], 'id': host_interface['id']}) - assert len(HostInterface.list({'host-id': host['id']})) == number_of_interfaces + target_sat.cli.HostInterface.delete({'host-id': host['id'], 'id': host_interface['id']}) + assert len(target_sat.cli.HostInterface.list({'host-id': host['id']})) == number_of_interfaces with pytest.raises(CLIReturnCodeError): - HostInterface.info({'host-id': host['id'], 'id': host_interface['id']}) + target_sat.cli.HostInterface.info({'host-id': host['id'], 'id': host_interface['id']}) @pytest.mark.cli_host_create @pytest.mark.tier2 -def test_negative_create_with_content_source(module_lce_library, module_org, module_published_cv): +def test_negative_create_with_content_source( + module_lce_library, module_org, module_published_cv, module_target_sat +): """Attempt to create a host with invalid content source specified :id: d92d6aff-4ad3-467c-88a8-5a5e56614f58 @@ -431,7 +420,7 @@ :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-source-id': gen_integer(10000, 99999), 'content-view-id': module_published_cv.id, @@ -444,7 +433,7 @@ @pytest.mark.cli_host_create @pytest.mark.tier2 def test_negative_update_content_source( - module_default_proxy, module_lce_library, module_org, module_published_cv + module_default_proxy, module_lce_library, module_org, module_published_cv, module_target_sat ): """Attempt to update host's content source with invalid value @@ -459,7 +448,7 @@ :CaseImportance: Medium """ - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( { 'content-source-id': module_default_proxy['id'], 'content-view-id': module_published_cv.id, @@ -468,14 +457,18 @@ } ) with pytest.raises(CLIReturnCodeError): - Host.update({'id': host['id'], 'content-source-id': gen_integer(10000, 99999)}) - host = Host.info({'id': host['id']}) + module_target_sat.cli.Host.update( + {'id': host['id'], 'content-source-id': gen_integer(10000, 99999)} + ) + host = module_target_sat.cli.Host.info({'id': host['id']}) assert host['content-information']['content-source']['name'] == module_default_proxy['name'] @pytest.mark.cli_host_create @pytest.mark.tier1 -def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted_cv): +def test_positive_create_with_lce_and_cv( + module_lce, module_org, module_promoted_cv, module_target_sat +): 
"""Check if host can be created with new lifecycle and new content view @@ -488,7 +481,7 @@ def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted :CaseImportance: Critical """ - new_host = make_fake_host( + new_host = module_target_sat.cli_factory.make_fake_host( { 'content-view-id': module_promoted_cv.id, 'lifecycle-environment-id': module_lce.id, @@ -501,7 +494,9 @@ def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted @pytest.mark.cli_host_create @pytest.mark.tier2 -def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org): +def test_positive_create_with_openscap_proxy_id( + module_default_proxy, module_org, module_target_sat +): """Check if host can be created with OpenSCAP Proxy id :id: 3774ba08-3b18-4e64-b07f-53f6aa0504f3 @@ -510,7 +505,7 @@ def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org :CaseImportance: Medium """ - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( {'organization-id': module_org.id, 'openscap-proxy-id': module_default_proxy['id']} ) assert host['openscap-proxy'] == module_default_proxy['id'] @@ -518,7 +513,9 @@ def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org @pytest.mark.cli_host_create @pytest.mark.tier1 -def test_negative_create_with_name(module_lce_library, module_org, module_published_cv): +def test_negative_create_with_name( + module_lce_library, module_org, module_published_cv, module_target_sat +): """Check if host can be created with random long names :id: f92b6070-b2d1-4e3e-975c-39f1b1096697 @@ -529,7 +526,7 @@ def test_negative_create_with_name(module_lce_library, module_org, module_publis """ name = gen_choice(invalid_values_list()) with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'name': name, 'organization-id': module_org.id, @@ -541,7 +538,7 @@ def test_negative_create_with_name(module_lce_library, module_org, module_publis @pytest.mark.cli_host_create @pytest.mark.tier1 -def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): +def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv, module_target_sat): """Check if host can be created using unpublished cv :id: 9997383d-3c27-4f14-94f9-4b8b51180eb6 @@ -551,7 +548,7 @@ def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, @@ -563,7 +560,7 @@ def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): @pytest.mark.cli_host_create @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_katello_and_openscap_loaded(): +def test_positive_katello_and_openscap_loaded(target_sat): """Verify that command line arguments from both Katello and foreman_openscap plugins are loaded and available at the same time @@ -574,71 +571,19 @@ def test_positive_katello_and_openscap_loaded(): and foreman_openscap are available in help message (note: help is generated dynamically based on apipie cache) - :CaseLevel: System - :customerscenario: true :CaseImportance: Medium :BZ: 1671148 """ - help_output = Host.execute('host update --help') + help_output = target_sat.cli.Host.execute('host update --help') for arg in ['lifecycle-environment[-id]', 'openscap-proxy-id']: assert any( f'--{arg}' in line for 
line in help_output.split('\n') ), f'--{arg} not supported by update subcommand' -@pytest.mark.cli_host_create -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_positive_register_with_no_ak( - module_lce, module_org, module_promoted_cv, rhel7_contenthost, target_sat -): - """Register host to satellite without activation key - - :id: 6a7cedd2-aa9c-4113-a83b-3f0eea43ecb4 - - :expectedresults: Host successfully registered to appropriate org - - :parametrized: yes - - :CaseLevel: System - """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost( - module_org.label, - lce=f'{module_lce.label}/{module_promoted_cv.label}', - ) - assert rhel7_contenthost.subscribed - - -@pytest.mark.cli_host_create -@pytest.mark.tier3 -def test_negative_register_twice(module_ak_with_cv, module_org, rhel7_contenthost, target_sat): - """Attempt to register a host twice to Satellite - - :id: 0af81129-cd69-4fa7-a128-9e8fcf2d03b1 - - :expectedresults: host cannot be registered twice - - :parametrized: yes - - :CaseLevel: System - """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost(module_org.label, module_ak_with_cv.name) - assert rhel7_contenthost.subscribed - result = rhel7_contenthost.register_contenthost( - module_org.label, module_ak_with_cv.name, force=False - ) - # Depending on distro version, successful status may be 0 or - # 1, so we can't verify host wasn't registered by status != 0 - # check. Verifying status == 64 here, which stands for content - # host being already registered. - assert result.status == 64 - - @pytest.mark.cli_host_create @pytest.mark.tier3 def test_positive_list_and_unregister( @@ -652,17 +597,14 @@ def test_positive_list_and_unregister( Unlike content host, host has not disappeared from list of hosts after unregistering. 
:parametrized: yes - - :CaseLevel: System """ - rhel7_contenthost.install_katello_ca(target_sat) - rhel7_contenthost.register_contenthost(module_org.label, module_ak_with_cv.name) + rhel7_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) assert rhel7_contenthost.subscribed - hosts = Host.list({'organization-id': module_org.id}) + hosts = target_sat.cli.Host.list({'organization-id': module_org.id}) assert rhel7_contenthost.hostname in [host['name'] for host in hosts] result = rhel7_contenthost.unregister() assert result.status == 0 - hosts = Host.list({'organization-id': module_org.id}) + hosts = target_sat.cli.Host.list({'organization-id': module_org.id}) assert rhel7_contenthost.hostname in [host['name'] for host in hosts] @@ -682,8 +624,6 @@ def test_positive_list_by_last_checkin( :BZ: 1285992 :parametrized: yes - - :CaseLevel: System """ rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost( @@ -691,7 +631,9 @@ def test_positive_list_by_last_checkin( lce=f'{module_lce.label}/{module_promoted_cv.label}', ) assert rhel7_contenthost.subscribed - hosts = Host.list({'search': 'last_checkin = "Today" or last_checkin = "Yesterday"'}) + hosts = target_sat.cli.Host.list( + {'search': 'last_checkin = "Today" or last_checkin = "Yesterday"'} + ) assert len(hosts) >= 1 assert rhel7_contenthost.hostname in [host['name'] for host in hosts] @@ -708,8 +650,6 @@ def test_positive_list_infrastructure_hosts( :expectedresults: Infrastructure hosts are listed :parametrized: yes - - :CaseLevel: System """ rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost( @@ -717,60 +657,21 @@ def test_positive_list_infrastructure_hosts( lce=f'{module_lce.label}/{module_promoted_cv.label}', ) assert rhel7_contenthost.subscribed - Host.update({'name': target_sat.hostname, 'new-organization-id': module_org.id}) + target_sat.cli.Host.update({'name': target_sat.hostname, 'new-organization-id': module_org.id}) # list satellite hosts - hosts = Host.list({'search': 'infrastructure_facet.foreman=true'}) + hosts = target_sat.cli.Host.list({'search': 'infrastructure_facet.foreman=true'}) assert len(hosts) == 2 if is_open('BZ:1994685') else len(hosts) == 1 hostnames = [host['name'] for host in hosts] assert rhel7_contenthost.hostname not in hostnames assert target_sat.hostname in hostnames # list capsule hosts - hosts = Host.list({'search': 'infrastructure_facet.smart_proxy_id=1'}) + hosts = target_sat.cli.Host.list({'search': 'infrastructure_facet.smart_proxy_id=1'}) hostnames = [host['name'] for host in hosts] assert len(hosts) == 2 if is_open('BZ:1994685') else len(hosts) == 1 assert rhel7_contenthost.hostname not in hostnames assert target_sat.hostname in hostnames -@pytest.mark.skip_if_not_set('libvirt') -@pytest.mark.cli_host_create -@pytest.mark.libvirt_discovery -@pytest.mark.on_premises_provisioning -@pytest.mark.tier1 -def test_positive_create_using_libvirt_without_mac(target_sat, module_location, module_org): - """Create a libvirt host and not specify a MAC address. 
- - :id: b003faa9-2810-4176-94d2-ea84bed248eb - - :expectedresults: Host is created - - :CaseImportance: Critical - """ - compute_resource = target_sat.api.LibvirtComputeResource( - url=f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system', - organization=[module_org.id], - location=[module_location.id], - ).create() - host = target_sat.api.Host(organization=module_org.id, location=module_location.id) - host.create_missing() - result = make_host( - { - 'architecture-id': host.architecture.id, - 'compute-resource-id': compute_resource.id, - 'domain-id': host.domain.id, - 'location-id': host.location.id, - 'medium-id': host.medium.id, - 'name': host.name, - 'operatingsystem-id': host.operatingsystem.id, - 'organization-id': host.organization.id, - 'partition-table-id': host.ptable.id, - 'root-password': host.root_pass, - } - ) - assert result['name'] == host.name + '.' + host.domain.name - Host.delete({'id': result['id']}) - - @pytest.mark.cli_host_create @pytest.mark.tier2 def test_positive_create_inherit_lce_cv( @@ -784,8 +685,6 @@ def test_positive_create_inherit_lce_cv( :expectedresults: Host's lifecycle environment and content view match the ones specified in hostgroup - :CaseLevel: Integration - :BZ: 1391656 """ hostgroup = target_sat.api.HostGroup( @@ -793,7 +692,9 @@ def test_positive_create_inherit_lce_cv( lifecycle_environment=module_lce_library, organization=[module_org], ).create() - host = make_fake_host({'hostgroup-id': hostgroup.id, 'organization-id': module_org.id}) + host = target_sat.cli_factory.make_fake_host( + {'hostgroup-id': hostgroup.id, 'organization-id': module_org.id} + ) assert ( int(host['content-information']['lifecycle-environment']['id']) == hostgroup.lifecycle_environment.id @@ -811,8 +712,6 @@ def test_positive_create_inherit_nested_hostgroup(target_sat): :expectedresults: Host created successfully using host group title - :CaseLevel: System - :customerscenario: true :BZ: 1436162 @@ -848,7 +747,7 @@ def test_positive_create_inherit_nested_hostgroup(target_sat): ).create() nested_hostgroups.append(nested_hg) - host = make_host( + host = target_sat.cli_factory.make_host( { 'hostgroup-title': f'{parent_hostgroups[0].name}/{nested_hostgroups[0].name}', 'location-id': options.location.id, @@ -872,9 +771,7 @@ def test_positive_list_with_nested_hostgroup(target_sat): :expectedresults: Host is successfully listed and has both parent and nested host groups names in its hostgroup parameter - :BZ: 1427554 - - :CaseLevel: System + :BZ: 1427554, 1955421 """ options = target_sat.api.Host() options.create_missing() @@ -886,11 +783,13 @@ def test_positive_list_with_nested_hostgroup(target_sat): content_view.publish() content_view.read().version[0].promote(data={'environment_ids': lce.id, 'force': False}) parent_hg = target_sat.api.HostGroup( - name=parent_hg_name, organization=[options.organization] + name=parent_hg_name, + organization=[options.organization], + content_view=content_view, + ptable=options.ptable, ).create() nested_hg = target_sat.api.HostGroup( architecture=options.architecture, - content_view=content_view, domain=options.domain, lifecycle_environment=lce, location=[options.location], @@ -899,9 +798,8 @@ def test_positive_list_with_nested_hostgroup(target_sat): operatingsystem=options.operatingsystem, organization=[options.organization], parent=parent_hg, - ptable=options.ptable, ).create() - make_host( + target_sat.cli_factory.make_host( { 'hostgroup-id': nested_hg.id, 'location-id': options.location.id, @@ -909,8 +807,18 @@ def 
test_positive_list_with_nested_hostgroup(target_sat): 'name': host_name, } ) - hosts = Host.list({'organization-id': options.organization.id}) + hosts = target_sat.cli.Host.list({'organization-id': options.organization.id}) assert f'{parent_hg_name}/{nested_hg_name}' == hosts[0]['host-group'] + host = target_sat.cli.Host.info({'id': hosts[0]['id']}) + logger.info(f'Host info: {host}') + assert host['operating-system']['medium'] == options.medium.name + assert host['operating-system']['partition-table'] == options.ptable.name # inherited + if not is_open('BZ:2215294') or not target_sat.is_stream: + assert 'id' in host['content-information']['lifecycle-environment'] + assert int(host['content-information']['lifecycle-environment']['id']) == int(lce.id) + assert int(host['content-information']['content-view']['id']) == int( + content_view.id + ) # inherited @pytest.mark.cli_host_create @@ -939,9 +847,7 @@ def test_negative_create_with_incompatible_pxe_loader(): 2. Files not deployed on TFTP 3. Host not created - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ @@ -989,7 +895,7 @@ def test_positive_update_parameters_by_name( organization=[organization], operatingsystem=[new_os], ).create() - Host.update( + target_sat.cli.Host.update( { 'architecture': module_architecture.name, 'domain': new_domain.name, @@ -1001,7 +907,7 @@ 'new-location-id': new_loc.id, } ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert '{}.{}'.format(new_name, host['network']['domain']) == host['name'] assert host['location'] == new_loc.name assert host['network']['mac'] == new_mac @@ -1013,7 +919,7 @@ @pytest.mark.tier1 @pytest.mark.cli_host_update -def test_negative_update_name(function_host): +def test_negative_update_name(function_host, target_sat): """A host cannot be updated with an invalid or empty name :id: e8068d2a-6a51-4627-908b-60a516c67032 @@ -1024,14 +930,14 @@ """ new_name = gen_choice(invalid_values_list()) with pytest.raises(CLIReturnCodeError): - Host.update({'id': function_host['id'], 'new-name': new_name}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.update({'id': function_host['id'], 'new-name': new_name}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert '{}.{}'.format(new_name, host['network']['domain']).lower() != host['name'] @pytest.mark.tier1 @pytest.mark.cli_host_update -def test_negative_update_mac(function_host): +def test_negative_update_mac(function_host, target_sat): """A host cannot be updated with an invalid or empty MAC address :id: 2f03032d-789d-419f-9ff2-a6f3561444da @@ -1042,26 +948,26 @@ """ new_mac = gen_choice(invalid_values_list()) with pytest.raises(CLIReturnCodeError): - Host.update({'id': function_host['id'], 'mac': new_mac}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.update({'id': function_host['id'], 'mac': new_mac}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['network']['mac'] != new_mac @pytest.mark.tier2 @pytest.mark.cli_host_update -def test_negative_update_arch(function_host, module_architecture): +def test_negative_update_arch(function_host, module_architecture, target_sat): """A host cannot be updated with an architecture that does not belong to the host's operating system :id: 
a86524da-8caf-472b-9a3d-17a4385c3a18 :expectedresults: A host is not updated - - :CaseLevel: Integration """ with pytest.raises(CLIReturnCodeError): - Host.update({'architecture': module_architecture.name, 'id': function_host['id']}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.update( + {'architecture': module_architecture.name, 'id': function_host['id']} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['operating-system']['architecture'] != module_architecture.name @@ -1074,8 +980,6 @@ def test_negative_update_os(target_sat, function_host, module_architecture): :id: ff13d2af-e54a-4daf-a24d-7ec930b4fbbe :expectedresults: A host is not updated - - :CaseLevel: Integration """ p_table = function_host['operating-system']['partition-table'] p_table = target_sat.api.PartitionTable().search(query={'search': f'name="{p_table}"'})[0] @@ -1086,14 +990,14 @@ def test_negative_update_os(target_sat, function_host, module_architecture): ptable=[p_table.id], ).create() with pytest.raises(CLIReturnCodeError): - Host.update( + target_sat.cli.Host.update( { 'architecture': module_architecture.name, 'id': function_host['id'], 'operatingsystem': new_os.title, } ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['operating-system']['operating-system'] != new_os.title @@ -1105,7 +1009,7 @@ def test_hammer_host_info_output(target_sat, module_user): :id: 03468516-0ebb-11eb-8ad8-0c7a158cbff4 - :Steps: + :steps: 1. Update the host with any owner 2. Get host info by running `hammer host info` 3. Create new user and update his location and organization based on the hosts @@ -1121,25 +1025,27 @@ def test_hammer_host_info_output(target_sat, module_user): user = target_sat.api.User().search( query={'search': f'login={settings.server.admin_username}'} )[0] - Host.update({'owner': settings.server.admin_username, 'owner-type': 'User', 'id': '1'}) - result_info = Host.info(options={'id': '1', 'fields': 'Additional info'}) + target_sat.cli.Host.update( + {'owner': settings.server.admin_username, 'owner-type': 'User', 'id': '1'} + ) + result_info = target_sat.cli.Host.info(options={'id': '1', 'fields': 'Additional info'}) assert int(result_info['additional-info']['owner-id']) == user.id - host = Host.info({'id': '1'}) - User.update( + host = target_sat.cli.Host.info({'id': '1'}) + target_sat.cli.User.update( { 'id': module_user.id, 'organizations': [host['organization']], 'locations': [host['location']], } ) - Host.update({'owner-id': module_user.id, 'id': '1'}) - result_info = Host.info(options={'id': '1', 'fields': 'Additional info'}) + target_sat.cli.Host.update({'owner-id': module_user.id, 'id': '1'}) + result_info = target_sat.cli.Host.info(options={'id': '1', 'fields': 'Additional info'}) assert int(result_info['additional-info']['owner-id']) == module_user.id @pytest.mark.cli_host_parameter @pytest.mark.tier1 -def test_positive_parameter_crud(function_host): +def test_positive_parameter_crud(function_host, target_sat): """Add, update and remove host parameter with valid name. 
:id: 76034424-cf18-4ced-916b-ee9798c311bc @@ -1151,26 +1057,28 @@ def test_positive_parameter_crud(function_host): """ name = next(iter(valid_data_list())) value = valid_data_list()[name] - Host.set_parameter({'host-id': function_host['id'], 'name': name, 'value': value}) - host = Host.info({'id': function_host['id']}) - assert name in host['parameters'].keys() + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': name, 'value': value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) + assert name in host['parameters'] assert value == host['parameters'][name] new_value = valid_data_list()[name] - Host.set_parameter({'host-id': host['id'], 'name': name, 'value': new_value}) - host = Host.info({'id': host['id']}) - assert name in host['parameters'].keys() + target_sat.cli.Host.set_parameter({'host-id': host['id'], 'name': name, 'value': new_value}) + host = target_sat.cli.Host.info({'id': host['id']}) + assert name in host['parameters'] assert new_value == host['parameters'][name] - Host.delete_parameter({'host-id': host['id'], 'name': name}) - host = Host.info({'id': host['id']}) - assert name not in host['parameters'].keys() + target_sat.cli.Host.delete_parameter({'host-id': host['id'], 'name': name}) + host = target_sat.cli.Host.info({'id': host['id']}) + assert name not in host['parameters'] # -------------------------- HOST PARAMETER SCENARIOS ------------------------- @pytest.mark.cli_host_parameter @pytest.mark.tier1 -def test_negative_add_parameter(function_host): +def test_negative_add_parameter(function_host, target_sat): """Try to add host parameter with different invalid names. :id: 473f8c3f-b66e-4526-88af-e139cc3dabcb @@ -1182,15 +1090,15 @@ def test_negative_add_parameter(function_host): """ name = gen_choice(invalid_values_list()).lower() with pytest.raises(CLIReturnCodeError): - Host.set_parameter( + target_sat.cli.Host.set_parameter( { 'host-id': function_host['id'], 'name': name, 'value': gen_string('alphanumeric'), } ) - host = Host.info({'id': function_host['id']}) - assert name not in host['parameters'].keys() + host = target_sat.cli.Host.info({'id': function_host['id']}) + assert name not in host['parameters'] @pytest.mark.cli_host_parameter @@ -1217,19 +1125,21 @@ def test_negative_view_parameter_by_non_admin_user(target_sat, function_host, fu """ param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, 'Organization': {'permissions': ['view_organizations']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) - host = Host.with_user( + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + host = target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).info({'id': host['id']}) assert not host.get('parameters') @@ -1260,11 +1170,13 @@ def test_positive_view_parameter_by_non_admin_user(target_sat, function_host, fu """ 
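# Shared flow for the non-admin parameter tests: set the parameter as admin,
# create a role carrying only the view permissions under test, grant it to the
# user from function_user, then re-read the host through Host.with_user() to
# verify what that user can and cannot see.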
param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, @@ -1272,8 +1184,8 @@ def test_positive_view_parameter_by_non_admin_user(target_sat, function_host, fu 'Parameter': {'permissions': ['view_params']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) - host = Host.with_user( + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + host = target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).info({'id': host['id']}) assert param_name in host['parameters'] @@ -1305,11 +1217,13 @@ def test_negative_edit_parameter_by_non_admin_user(target_sat, function_host, fu """ param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, @@ -1317,21 +1231,21 @@ def test_negative_edit_parameter_by_non_admin_user(target_sat, function_host, fu 'Parameter': {'permissions': ['view_params']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) param_new_value = gen_string('alphanumeric') with pytest.raises(CLIReturnCodeError): - Host.with_user( + target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).set_parameter( {'host-id': function_host['id'], 'name': param_name, 'value': param_new_value} ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value @pytest.mark.cli_host_parameter @pytest.mark.tier2 -def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host): +def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host, target_sat): """Check that host parameter value with multi-line and spaces is correctly restored from yaml format @@ -1343,8 +1257,6 @@ def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host): from yaml format :BZ: 1315282 - - :CaseLevel: Integration """ param_name = gen_string('alpha').lower() # long string that should be escaped and affected by line break with @@ -1355,15 +1267,17 @@ def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host): 'account include password-auth' ) # count parameters of a host - response = Host.info( + 
response = target_sat.cli.Host.info( {'id': function_host['id']}, output_format='yaml', return_raw_response=True ) assert response.status == 0 yaml_content = yaml.load(response.stdout, yaml.SafeLoader) host_initial_params = yaml_content.get('Parameters') # set parameter - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - response = Host.info( + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + response = target_sat.cli.Host.info( {'id': function_host['id']}, output_format='yaml', return_raw_response=True ) assert response.status == 0 @@ -1410,9 +1324,7 @@ 6. GRUB config changes the boot order (boot local first) 7. Hosts boots straight to RHEL after reboot (step #4) - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ @@ -1448,9 +1360,7 @@ 6. GRUB config changes the boot order (boot local first) 7. Hosts boots straight to RHEL after reboot (step #4) - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ @@ -1489,9 +1399,7 @@ 7. Hosts boots straight to RHEL after reboot (step #4) - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ @@ -1532,9 +1440,7 @@ 7. Hosts boots straight to RHEL after reboot (step #4) - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ @@ -1567,24 +1473,16 @@ :expectedresults: Host is provisioned - :CaseAutomation: NotAutomated - - :CaseLevel: System + :CaseAutomation: NotAutomated """ -@pytest.fixture(scope="function") +@pytest.fixture def setup_custom_repo(target_sat, module_org, katello_host_tools_host, request): """Create custom repository content""" - def restore_sca_setting(): - """Restore the original SCA setting for module_org""" - module_org.sca_enable() if sca_enabled else module_org.sca_disable() - - if module_org.sca_eligible().get('simple_content_access_eligible', False): - sca_enabled = module_org.simple_content_access - module_org.sca_disable() - request.addfinalizer(restore_sca_setting) + sca_enabled = module_org.simple_content_access + module_org.sca_disable() # get package details details = {} @@ -1612,6 +1510,7 @@ url=settings.repos[custom_repo].url, ).create() custom_repo.sync() + subs = target_sat.api.Subscription(organization=module_org, name=prod.name).search() assert len(subs), f'Subscription for sat client product: {prod.name} was not found.' custom_sub = subs[0] @@ -1623,12 +1522,20 @@ "subscriptions": [{"id": custom_sub.id, "quantity": 1}], } ) + # make sure repo is enabled + katello_host_tools_host.enable_repo( + f'{module_org.name}_{prod.name}_{custom_repo.name}', force=True + ) # refresh repository metadata katello_host_tools_host.subscription_manager_list_repos() - return details + yield details + if sca_enabled: + module_org.sca_enable() -@pytest.fixture(scope="function") +@pytest.fixture def yum_security_plugin(katello_host_tools_host): """Enable yum-security-plugin if the distro version requires it. Rhel6 yum version does not support updating of a specific advisory out of the box. 
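One note on the setup_custom_repo fixture above: pytest treats any function containing yield as a generator fixture, so a branch that returns instead of yielding produces a fixture that never yields a value and errors at setup. Keeping a single unconditional yield and making only the cleanup conditional avoids that; a minimal sketch of the safe shape under the same SCA assumptions (the fixture name and the details placeholder are hypothetical):

    import pytest

    @pytest.fixture
    def custom_repo_details(module_org):
        sca_enabled = module_org.simple_content_access
        module_org.sca_disable()
        details = {}  # stand-in for the repo/package details assembled above
        yield details  # always yield exactly once
        if sca_enabled:
            module_org.sca_enable()  # conditional work goes after the yield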
@@ -1646,7 +1553,9 @@ def yum_security_plugin(katello_host_tools_host): @pytest.mark.e2e @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_report_package_installed_removed(katello_host_tools_host, setup_custom_repo): +def test_positive_report_package_installed_removed( + katello_host_tools_host, setup_custom_repo, target_sat +): """Ensure installed/removed package is reported to satellite :id: fa5dc238-74c3-4c8a-aa6f-e0a91ba543e3 @@ -1668,22 +1577,20 @@ def test_positive_report_package_installed_removed(katello_host_tools_host, setu :BZ: 1463809 :parametrized: yes - - :CaseLevel: System """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 1 assert installed_packages[0]['nvra'] == setup_custom_repo["package"] result = client.run(f'yum remove -y {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 0 @@ -1691,7 +1598,7 @@ def test_positive_report_package_installed_removed(katello_host_tools_host, setu @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_package_applicability(katello_host_tools_host, setup_custom_repo): +def test_positive_package_applicability(katello_host_tools_host, setup_custom_repo, target_sat): """Ensure packages applicability is functioning properly :id: d283b65b-19c1-4eba-87ea-f929b0ee4116 @@ -1714,16 +1621,14 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re :BZ: 1463809 :parametrized: yes - - :CaseLevel: System """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 applicable_packages, _ = wait_for( - lambda: Package.list( + lambda: target_sat.cli.Package.list( { 'host-id': host_info['id'], 'packages-restrict-applicable': 'true', @@ -1741,7 +1646,7 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re client.run(f'yum install -y {setup_custom_repo["new_package"]}') result = client.run(f'rpm -q {setup_custom_repo["new_package"]}') assert result.status == 0 - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': host_info['id'], 'packages-restrict-applicable': 'true', @@ -1757,7 +1662,7 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re @pytest.mark.pit_server @pytest.mark.tier3 def test_positive_erratum_applicability( - katello_host_tools_host, setup_custom_repo, yum_security_plugin + katello_host_tools_host, setup_custom_repo, yum_security_plugin, target_sat ): """Ensure erratum applicability is functioning properly @@ -1778,16 +1683,14 @@ def test_positive_erratum_applicability( :BZ: 1463809,1740790 :parametrized: yes - - :CaseLevel: System """ client = 
katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') client.subscription_manager_list_repos() applicable_errata, _ = wait_for( - lambda: Host.errata_list({'host-id': host_info['id']}), + lambda: target_sat.cli.Host.errata_list({'host-id': host_info['id']}), handle_exception=True, fail_condition=[], timeout=120, @@ -1809,23 +1712,23 @@ def test_positive_erratum_applicability( lambda: setup_custom_repo["security_errata"] not in [ errata['erratum-id'] - for errata in Host.errata_list({'host-id': host_info['id']}) + for errata in target_sat.cli.Host.errata_list({'host-id': host_info['id']}) if errata['installable'] == 'true' ], handle_exception=True, timeout=300, delay=5, ) - except TimedOutError: + except TimedOutError as err: raise TimedOutError( f"Timed out waiting for erratum \"{setup_custom_repo['security_errata']}\"" " to disappear from the list" - ) + ) from err @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_repo): +def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_repo, target_sat): """Apply security erratum to a host :id: 4d1095c8-d354-42ac-af44-adf6dbb46deb @@ -1833,8 +1736,6 @@ def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_r :expectedresults: erratum is recognized by the `yum update --security` command on client - :CaseLevel: System - :customerscenario: true :BZ: 1420671 @@ -1842,12 +1743,12 @@ def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_r :parametrized: yes """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["new_package"]}') client.run(f'yum downgrade -y {setup_custom_repo["package_name"]}') # Check that host has applicable errata host_erratum, _ = wait_for( - lambda: Host.errata_list({'host-id': host_info['id']})[0], + lambda: target_sat.cli.Host.errata_list({'host-id': host_info['id']})[0], handle_exception=True, timeout=120, delay=5, @@ -1873,15 +1774,13 @@ def test_positive_install_package_via_rex( :expectedresults: Package was installed - :CaseLevel: System - :parametrized: yes """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.configure_rex(satellite=target_sat, org=module_org, register=False) # Apply errata to the host collection using job invocation - JobInvocation.create( + target_sat.cli.JobInvocation.create( { 'feature': 'katello_package_install', 'search-query': f'name ~ {client.hostname}', @@ -1891,18 +1790,17 @@ def test_positive_install_package_via_rex( ) result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 1 # -------------------------- HOST SUBSCRIPTION SUBCOMMAND FIXTURES -------------------------- -@pytest.mark.skip_if_not_set('clients') -@pytest.fixture(scope="function") +@pytest.fixture def host_subscription_client(rhel7_contenthost, target_sat): 
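# no teardown is needed here, so the fixture returns the prepared host instead
# of yielding it (a yield is only required when cleanup must run afterwards)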
rhel7_contenthost.install_katello_ca(target_sat) - yield rhel7_contenthost + return rhel7_contenthost @pytest.fixture @@ -1923,7 +1821,12 @@ def ak_with_subscription( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_positive_register( - module_org, module_promoted_cv, module_lce, module_ak_with_cv, host_subscription_client + module_org, + module_promoted_cv, + module_lce, + module_ak_with_cv, + host_subscription_client, + target_sat, ): """Attempt to register a host @@ -1932,17 +1835,15 @@ def test_positive_register( :expectedresults: host successfully registered :parametrized: yes - - :CaseLevel: System """ - hosts = Host.list( + hosts = target_sat.cli.Host.list( { 'organization-id': module_org.id, 'search': host_subscription_client.hostname, } ) assert len(hosts) == 0 - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -1950,18 +1851,18 @@ def test_positive_register( 'name': host_subscription_client.hostname, } ) - hosts = Host.list( + hosts = target_sat.cli.Host.list( { 'organization-id': module_org.id, 'search': host_subscription_client.hostname, } ) assert len(hosts) > 0 - host = Host.info({'id': hosts[0]['id']}) + host = target_sat.cli.Host.info({'id': hosts[0]['id']}) assert host['name'] == host_subscription_client.hostname # note: when not registered the following command lead to exception, # see unregister - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': module_ak_with_cv.id, @@ -1982,6 +1883,7 @@ def test_positive_attach( module_rhst_repo, default_subscription, host_subscription_client, + target_sat, ): """Attempt to attach a subscription to host @@ -1995,12 +1897,10 @@ def test_positive_attach( enabled, and repository package installed :parametrized: yes - - :CaseLevel: System """ # create an activation key without subscriptions # register the client host - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2008,13 +1908,13 @@ def test_positive_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, activation_key=module_ak_with_cv.name ) assert host_subscription_client.subscribed # attach the subscription to host - Host.subscription_attach( + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': default_subscription.id, @@ -2038,6 +1938,7 @@ def test_positive_attach_with_lce( module_rhst_repo, default_subscription, host_subscription_client, + target_sat, ): """Attempt to attach a subscription to host, registered by lce @@ -2051,8 +1952,6 @@ def test_positive_attach_with_lce( repository enabled, and repository package installed :parametrized: yes - - :CaseLevel: System """ host_subscription_client.register_contenthost( module_org.name, @@ -2060,8 +1959,8 @@ def test_positive_attach_with_lce( auto_attach=False, ) assert host_subscription_client.subscribed - host = Host.info({'name': host_subscription_client.hostname}) - Host.subscription_attach( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': 
default_subscription.id, @@ -2079,7 +1978,7 @@ def test_positive_attach_with_lce( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_negative_without_attach( - module_org, module_promoted_cv, module_lce, host_subscription_client + module_org, module_promoted_cv, module_lce, host_subscription_client, target_sat ): """Register content host from satellite, register client to uuid of that content host, as there was no attach on the client, @@ -2090,10 +1989,8 @@ def test_negative_without_attach( :expectedresults: repository list is empty :parametrized: yes - - :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2101,7 +1998,7 @@ def test_negative_without_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, lce=None, # required, to jump into right branch in register_contenthost method @@ -2129,15 +2026,13 @@ def test_negative_without_attach_with_lce( :expectedresults: repository not enabled on host :parametrized: yes - - :CaseLevel: System """ content_view = target_sat.api.ContentView(organization=function_org).create() ak = target_sat.api.ActivationKey( environment=function_lce, organization=function_org, ).create() - setup_org_for_a_rh_repo( + target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -2184,6 +2079,7 @@ def test_positive_remove( ak_with_subscription, default_subscription, host_subscription_client, + target_sat, ): """Attempt to remove a subscription from content host @@ -2192,10 +2088,8 @@ def test_positive_remove( :expectedresults: subscription successfully removed from host :parametrized: yes - - :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2203,8 +2097,8 @@ def test_positive_remove( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) - host_subscriptions = ActivationKey.subscriptions( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2216,13 +2110,13 @@ def test_positive_remove( host_subscription_client.register_contenthost( module_org.name, activation_key=ak_with_subscription.name ) - Host.subscription_attach( + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': default_subscription.id, } ) - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2231,13 +2125,13 @@ def test_positive_remove( output_format='json', ) assert default_subscription.name in [sub['name'] for sub in host_subscriptions] - Host.subscription_remove( + target_sat.cli.Host.subscription_remove( { 'host-id': host['id'], 'subscription-id': default_subscription.id, } ) - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2257,6 +2151,7 @@ def 
test_positive_auto_attach( module_rhst_repo, ak_with_subscription, host_subscription_client, + target_sat, ): """Attempt to auto attach a subscription to content host @@ -2266,10 +2161,8 @@ def test_positive_auto_attach( repository enabled, and repository package installed :parametrized: yes - - :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2277,11 +2170,11 @@ def test_positive_auto_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, activation_key=ak_with_subscription.name ) - Host.subscription_auto_attach({'host-id': host['id']}) + target_sat.cli.Host.subscription_auto_attach({'host-id': host['id']}) host_subscription_client.enable_repo(module_rhst_repo) # ensure that katello agent can be installed try: @@ -2293,7 +2186,7 @@ def test_positive_auto_attach( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_positive_unregister_host_subscription( - module_org, module_rhst_repo, ak_with_subscription, host_subscription_client + module_org, module_rhst_repo, ak_with_subscription, host_subscription_client, target_sat ): """Attempt to unregister host subscription @@ -2302,8 +2195,6 @@ def test_positive_unregister_host_subscription( :expectedresults: host subscription is unregistered :parametrized: yes - - :CaseLevel: System """ # register the host client host_subscription_client.register_contenthost( @@ -2314,8 +2205,8 @@ def test_positive_unregister_host_subscription( host_subscription_client.run('subscription-manager attach --auto') host_subscription_client.enable_repo(module_rhst_repo) assert host_subscription_client.subscribed - host = Host.info({'name': host_subscription_client.hostname}) - host_subscriptions = ActivationKey.subscriptions( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2324,11 +2215,11 @@ def test_positive_unregister_host_subscription( output_format='json', ) assert len(host_subscriptions) > 0 - Host.subscription_unregister({'host': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_unregister({'host': host_subscription_client.hostname}) with pytest.raises(CLIReturnCodeError): # raise error that the host was not registered by # subscription-manager register - ActivationKey.subscriptions( + target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2360,8 +2251,6 @@ def test_syspurpose_end_to_end( :CaseImportance: Critical :parametrized: yes - - :CaseLevel: System """ # Create an activation key with test values purpose_addons = "test-addon1, test-addon2" @@ -2374,7 +2263,7 @@ def test_syspurpose_end_to_end( purpose_usage="test-usage", service_level="Self-Support", ).create() - ActivationKey.add_subscription( + target_sat.cli.ActivationKey.add_subscription( { 'organization-id': module_org.id, 'id': activation_key.id, @@ -2388,14 +2277,14 @@ def test_syspurpose_end_to_end( assert host_subscription_client.subscribed host_subscription_client.run('subscription-manager attach --auto') host_subscription_client.enable_repo(module_rhst_repo) - host = Host.info({'name': 
host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) # Assert system purpose values are set in the host as expected assert host['subscription-information']['system-purpose']['purpose-addons'] == purpose_addons assert host['subscription-information']['system-purpose']['purpose-role'] == "test-role" assert host['subscription-information']['system-purpose']['purpose-usage'] == "test-usage" assert host['subscription-information']['system-purpose']['service-level'] == "Self-Support" # Change system purpose values in the host - Host.update( + target_sat.cli.Host.update( { 'purpose-addons': "test-addon3", 'purpose-role': "test-role2", @@ -2404,13 +2293,13 @@ def test_syspurpose_end_to_end( 'id': host['id'], } ) - host = Host.info({'id': host['id']}) + host = target_sat.cli.Host.info({'id': host['id']}) # Assert system purpose values have been updated in the host as expected assert host['subscription-information']['system-purpose']['purpose-addons'] == "test-addon3" assert host['subscription-information']['system-purpose']['purpose-role'] == "test-role2" assert host['subscription-information']['system-purpose']['purpose-usage'] == "test-usage2" assert host['subscription-information']['system-purpose']['service-level'] == "Self-Support2" - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': activation_key.id, @@ -2421,11 +2310,11 @@ def test_syspurpose_end_to_end( assert len(host_subscriptions) > 0 assert host_subscriptions[0]['name'] == default_subscription.name # Unregister host - Host.subscription_unregister({'host': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_unregister({'host': host_subscription_client.hostname}) with pytest.raises(CLIReturnCodeError): # raise error that the host was not registered by # subscription-manager register - ActivationKey.subscriptions( + target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': activation_key.id, @@ -2448,8 +2337,8 @@ def test_positive_errata_list_of_sat_server(target_sat): :CaseImportance: Critical """ hostname = target_sat.execute('hostname').stdout.strip() - host = Host.info({'name': hostname}) - assert isinstance(Host.errata_list({'host-id': host['id']}), list) + host = target_sat.cli.Host.info({'name': hostname}) + assert isinstance(target_sat.cli.Host.errata_list({'host-id': host['id']}), list) # -------------------------- HOST ENC SUBCOMMAND SCENARIOS ------------------------- @@ -2467,7 +2356,7 @@ def test_positive_dump_enc_yaml(target_sat): :CaseImportance: Critical """ - enc_dump = Host.enc_dump({'name': target_sat.hostname}) + enc_dump = target_sat.cli.Host.enc_dump({'name': target_sat.hostname}) assert f'fqdn: {target_sat.hostname}' in enc_dump assert f'ip: {target_sat.ip_addr}' in enc_dump assert 'ssh-rsa' in enc_dump @@ -2476,7 +2365,7 @@ def test_positive_dump_enc_yaml(target_sat): # -------------------------- HOST TRACE SUBCOMMAND SCENARIOS ------------------------- @pytest.mark.tier3 @pytest.mark.rhel_ver_match('[^6].*') -def test_positive_tracer_list_and_resolve(tracer_host): +def test_positive_tracer_list_and_resolve(tracer_host, target_sat): """Install tracer on client, downgrade the service, check from the satellite that tracer shows and resolves the problem. The test works with a package specified in settings. 
This package is expected to install a systemd service which is expected @@ -2494,11 +2383,13 @@ def test_positive_tracer_list_and_resolve(tracer_host): :CaseComponent: katello-tracer + :Team: Phoenix-subscriptions + :BZ: 2186188 """ client = tracer_host package = settings.repos["MOCK_SERVICE_RPM"] - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) # mark the service log messages for later comparison and downgrade the pkg version service_ver_log_old = tracer_host.execute(f'cat /var/log/{package}/service.log') @@ -2506,12 +2397,12 @@ def test_positive_tracer_list_and_resolve(tracer_host): assert package_downgrade.status == 0 # tracer should detect a new trace - traces = HostTraces.list({'host-id': host_info['id']})[0] + traces = target_sat.cli.HostTraces.list({'host-id': host_info['id']})[0] assert package == traces['application'] # resolve traces and make sure that they disappear - HostTraces.resolve({'host-id': host_info['id'], 'trace-ids': traces['trace-id']}) - traces = HostTraces.list({'host-id': host_info['id']}) + target_sat.cli.HostTraces.resolve({'host-id': host_info['id'], 'trace-ids': traces['trace-id']}) + traces = target_sat.cli.HostTraces.list({'host-id': host_info['id']}) assert not traces # verify on the host end, that the service was really restarted @@ -2575,14 +2466,14 @@ def test_positive_host_with_puppet( session_puppet_enabled_sat.cli.Host.delete({'id': host['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_proxy(session_puppet_enabled_sat, puppet_proxy_port_range): proxy = session_puppet_enabled_sat.cli_factory.make_proxy() yield proxy session_puppet_enabled_sat.cli.Proxy.delete({'id': proxy['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_host_content_source( session_puppet_enabled_sat, session_puppet_enabled_proxy, @@ -2599,7 +2490,7 @@ def function_host_content_source( } ) yield host session_puppet_enabled_sat.cli.Host.delete({'id': host['id']}) @pytest.mark.tier2 @@ -2619,8 +2510,6 @@ def test_positive_list_scparams( :expectedresults: Overridden sc-param from puppet class are listed - - :CaseLevel: Integration """ update_smart_proxy(session_puppet_enabled_sat, module_puppet_loc, session_puppet_enabled_proxy) # Create hostgroup with associated puppet class @@ -2764,8 +2653,8 @@ def test_positive_create_and_update_with_content_source( host['content-information']['content-source']['name'] == session_puppet_enabled_proxy.name ) new_content_source = function_proxy session_puppet_enabled_sat.cli.Host.update( {'id': host['id'], 'content-source-id': new_content_source.id} ) host = session_puppet_enabled_sat.cli.Host.info({'id': host['id']}) assert host['content-information']['content-source']['name'] == new_content_source.name diff --git a/tests/foreman/cli/test_hostcollection.py b/tests/foreman/cli/test_hostcollection.py index 41b980d498d..ce3426030b5 100644 --- a/tests/foreman/cli/test_hostcollection.py +++ b/tests/foreman/cli/test_hostcollection.py @@ -4,45 +4,34 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: HostCollections :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from broker import Broker from fauxfactory import gen_string +import pytest
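A pattern worth calling out, since it repeats through the rest of this patch: module-level CLI wrappers (Host, HostTraces, HostCollection, ...) and make_* factory helpers become methods on a satellite fixture, so every hammer call and every created record is bound to one concrete Satellite instance; calls that are already bound, such as session_puppet_enabled_sat.cli.Host, need no further prefix. A sketch of the resulting test shape, reusing the target_sat fixture and the Host subcommands already shown in this diff:

    import pytest

    @pytest.mark.tier3
    def test_host_errata_list_shape(target_sat):
        # hammer subcommands live under <sat>.cli, record factories under
        # <sat>.cli_factory; both are scoped to this Satellite instance.
        host = target_sat.cli.Host.info({'name': target_sat.hostname})
        errata = target_sat.cli.Host.errata_list({'host-id': host['id']})
        assert isinstance(errata, list)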
-from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_fake_host -from robottelo.cli.factory import make_host_collection -from robottelo.cli.factory import make_org -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT +from robottelo.constants import DEFAULT_CV, ENVIRONMENT +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, +) -def _make_fake_host_helper(module_org): +def _make_fake_host_helper(module_org, sat): """Make a new fake host""" - library = LifecycleEnvironment.info({'organization-id': module_org.id, 'name': ENVIRONMENT}) - default_cv = ContentView.info({'organization-id': module_org.id, 'name': DEFAULT_CV}) - return make_fake_host( + library = sat.cli.LifecycleEnvironment.info( + {'organization-id': module_org.id, 'name': ENVIRONMENT} + ) + default_cv = sat.cli.ContentView.info({'organization-id': module_org.id, 'name': DEFAULT_CV}) + return sat.cli_factory.make_fake_host( { 'content-view-id': default_cv['id'], 'lifecycle-environment-id': library['id'], @@ -55,7 +44,7 @@ def _make_fake_host_helper(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.e2e -def test_positive_end_to_end(module_org): +def test_positive_end_to_end(module_org, module_target_sat): """Check if host collection can be created with name and description, content host can be added and removed, host collection can be listed, updated and deleted @@ -70,57 +59,70 @@ def test_positive_end_to_end(module_org): """ name = list(valid_data_list().values())[0] desc = list(valid_data_list().values())[0] - new_host_col = make_host_collection( + new_host_col = module_target_sat.cli_factory.make_host_collection( {'description': desc, 'name': name, 'organization-id': module_org.id} ) assert new_host_col['name'] == name assert new_host_col['description'] == desc # add host - new_system = _make_fake_host_helper(module_org) + new_system = _make_fake_host_helper(module_org, module_target_sat) no_of_content_host = new_host_col['total-hosts'] - HostCollection.add_host({'host-ids': new_system['id'], 'id': new_host_col['id']}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.add_host( + {'host-ids': new_system['id'], 'id': new_host_col['id']} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['total-hosts'] > no_of_content_host # list hosts - result = HostCollection.hosts({'name': name, 'organization-id': module_org.id}) + result = module_target_sat.cli.HostCollection.hosts( + {'name': name, 'organization-id': module_org.id} + ) assert new_system['name'].lower() == result[0]['name'] # List all host collections within organization - result = HostCollection.list({'organization': module_org.name}) + result = module_target_sat.cli.HostCollection.list({'organization': module_org.name}) assert len(result) >= 1 # Filter 
list by name - result = HostCollection.list({'name': name, 'organization-id': module_org.id}) + result = module_target_sat.cli.HostCollection.list( + {'name': name, 'organization-id': module_org.id} + ) assert len(result) == 1 assert result[0]['id'] == new_host_col['id'] # Filter list by associated host name - result = HostCollection.list({'organization': module_org.name, 'host': new_system['name']}) + result = module_target_sat.cli.HostCollection.list( + {'organization': module_org.name, 'host': new_system['name']} + ) assert len(result) == 1 assert result[0]['name'] == new_host_col['name'] # remove host - no_of_content_host = HostCollection.info({'id': new_host_col['id']})['total-hosts'] - HostCollection.remove_host({'host-ids': new_system['id'], 'id': new_host_col['id']}) - result = HostCollection.info({'id': new_host_col['id']}) + no_of_content_host = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']})[ + 'total-hosts' + ] + module_target_sat.cli.HostCollection.remove_host( + {'host-ids': new_system['id'], 'id': new_host_col['id']} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert no_of_content_host > result['total-hosts'] # update new_name = list(valid_data_list().values())[0] new_desc = list(valid_data_list().values())[0] - HostCollection.update({'description': new_desc, 'id': new_host_col['id'], 'new-name': new_name}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.update( + {'description': new_desc, 'id': new_host_col['id'], 'new-name': new_name} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['name'] == new_name assert result['description'] == new_desc # delete - HostCollection.delete({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.delete({'id': new_host_col['id']}) with pytest.raises(CLIReturnCodeError): - HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) -@pytest.mark.build_sanity @pytest.mark.tier1 -def test_positive_create_with_limit(module_org): +def test_positive_create_with_limit(module_org, module_target_sat): """Check if host collection can be created with correct limits :id: 682b5624-1095-48e6-a0dd-c76e70ca6540 @@ -130,12 +132,14 @@ def test_positive_create_with_limit(module_org): :CaseImportance: Critical """ for limit in ('1', '3', '5', '10', '20'): - new_host_col = make_host_collection({'max-hosts': limit, 'organization-id': module_org.id}) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'max-hosts': limit, 'organization-id': module_org.id} + ) assert new_host_col['limit'] == limit @pytest.mark.tier1 -def test_positive_update_to_unlimited_hosts(module_org): +def test_positive_update_to_unlimited_hosts(module_org, module_target_sat): """Create Host Collection with a limit and update it to unlimited hosts :id: d688fd4a-88eb-484e-9e90-854e0595edd0 @@ -144,24 +148,24 @@ def test_positive_update_to_unlimited_hosts(module_org): :CaseImportance: High """ - host_collection = make_host_collection( + host_collection = module_target_sat.cli_factory.make_host_collection( { 'max-hosts': 1, 'organization-id': module_org.id, } ) - result = HostCollection.info( + result = module_target_sat.cli.HostCollection.info( {'name': host_collection['name'], 'organization-id': module_org.id} ) assert result['limit'] == '1' - HostCollection.update( + module_target_sat.cli.HostCollection.update( { 'name': 
host_collection['name'], 'organization-id': module_org.id, 'unlimited-hosts': True, } ) - result = HostCollection.info( + result = module_target_sat.cli.HostCollection.info( {'name': host_collection['name'], 'organization-id': module_org.id} ) assert result['limit'] == 'None' @@ -169,7 +173,7 @@ def test_positive_update_to_unlimited_hosts(module_org): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_name(module_org, name): +def test_negative_create_with_name(module_org, name, module_target_sat): """Attempt to create host collection with invalid name of different types @@ -182,11 +186,13 @@ def test_negative_create_with_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_host_collection({'name': name, 'organization-id': module_org.id}) + module_target_sat.cli_factory.make_host_collection( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.tier1 -def test_positive_update_limit(module_org): +def test_positive_update_limit(module_org, module_target_sat): """Check if host collection limits can be updated :id: 4c0e0c3b-82ac-4aa2-8378-6adc7946d4ec @@ -197,37 +203,39 @@ def test_positive_update_limit(module_org): :CaseImportance: Critical """ - new_host_col = make_host_collection({'organization-id': module_org.id}) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_org.id} + ) for limit in ('3', '6', '9', '12', '15', '17', '19'): - HostCollection.update({'id': new_host_col['id'], 'max-hosts': limit}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.update({'id': new_host_col['id'], 'max-hosts': limit}) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['limit'] == limit @pytest.mark.tier2 -def test_positive_list_by_org_id(module_org): +def test_positive_list_by_org_id(module_org, module_target_sat): """Check if host collection list can be filtered by organization id :id: afbe077a-0de1-432c-a0c4-082129aab92e :expectedresults: Only host-collection within specific org is listed - - :CaseLevel: Integration """ # Create two host collections within different organizations - make_host_collection({'organization-id': module_org.id}) - new_org = make_org() - new_host_col = make_host_collection({'organization-id': new_org['id']}) + module_target_sat.cli_factory.make_host_collection({'organization-id': module_org.id}) + new_org = module_target_sat.cli_factory.make_org() + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'organization-id': new_org['id']} + ) # List all host collections - assert len(HostCollection.list()) >= 2 + assert len(module_target_sat.cli.HostCollection.list()) >= 2 # Filter list by org id - result = HostCollection.list({'organization-id': new_org['id']}) + result = module_target_sat.cli.HostCollection.list({'organization-id': new_org['id']}) assert len(result) == 1 assert result[0]['id'] == new_host_col['id'] @pytest.mark.tier2 -def test_positive_host_collection_host_pagination(module_org): +def test_positive_host_collection_host_pagination(module_org, module_target_sat): """Check if pagination configured on per-page param defined in hammer host-collection hosts command overrides global configuration defined on /etc/hammer/cli_config.yml, whose default is 20 per page @@ -238,14 +246,18 @@ def test_positive_host_collection_host_pagination(module_org): :expectedresults: Number of hosts per page
follows per_page configuration restriction - - :CaseLevel: Integration """ - host_collection = make_host_collection({'organization-id': module_org.id}) - host_ids = ','.join(_make_fake_host_helper(module_org)['id'] for _ in range(2)) - HostCollection.add_host({'host-ids': host_ids, 'id': host_collection['id']}) + host_collection = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_org.id} + ) + host_ids = ','.join( + _make_fake_host_helper(module_org, module_target_sat)['id'] for _ in range(2) + ) + module_target_sat.cli.HostCollection.add_host( + {'host-ids': host_ids, 'id': host_collection['id']} + ) for number in range(1, 3): - listed_hosts = HostCollection.hosts( + listed_hosts = module_target_sat.cli.HostCollection.hosts( { 'id': host_collection['id'], 'organization-id': module_org.id, @@ -256,7 +268,7 @@ def test_positive_host_collection_host_pagination(module_org): @pytest.mark.tier2 -def test_positive_copy_by_id(module_org): +def test_positive_copy_by_id(module_org, module_target_sat): """Check if host collection can be cloned by id :id: fd7cea50-bc56-4938-a81d-4f7a60711814 @@ -266,15 +278,15 @@ def test_positive_copy_by_id(module_org): :expectedresults: Host collection is cloned successfully :BZ: 1328925 - - :CaseLevel: Integration """ - host_collection = make_host_collection( + host_collection = module_target_sat.cli_factory.make_host_collection( {'name': gen_string('alpha', 15), 'organization-id': module_org.id} ) new_name = gen_string('numeric') - new_host_collection = HostCollection.copy({'id': host_collection['id'], 'new-name': new_name}) - result = HostCollection.info({'id': new_host_collection[0]['id']}) + new_host_collection = module_target_sat.cli.HostCollection.copy( + {'id': host_collection['id'], 'new-name': new_name} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_collection[0]['id']}) assert result['name'] == new_name @@ -288,13 +300,11 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi :expectedresults: Host successfully registered and listed in host collection :BZ: 1385814 - - :CaseLevel: System """ - host_info = _make_fake_host_helper(module_org) + host_info = _make_fake_host_helper(module_org, target_sat) - hc = make_host_collection({'organization-id': module_org.id}) - ActivationKey.add_host_collection( + hc = target_sat.cli_factory.make_host_collection({'organization-id': module_org.id}) + target_sat.cli.ActivationKey.add_host_collection( { 'id': module_ak_with_cv.id, 'organization-id': module_org.id, @@ -302,7 +312,7 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi } ) # add the registered instance host to collection - HostCollection.add_host( + target_sat.cli.HostCollection.add_host( {'id': hc['id'], 'organization-id': module_org.id, 'host-ids': host_info['id']} ) @@ -312,8 +322,10 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi client.register_contenthost(module_org.name, activation_key=module_ak_with_cv.name) assert client.subscribed # note: when registering the host, it should be automatically added to the host-collection - client_host = Host.info({'name': client.hostname}) - hosts = HostCollection.hosts({'id': hc['id'], 'organization-id': module_org.id}) + client_host = target_sat.cli.Host.info({'name': client.hostname}) + hosts = target_sat.cli.HostCollection.hosts( + {'id': hc['id'], 'organization-id': module_org.id} + ) assert len(hosts) == 2 expected_hosts_ids = {host_info['id'], 
client_host['id']} hosts_ids = {host['id'] for host in hosts} diff --git a/tests/foreman/cli/test_hostgroup.py b/tests/foreman/cli/test_hostgroup.py index e5fee3a7738..febe94038f8 100644 --- a/tests/foreman/cli/test_hostgroup.py +++ b/tests/foreman/cli/test_hostgroup.py @@ -4,43 +4,25 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: HostGroup :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_integer from nailgun import entities +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_architecture -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_domain -from robottelo.cli.factory import make_environment -from robottelo.cli.factory import make_hostgroup -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_medium -from robottelo.cli.factory import make_os -from robottelo.cli.factory import make_partition_table -from robottelo.cli.factory import make_subnet -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.proxy import Proxy from robottelo.config import settings -from robottelo.utils.datafactory import invalid_id_list -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_hostgroups_list +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_id_list, + invalid_values_list, + parametrized, + valid_hostgroups_list, +) pytestmark = [ pytest.mark.skipif( @@ -88,20 +70,20 @@ def puppet_content_source(session_puppet_enabled_sat): @pytest.fixture(scope='module') def content_source(module_target_sat): """Return the proxy.""" - return Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] + return module_target_sat.cli.Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] @pytest.fixture(scope='module') -def hostgroup(content_source, module_org): +def hostgroup(content_source, module_org, module_target_sat): """Create a host group.""" - return make_hostgroup( + return module_target_sat.cli_factory.hostgroup( {'content-source-id': content_source['id'], 'organization-ids': module_org.id} ) @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_name(name): +def test_negative_create_with_name(name, module_target_sat): """Don't create an HostGroup with invalid data. :id: 853a6d43-129a-497b-94f0-08dc622862f8 @@ -111,7 +93,7 @@ def test_negative_create_with_name(name): :expectedresults: HostGroup is not created. 
""" with pytest.raises(CLIReturnCodeError): - HostGroup.create({'name': name}) + module_target_sat.cli.HostGroup.create({'name': name}) @pytest.mark.e2e @@ -129,34 +111,44 @@ def test_positive_create_with_multiple_entities_and_delete( :BZ: 1395254, 1313056 - :CaseLevel: Integration - :CaseImportance: Critical """ with session_puppet_enabled_sat: # Common entities name = valid_hostgroups_list()[0] - loc = make_location() + loc = session_puppet_enabled_sat.cli_factory.make_location() org_2 = entities.Organization().create() orgs = [module_puppet_org, org_2] - env = make_environment({'location-ids': loc['id'], 'organization-ids': org_2.id}) - lce = make_lifecycle_environment({'organization-id': org_2.id}) + env = session_puppet_enabled_sat.cli_factory.make_environment( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + lce = session_puppet_enabled_sat.cli_factory.make_lifecycle_environment( + {'organization-id': org_2.id} + ) # Content View should be promoted to be used with LC Env - cv = make_content_view({'organization-id': org_2.id}) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) - ContentView.version_promote( + cv = session_puppet_enabled_sat.cli_factory.make_content_view({'organization-id': org_2.id}) + session_puppet_enabled_sat.cli.ContentView.publish({'id': cv['id']}) + cv = session_puppet_enabled_sat.cli.ContentView.info({'id': cv['id']}) + session_puppet_enabled_sat.cli.ContentView.version_promote( {'id': cv['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) # Network - domain = make_domain({'location-ids': loc['id'], 'organization-ids': org_2.id}) - subnet = make_subnet({'domain-ids': domain['id'], 'organization-ids': org_2.id}) + domain = session_puppet_enabled_sat.cli_factory.make_domain( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + subnet = session_puppet_enabled_sat.cli_factory.make_subnet( + {'domain-ids': domain['id'], 'organization-ids': org_2.id} + ) # Operating System - arch = make_architecture() - ptable = make_partition_table({'location-ids': loc['id'], 'organization-ids': org_2.id}) - os = make_os({'architecture-ids': arch['id'], 'partition-table-ids': ptable['id']}) + arch = session_puppet_enabled_sat.cli_factory.make_architecture() + ptable = session_puppet_enabled_sat.cli_factory.make_partition_table( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + os = session_puppet_enabled_sat.cli_factory.make_os( + {'architecture-ids': arch['id'], 'partition-table-ids': ptable['id']} + ) os_full_name = "{} {}.{}".format(os['name'], os['major-version'], os['minor-version']) - media = make_medium( + media = session_puppet_enabled_sat.cli_factory.make_medium( { 'operatingsystem-ids': os['id'], 'location-ids': loc['id'], @@ -184,7 +176,7 @@ def test_positive_create_with_multiple_entities_and_delete( 'puppet-classes': puppet_classes[0]['name'], 'query-organization': org_2.name, } - hostgroup = make_hostgroup(make_hostgroup_params) + hostgroup = session_puppet_enabled_sat.cli_factory.hostgroup(make_hostgroup_params) assert hostgroup['name'] == name assert {org.name for org in orgs} == set(hostgroup['organizations']) assert loc['name'] in hostgroup['locations'] @@ -202,13 +194,13 @@ def test_positive_create_with_multiple_entities_and_delete( assert puppet_content_source['name'] == hostgroup['content-source']['name'] assert puppet_classes[0]['name'] in hostgroup['puppetclasses'] # delete hostgroup - HostGroup.delete({'id': hostgroup['id']}) + 
session_puppet_enabled_sat.cli.HostGroup.delete({'id': hostgroup['id']}) with pytest.raises(CLIReturnCodeError): - HostGroup.info({'id': hostgroup['id']}) + session_puppet_enabled_sat.cli.HostGroup.info({'id': hostgroup['id']}) @pytest.mark.tier2 -def test_negative_create_with_content_source(module_org): +def test_negative_create_with_content_source(module_org, module_target_sat): """Attempt to create a hostgroup with invalid content source specified :id: 9fc1b777-36a3-4940-a9c8-aed7ff725371 @@ -216,11 +208,9 @@ def test_negative_create_with_content_source(module_org): :BZ: 1260697 :expectedresults: Hostgroup was not created - - :CaseLevel: Integration """ with pytest.raises(CLIFactoryError): - make_hostgroup( + module_target_sat.cli_factory.hostgroup( { 'content-source-id': gen_integer(10000, 99999), 'organization-ids': module_org.id, @@ -249,11 +239,9 @@ def test_positive_update_hostgroup_with_puppet( :expectedresults: Hostgroup was successfully updated with new content source, name and puppet classes - - :CaseLevel: Integration """ with session_puppet_enabled_sat as puppet_sat: - hostgroup = make_hostgroup( + hostgroup = puppet_sat.cli_factory.hostgroup( { 'content-source-id': puppet_content_source['id'], 'organization-ids': module_puppet_org.id, @@ -266,13 +254,13 @@ def test_positive_update_hostgroup_with_puppet( @request.addfinalizer def _cleanup(): with session_puppet_enabled_sat: - HostGroup.delete({'id': hostgroup['id']}) + session_puppet_enabled_sat.cli.HostGroup.delete({'id': hostgroup['id']}) session_puppet_enabled_sat.cli.Proxy.delete({'id': new_content_source['id']}) assert len(hostgroup['puppetclasses']) == 0 new_name = valid_hostgroups_list()[0] puppet_class_names = [puppet['name'] for puppet in puppet_classes] - HostGroup.update( + session_puppet_enabled_sat.cli.HostGroup.update( { 'new-name': new_name, 'id': hostgroup['id'], @@ -280,7 +268,7 @@ def _cleanup(): 'puppet-classes': puppet_class_names, } ) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + hostgroup = session_puppet_enabled_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == new_name assert hostgroup['content-source']['name'] == new_content_source['name'] for puppet_class_name in puppet_class_names: @@ -304,10 +292,8 @@ def test_positive_update_hostgroup( :expectedresults: Hostgroup was successfully updated with new content source and name - - :CaseLevel: Integration """ - hostgroup = make_hostgroup( + hostgroup = module_target_sat.cli_factory.hostgroup( { 'content-source-id': content_source['id'], 'organization-ids': module_org.id, @@ -317,20 +303,20 @@ def test_positive_update_hostgroup( new_content_source = module_target_sat.cli_factory.make_proxy() new_name = valid_hostgroups_list()[0] - HostGroup.update( + module_target_sat.cli.HostGroup.update( { 'new-name': new_name, 'id': hostgroup['id'], 'content-source-id': new_content_source['id'], } ) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + hostgroup = module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == new_name assert hostgroup['content-source']['name'] == new_content_source['name'] @pytest.mark.tier2 -def test_negative_update_content_source(hostgroup, content_source): +def test_negative_update_content_source(hostgroup, content_source, module_target_sat): """Attempt to update hostgroup's content source with invalid value :id: 4ffe6d18-3899-4bf1-acb2-d55ea09b7a26 @@ -339,17 +325,17 @@ def test_negative_update_content_source(hostgroup, content_source): :expectedresults: Host group was 
not updated. Content source remains the same as it was before update - - :CaseLevel: Integration """ with pytest.raises(CLIReturnCodeError): - HostGroup.update({'id': hostgroup['id'], 'content-source-id': gen_integer(10000, 99999)}) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + module_target_sat.cli.HostGroup.update( + {'id': hostgroup['id'], 'content-source-id': gen_integer(10000, 99999)} + ) + hostgroup = module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['content-source']['name'] == content_source['name'] @pytest.mark.tier2 -def test_negative_update_name(hostgroup): +def test_negative_update_name(hostgroup, module_target_sat): """Create HostGroup then fail to update its name :id: 42d208a4-f518-4ff2-9b7a-311adb460abd @@ -358,28 +344,26 @@ def test_negative_update_name(hostgroup): """ new_name = invalid_values_list()[0] with pytest.raises(CLIReturnCodeError): - HostGroup.update({'id': hostgroup['id'], 'new-name': new_name}) - result = HostGroup.info({'id': hostgroup['id']}) + module_target_sat.cli.HostGroup.update({'id': hostgroup['id'], 'new-name': new_name}) + result = module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == result['name'] @pytest.mark.tier2 -def test_negative_delete_by_id(): +def test_negative_delete_by_id(module_target_sat): """Create HostGroup then delete it by wrong ID :id: 047c9f1a-4dd6-4fdc-b7ed-37cc725c68d3 :expectedresults: HostGroup is not deleted - - :CaseLevel: Integration """ entity_id = invalid_id_list()[0] with pytest.raises(CLIReturnCodeError): - HostGroup.delete({'id': entity_id}) + module_target_sat.cli.HostGroup.delete({'id': entity_id}) @pytest.mark.tier2 -def test_positive_created_nested_hostgroup(module_org): +def test_positive_created_nested_hostgroup(module_org, module_target_sat): """Create a nested host group using multiple parent hostgroup paths. e.g. ` hostgroup create --organization 'org_name' --name new3 --parent-title new_1/new_2` @@ -390,11 +374,12 @@ def test_positive_created_nested_hostgroup(module_org): :customerscenario: true :CaseImportance: Low - """ - parent_hg = make_hostgroup({'organization-ids': module_org.id}) - nested = make_hostgroup({'organization-ids': module_org.id, 'parent': parent_hg['name']}) - sub_nested = make_hostgroup( + parent_hg = module_target_sat.cli_factory.hostgroup({'organization-ids': module_org.id}) + nested = module_target_sat.cli_factory.hostgroup( + {'organization-ids': module_org.id, 'parent': parent_hg['name']} + ) + sub_nested = module_target_sat.cli_factory.hostgroup( {'organization-ids': module_org.id, 'parent-title': f'{parent_hg["name"]}/{nested["name"]}'} ) assert sub_nested['title'] == f"{parent_hg['name']}/{nested['name']}/{sub_nested['name']}" @@ -410,7 +395,7 @@ def test_positive_nested_hostgroup_info(): :customerscenario: true - :Steps: + :steps: 1. Create parent hostgroup and nested hostgroup, with puppet environment, classes, and parameters on each. 
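The nested-hostgroup test above relies on hammer resolving --parent-title paths; a condensed sketch of the three-level setup it performs, assuming the cli_factory.hostgroup helper used throughout this file:

    def build_nested_hostgroups(sat, org):
        """Create parent/child/grandchild host groups; return the leaf."""
        parent = sat.cli_factory.hostgroup({'organization-ids': org.id})
        child = sat.cli_factory.hostgroup(
            {'organization-ids': org.id, 'parent': parent['name']}
        )
        # The leaf addresses its parent chain by full title path, mirroring
        # `hammer hostgroup create --parent-title new_1/new_2`.
        leaf = sat.cli_factory.hostgroup(
            {
                'organization-ids': org.id,
                'parent-title': f"{parent['name']}/{child['name']}",
            }
        )
        assert leaf['title'] == f"{parent['name']}/{child['name']}/{leaf['name']}"
        return leaf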
diff --git a/tests/foreman/cli/test_http_proxy.py b/tests/foreman/cli/test_http_proxy.py index 4f7808e645c..68ea95559ca 100644 --- a/tests/foreman/cli/test_http_proxy.py +++ b/tests/foreman/cli/test_http_proxy.py @@ -2,33 +2,21 @@ :Requirement: HttpProxy -:CaseLevel: Acceptance - -:CaseComponent: Repositories +:CaseComponent: HTTPProxy :team: Phoenix-content -:TestType: Functional - :CaseImportance: High :CaseAutomation: Automated -:Upstream: No """ +from fauxfactory import gen_integer, gen_string, gen_url import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string -from fauxfactory import gen_url - -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.http_proxy import HttpProxy -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository + from robottelo.config import settings from robottelo.constants import FAKE_0_YUM_REPO_PACKAGES_COUNT +from robottelo.exceptions import CLIReturnCodeError @pytest.mark.tier1 @@ -106,7 +94,7 @@ def test_insights_client_registration_with_http_proxy(): :customerscenario: true - :Steps: + :steps: 1. Create HTTP Proxy. 2. Set created proxy as "Default HTTP Proxy" in settings. 3. Edit /etc/resolv.conf and comment out all entries so that @@ -123,8 +111,6 @@ def test_insights_client_registration_with_http_proxy(): works with http proxy set. :CaseAutomation: NotAutomated - - :CaseImportance: High """ @@ -137,16 +123,13 @@ def test_positive_set_content_default_http_proxy(block_fake_repo_access, target_ :id: c12868eb-98f1-4763-a168-281ac44d9ff5 - :Steps: + :steps: 1. Create a product with repo. 2. Create an un-authenticated proxy. 3. Set the proxy to be the global default proxy. 4. Sync a repo. :expectedresults: Repo is synced - - :CaseImportance: High - """ org = target_sat.api.Organization().create() proxy_name = gen_string('alpha', 15) @@ -191,7 +174,7 @@ def test_positive_environment_variable_unset_set(): :customerscenario: true - :Steps: + :steps: 1. Export any environment variable from [http_proxy, https_proxy, ssl_cert_file, HTTP_PROXY, HTTPS_PROXY, SSL_CERT_FILE] 2. satellite-installer @@ -199,35 +182,42 @@ def test_positive_environment_variable_unset_set(): :expectedresults: satellite-installer unsets system proxy and SSL environment variables only for the duration of install and sets back those in the end. - :CaseImportance: High - :CaseAutomation: NotAutomated - """ @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_assign_http_proxy_to_products(module_org): +def test_positive_assign_http_proxy_to_products(module_org, module_target_sat): """Assign http_proxy to Products and perform product sync. :id: 6af7b2b8-15d5-4d9f-9f87-e76b404a966f - :expectedresults: HTTP Proxy is assigned to all repos present - in Products and sync operation performed successfully. + :steps: + 1. Create two HTTP proxies. + 2. Create two products and two repos in each product with various HTTP proxy policies. + 3. Set the HTTP proxy through bulk action for both products to the selected proxy. + 4. Bulk sync both products and verify packages counts. + 5. Set the HTTP proxy through bulk action for both products to None. + - :CaseImportance: High + :expectedresults: + 1.
HTTP Proxy is assigned to all repos present in Products + and sync operation performed successfully. """ - # create HTTP proxies - http_proxy_a = HttpProxy.create( + # Create two HTTP proxies + http_proxy_a = module_target_sat.cli.HttpProxy.create( { 'name': gen_string('alpha', 15), 'url': settings.http_proxy.un_auth_proxy_url, 'organization-id': module_org.id, }, ) - http_proxy_b = HttpProxy.create( + http_proxy_b = module_target_sat.cli.HttpProxy.create( { 'name': gen_string('alpha', 15), 'url': settings.http_proxy.auth_proxy_url, @@ -236,10 +226,11 @@ def test_positive_assign_http_proxy_to_products(module_org): 'organization-id': module_org.id, }, ) - # Create products and repositories - product_a = make_product({'organization-id': module_org.id}) - product_b = make_product({'organization-id': module_org.id}) - repo_a1 = make_repository( + + # Create two products and two repos in each product with various HTTP proxy policies + product_a = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + product_b = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo_a1 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_a['id'], @@ -247,7 +238,7 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-policy': 'none', }, ) - repo_a2 = make_repository( + repo_a2 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_a['id'], @@ -256,7 +247,7 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-id': http_proxy_a['id'], }, ) - repo_b1 = make_repository( + repo_b1 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_b['id'], @@ -264,34 +255,58 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-policy': 'none', }, ) - repo_b2 = make_repository( + repo_b2 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_b['id'], 'url': settings.repos.yum_0.url, }, ) - # Add http_proxy to products - Product.update_proxy( + + # Set the HTTP proxy through bulk action for both products to the selected proxy + res = module_target_sat.cli.Product.update_proxy( { 'ids': f"{product_a['id']},{product_b['id']}", 'http-proxy-policy': 'use_selected_http_proxy', 'http-proxy-id': http_proxy_b['id'], } ) + assert 'Product proxy updated' in res + module_target_sat.wait_for_tasks( + search_query=( + f'Actions::Katello::Repository::Update and organization_id = {module_org.id}' + ), + max_tries=5, + poll_rate=10, + ) for repo in repo_a1, repo_a2, repo_b1, repo_b2: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['http-proxy']['http-proxy-policy'] == 'use_selected_http_proxy' assert result['http-proxy']['id'] == http_proxy_b['id'] - # Perform sync and verify packages count - Product.synchronize({'id': product_a['id'], 'organization-id': module_org.id}) - Product.synchronize({'id': product_b['id'], 'organization-id': module_org.id}) - Product.update_proxy( - {'ids': f"{product_a['id']},{product_b['id']}", 'http-proxy-policy': 'none'} + # Bulk sync both products and verify packages counts + module_target_sat.cli.Product.synchronize( + {'id': product_a['id'], 'organization-id': module_org.id} + ) + module_target_sat.cli.Product.synchronize( + {'id': product_b['id'], 'organization-id': module_org.id} ) + for repo in repo_a1, 
repo_a2, repo_b1, repo_b2: + info = module_target_sat.cli.Repository.info({'id': repo['id']}) + assert int(info['content-counts']['packages']) == FAKE_0_YUM_REPO_PACKAGES_COUNT + # Set the HTTP proxy through bulk action for both products to None + res = module_target_sat.cli.Product.update_proxy( + {'ids': f"{product_a['id']},{product_b['id']}", 'http-proxy-policy': 'none'} + ) + assert 'Product proxy updated' in res + module_target_sat.wait_for_tasks( + search_query=( + f'Actions::Katello::Repository::Update and organization_id = {module_org.id}' + ), + max_tries=5, + poll_rate=10, + ) for repo in repo_a1, repo_a2, repo_b1, repo_b2: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['http-proxy']['http-proxy-policy'] == 'none' - assert int(result['content-counts']['packages']) == FAKE_0_YUM_REPO_PACKAGES_COUNT diff --git a/tests/foreman/cli/test_installer.py b/tests/foreman/cli/test_installer.py index 67d0bf7ad9c..cf0eb838ee0 100644 --- a/tests/foreman/cli/test_installer.py +++ b/tests/foreman/cli/test_installer.py @@ -1,20 +1,15 @@ """Tests For Disconnected Satellite Installation -:Requirement: Installer (disconnected satellite installation) +:Requirement: Installation (disconnected satellite installation) :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Installer +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -36,15 +31,3 @@ def test_positive_server_installer_from_iso(): :CaseAutomation: NotAutomated """ - - -def test_positive_disconnected_util_installer(): - """Can install satellite disconnected utility successfully - via RPM - - :id: b738cf2a-9c5f-4865-b134-102a4688534c - - :expectedresults: Install of disconnected utility successful. 
- - :CaseAutomation: NotAutomated - """ diff --git a/tests/foreman/cli/test_jobtemplate.py b/tests/foreman/cli/test_jobtemplate.py index a90aa0fb6ad..66553d2c54d 100644 --- a/tests/foreman/cli/test_jobtemplate.py +++ b/tests/foreman/cli/test_jobtemplate.py @@ -4,28 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo import ssh -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_job_template -from robottelo.cli.job_template import JobTemplate -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import invalid_values_list, parametrized TEMPLATE_FILE = 'template_file.txt' TEMPLATE_FILE_EMPTY = 'template_file_empty.txt' @@ -38,7 +29,7 @@ def module_template(): @pytest.mark.tier1 -def test_positive_create_job_template(module_org): +def test_positive_create_job_template(module_org, module_target_sat): """Create a simple Job Template :id: a5a67b10-61b0-4362-b671-9d9f095c452c @@ -48,19 +39,19 @@ def test_positive_create_job_template(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - assert JobTemplate.info({'name': template_name}) is not None + assert module_target_sat.cli.JobTemplate.info({'name': template_name}) is not None @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_job_template_with_invalid_name(module_org, name): +def test_negative_create_job_template_with_invalid_name(module_org, name, module_target_sat): """Create Job Template with invalid name :id: eb51afd4-e7b3-42c3-81c3-6e18ef3d7efe @@ -73,7 +64,7 @@ def test_negative_create_job_template_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': name, @@ -83,7 +74,7 @@ def test_negative_create_job_template_with_invalid_name(module_org, name): @pytest.mark.tier1 -def test_negative_create_job_template_with_same_name(module_org): +def test_negative_create_job_template_with_same_name(module_org, module_target_sat): """Create Job Template with duplicate name :id: 66100c82-97f5-4300-a0c9-8cf041f7789f @@ -93,7 +84,7 @@ def test_negative_create_job_template_with_same_name(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, @@ -101,7 +92,7 @@ def test_negative_create_job_template_with_same_name(module_org): } ) with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, @@ -111,7 +102,7 @@ def test_negative_create_job_template_with_same_name(module_org): @pytest.mark.tier1 -def test_negative_create_empty_job_template(module_org): +def 
test_negative_create_empty_job_template(module_org, module_target_sat): """Create Job Template with empty template file :id: 749be863-94ae-4008-a242-c23f353ca404 @@ -122,7 +113,7 @@ def test_negative_create_empty_job_template(module_org): """ template_name = gen_string('alpha', 7) with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, @@ -133,7 +124,7 @@ def test_negative_create_empty_job_template(module_org): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_job_template(module_org): +def test_positive_delete_job_template(module_org, module_target_sat): """Delete a job template :id: 33104c04-20e9-47aa-99da-4bf3414ea31a @@ -143,34 +134,33 @@ def test_positive_delete_job_template(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - JobTemplate.delete({'name': template_name}) + module_target_sat.cli.JobTemplate.delete({'name': template_name}) with pytest.raises(CLIReturnCodeError): - JobTemplate.info({'name': template_name}) + module_target_sat.cli.JobTemplate.info({'name': template_name}) @pytest.mark.tier2 -def test_positive_view_dump(module_org): +def test_positive_view_dump(module_org, module_target_sat): """Export contents of a job template :id: 25fcfcaa-fc4c-425e-919e-330e36195c4a :expectedresults: Verify no errors are thrown - """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - dumped_content = JobTemplate.dump({'name': template_name}) + dumped_content = module_target_sat.cli.JobTemplate.dump({'name': template_name}) assert len(dumped_content) > 0 diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index e9f299ab213..ce7b625c3a6 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -4,38 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.auth import Auth -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_ldap_auth_source -from robottelo.cli.factory import make_usergroup -from robottelo.cli.factory import make_usergroup_external -from robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.cli.role import Role -from robottelo.cli.usergroup import UserGroup -from robottelo.cli.usergroup import UserGroupExternal -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import parametrized - - -@pytest.fixture() +from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import generate_strings_list, parametrized + + +@pytest.fixture def ldap_tear_down(): """Teardown the all ldap settings user, usergroup and ldap delete""" yield @@ -58,7 +43,7 @@ class TestADAuthSource: 
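The AD auth-source test below runs once per name type produced by generate_strings_list; in isolation the parametrization idiom looks like this (the auth-source payload is trimmed to a hypothetical minimal dict, where real calls pass the server, account, and base-dn details shown below):

    import pytest
    from robottelo.utils.datafactory import generate_strings_list, parametrized

    # parametrized() expands a list of sample values into the argvalues/ids
    # mapping that pytest.mark.parametrize expects, giving readable test ids.
    @pytest.mark.parametrize('server_name', **parametrized(generate_strings_list()))
    def test_auth_source_name_roundtrip(server_name, module_target_sat):
        auth = module_target_sat.cli_factory.ldap_auth_source({'name': server_name})
        assert auth['server']['name'] == server_name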
@pytest.mark.upgrade @pytest.mark.parametrize('server_name', **parametrized(generate_strings_list())) @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_create_with_ad(self, ad_data, server_name): + def test_positive_create_with_ad(self, ad_data, server_name, module_target_sat): """Create/update/delete LDAP authentication with AD using names of different types :id: 093f6abc-91e7-4449-b484-71e4a14ac808 @@ -70,7 +55,7 @@ def test_positive_create_with_ad(self, ad_data, server_name): :CaseImportance: Critical """ ad_data = ad_data() - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -90,17 +75,17 @@ def test_positive_create_with_ad(self, ad_data, server_name): assert auth['server']['server'] == ad_data['ldap_hostname'] assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['ad'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': new_name}) @pytest.mark.tier1 @pytest.mark.parametrize('member_group', ['foobargroup', 'foobar.group']) @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): + def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data, module_target_sat): """Verify the usergroup-sync functionality in AD Auth Source :id: 2e913e76-49c3-11eb-b4c6-d46d6dd3b5b2 @@ -118,7 +103,7 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): """ ad_data = ad_data() LOGEDIN_MSG = "Using configured credentials for user '{0}'." 
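# A note on the `ldap_tear_down` fixture these tests request via
# `@pytest.mark.usefixtures`: it is a teardown-only pytest fixture, so the
# code after its bare `yield` runs once the test finishes, pass or fail.
# A minimal self-contained sketch of the pattern, with hypothetical cleanup
# helpers standing in for the real deletions:
#
#     @pytest.fixture
#     def ldap_tear_down():
#         yield                       # the test body executes here
#         delete_ldap_users()         # hypothetical helper
#         delete_ldap_usergroups()    # hypothetical helper
#         delete_ldap_auth_source()   # hypothetical helper
#
# Because the cleanup sits after the yield, a failing assertion in the test
# still leaves the Satellite free of stale LDAP users and usergroups.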
- auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.ldap_auth_source( { 'name': gen_string('alpha'), 'onthefly-register': 'true', @@ -128,29 +113,33 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): 'attr-firstname': LDAP_ATTR['firstname'], 'attr-lastname': LDAP_ATTR['surname'], 'attr-mail': LDAP_ATTR['mail'], - 'account': ad_data['ldap_user_name'], + 'account': fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", 'account-password': ad_data['ldap_user_passwd'], 'base-dn': ad_data['base_dn'], } ) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.usergroup() + module_target_sat.cli_factory.usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) - user_group = UserGroup.info({'id': user_group['id']}) - result = Auth.with_user( + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) + result = module_target_sat.cli.Auth.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).status() assert LOGEDIN_MSG.format(ad_data['ldap_user_name']) in result[0]['message'] - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - user_group = UserGroup.info({'id': user_group['id']}) - list = Role.with_user( + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) + list = module_target_sat.cli.Role.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).list() assert len(list) > 1 @@ -177,7 +166,7 @@ def _clean_up_previous_ldap(self): @pytest.mark.upgrade @pytest.mark.e2e @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): + def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name, module_target_sat): """CRUD LDAP authentication with FreeIPA :id: 6cb54405-b579-4020-bf99-cb811a6aa28b @@ -189,7 +178,7 @@ def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): :CaseImportance: High """ - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -209,16 +198,16 @@ def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): assert auth['server']['server'] == default_ipa_host.hostname assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['ipa'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': 
new_name}) @pytest.mark.tier3 @pytest.mark.usefixtures("ldap_tear_down") - def test_usergroup_sync_with_refresh(self, default_ipa_host): + def test_usergroup_sync_with_refresh(self, default_ipa_host, module_target_sat): """Verify the refresh functionality in Ldap Auth Source :id: c905eb80-2bd0-11ea-abc3-ddb7dbb3c930 @@ -234,7 +223,7 @@ def test_usergroup_sync_with_refresh(self, default_ipa_host): member_group = 'foreman_group' LOGEDIN_MSG = "Using configured credentials for user '{0}'." auth_source_name = gen_string('alpha') - auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.ldap_auth_source( { 'name': auth_source_name, 'onthefly-register': 'true', @@ -251,55 +240,61 @@ def test_usergroup_sync_with_refresh(self, default_ipa_host): 'groups-base': ipa_group_base_dn, } ) - auth_source = LDAPAuthSource.info({'id': auth_source['server']['id']}) + auth_source = module_target_sat.cli.LDAPAuthSource.info({'id': auth_source['server']['id']}) # Adding User in IPA UserGroup default_ipa_host.add_user_to_usergroup(member_username, member_group) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - ext_user_group = make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.usergroup() + ext_user_group = module_target_sat.cli_factory.usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) assert ext_user_group['auth-source'] == auth_source['server']['name'] - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 - result = Auth.with_user( + result = module_target_sat.cli.Auth.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).status() assert LOGEDIN_MSG.format(member_username) in result[0]['message'] with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - list = Role.with_user( + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + list = module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert len(list) > 1 - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 1 assert user_group['users'][0] == member_username # Removing User in IPA UserGroup default_ipa_host.remove_user_from_usergroup(member_username, member_group) - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - user_group = UserGroup.info({'id': user_group['id']}) + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + 
module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message @pytest.mark.tier3 @pytest.mark.usefixtures("ldap_tear_down") - def test_usergroup_with_usergroup_sync(self, default_ipa_host): + def test_usergroup_with_usergroup_sync(self, default_ipa_host, module_target_sat): """Verify the usergroup-sync functionality in Ldap Auth Source :id: 2b63e886-2c53-11ea-9da5-db3ae0527554 @@ -315,7 +310,7 @@ def test_usergroup_with_usergroup_sync(self, default_ipa_host): member_group = 'foreman_group' LOGEDIN_MSG = "Using configured credentials for user '{0}'." auth_source_name = gen_string('alpha') - auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.ldap_auth_source( { 'name': auth_source_name, 'onthefly-register': 'true', @@ -332,43 +327,45 @@ def test_usergroup_with_usergroup_sync(self, default_ipa_host): 'groups-base': ipa_group_base_dn, } ) - auth_source = LDAPAuthSource.info({'id': auth_source['server']['id']}) + auth_source = module_target_sat.cli.LDAPAuthSource.info({'id': auth_source['server']['id']}) # Adding User in IPA UserGroup default_ipa_host.add_user_to_usergroup(member_username, member_group) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - ext_user_group = make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.usergroup() + ext_user_group = module_target_sat.cli_factory.usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) assert ext_user_group['auth-source'] == auth_source['server']['name'] - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 - result = Auth.with_user( + result = module_target_sat.cli.Auth.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).status() assert LOGEDIN_MSG.format(member_username) in result[0]['message'] - list = Role.with_user( + list = module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert len(list) > 1 - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 1 assert user_group['users'][0] == member_username # Removing User in IPA UserGroup default_ipa_host.remove_user_from_usergroup(member_username, member_group) with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 @@ -380,7 +377,9 @@ class TestOpenLdapAuthSource: @pytest.mark.e2e @pytest.mark.parametrize('server_name', **parametrized(generate_strings_list())) @pytest.mark.upgrade - def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): + def 
test_positive_end_to_end_with_open_ldap( + self, open_ldap_data, server_name, module_target_sat + ): """CRUD LDAP Operations with OpenLDAP :id: f84db334-0189-11eb-846c-d46d6dd3b5b2 @@ -391,7 +390,7 @@ def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): :CaseImportance: High """ - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -410,9 +409,9 @@ def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): assert auth['server']['server'] == open_ldap_data['ldap_hostname'] assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['posix'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': new_name}) diff --git a/tests/foreman/cli/test_leapp_client.py b/tests/foreman/cli/test_leapp_client.py new file mode 100644 index 00000000000..4ea36bbf37a --- /dev/null +++ b/tests/foreman/cli/test_leapp_client.py @@ -0,0 +1,287 @@ +"""Tests for leapp upgrade of content hosts with Satellite + +:Requirement: leapp + +:CaseComponent: Leappintegration + +:Team: Rocket + +:CaseImportance: High + +:CaseAutomation: Automated + +""" +from broker import Broker +import pytest + +from robottelo.config import settings +from robottelo.constants import PRDS +from robottelo.hosts import ContentHost +from robottelo.logging import logger + +synced_repos = pytest.StashKey[dict] + +RHEL7_VER = '7.9' +RHEL8_VER = '8.9' +RHEL9_VER = '9.3' + +RHEL_REPOS = { + 'rhel7_server': { + 'id': 'rhel-7-server-rpms', + 'name': f'Red Hat Enterprise Linux 7 Server RPMs x86_64 {RHEL7_VER}', + 'releasever': RHEL7_VER, + 'reposet': 'Red Hat Enterprise Linux 7 Server (RPMs)', + 'product': 'Red Hat Enterprise Linux Server', + }, + 'rhel7_server_extras': { + 'id': 'rhel-7-server-extras-rpms', + 'name': 'Red Hat Enterprise Linux 7 Server - Extras RPMs x86_64', + 'releasever': '7', + 'reposet': 'Red Hat Enterprise Linux 7 Server - Extras (RPMs)', + 'product': 'Red Hat Enterprise Linux Server', + }, + 'rhel8_bos': { + 'id': 'rhel-8-for-x86_64-baseos-rpms', + 'name': f'Red Hat Enterprise Linux 8 for x86_64 - BaseOS RPMs {RHEL8_VER}', + 'releasever': RHEL8_VER, + 'reposet': 'Red Hat Enterprise Linux 8 for x86_64 - BaseOS (RPMs)', + }, + 'rhel8_aps': { + 'id': 'rhel-8-for-x86_64-appstream-rpms', + 'name': f'Red Hat Enterprise Linux 8 for x86_64 - AppStream RPMs {RHEL8_VER}', + 'releasever': RHEL8_VER, + 'reposet': 'Red Hat Enterprise Linux 8 for x86_64 - AppStream (RPMs)', + }, + 'rhel9_bos': { + 'id': 'rhel-9-for-x86_64-baseos-rpms', + 'name': f'Red Hat Enterprise Linux 9 for x86_64 - BaseOS RPMs {RHEL9_VER}', + 'releasever': RHEL9_VER, + 'reposet': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS (RPMs)', + }, + 'rhel9_aps': { + 'id': 'rhel-9-for-x86_64-appstream-rpms', + 'name': f'Red Hat Enterprise Linux 9 for x86_64 - AppStream RPMs {RHEL9_VER}', + 'releasever': RHEL9_VER, + 'reposet': 'Red Hat Enterprise Linux 9 for x86_64 - 
AppStream (RPMs)', + }, +} + + +@pytest.fixture(scope='module') +def module_stash(request): + """Module scoped stash for storing data between tests""" + # Please refer the documentation for more details on stash + # https://docs.pytest.org/en/latest/reference/reference.html#stash + request.node.stash[synced_repos] = {} + return request.node.stash + + +@pytest.fixture(scope='module') +def module_leapp_lce(module_target_sat, module_sca_manifest_org): + return module_target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() + + +@pytest.fixture +def function_leapp_cv(module_target_sat, module_sca_manifest_org, leapp_repos, module_leapp_lce): + function_leapp_cv = module_target_sat.api.ContentView( + organization=module_sca_manifest_org + ).create() + function_leapp_cv.repository = leapp_repos + function_leapp_cv = function_leapp_cv.update(['repository']) + function_leapp_cv.publish() + cvv = function_leapp_cv.read().version[0] + cvv.promote(data={'environment_ids': module_leapp_lce.id, 'force': True}) + return function_leapp_cv.read() + + +@pytest.fixture +def function_leapp_ak( + module_target_sat, + function_leapp_cv, + module_leapp_lce, + module_sca_manifest_org, +): + return module_target_sat.api.ActivationKey( + content_view=function_leapp_cv, + environment=module_leapp_lce, + organization=module_sca_manifest_org, + ).create() + + +@pytest.fixture +def leapp_repos( + default_architecture, + module_stash, + upgrade_path, + module_target_sat, + module_sca_manifest_org, +): + """Enable and sync RHEL BaseOS, AppStream repositories""" + source = upgrade_path['source_version'] + target = upgrade_path['target_version'] + all_repos = [] + for rh_repo_key in RHEL_REPOS: + release_version = RHEL_REPOS[rh_repo_key]['releasever'] + if release_version in str(source) or release_version in target: + prod = 'rhel' if 'rhel7' in rh_repo_key else rh_repo_key.split('_')[0] + if module_stash[synced_repos].get(rh_repo_key, None): + logger.info('Repo %s already synced, not syncing it', rh_repo_key) + else: + module_stash[synced_repos][rh_repo_key] = True + repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=default_architecture.name, + org_id=module_sca_manifest_org.id, + product=PRDS[prod], + repo=RHEL_REPOS[rh_repo_key]['name'], + reposet=RHEL_REPOS[rh_repo_key]['reposet'], + releasever=release_version, + ) + rh_repo = module_target_sat.api.Repository(id=repo_id).read() + all_repos.append(rh_repo) + rh_repo.sync(timeout=1800) + return all_repos + + +@pytest.fixture +def verify_target_repo_on_satellite( + module_target_sat, + function_leapp_cv, + module_sca_manifest_org, + module_leapp_lce, + upgrade_path, +): + """Verify target rhel version repositories have been added in correct CV, LCE on Satellite""" + target_rhel_major_ver = upgrade_path['target_version'].split('.')[0] + cmd_out = module_target_sat.cli.Repository.list( + { + 'search': f'content_label ~ rhel-{target_rhel_major_ver}', + 'content-view-id': function_leapp_cv.id, + 'organization-id': module_sca_manifest_org.id, + 'lifecycle-environment-id': module_leapp_lce.id, + } + ) + repo_names = [out['name'] for out in cmd_out] + if target_rhel_major_ver == '9': + assert RHEL_REPOS['rhel9_bos']['name'] in repo_names + assert RHEL_REPOS['rhel9_aps']['name'] in repo_names + else: + assert RHEL_REPOS['rhel8_bos']['name'] in repo_names + assert RHEL_REPOS['rhel8_aps']['name'] in repo_names + + +@pytest.fixture +def custom_leapp_host(upgrade_path, module_target_sat, module_sca_manifest_org, 
function_leapp_ak): """Check out a content host and register it with Satellite""" with Broker( workflow='deploy-rhel', host_class=ContentHost, deploy_rhel_version=upgrade_path['source_version'], deploy_flavor=settings.flavors.default, ) as chost: result = chost.register( module_sca_manifest_org, None, function_leapp_ak.name, module_target_sat ) assert result.status == 0, f'Failed to register host: {result.stderr}' yield chost + + +@pytest.fixture +def precondition_check_upgrade_and_install_leapp_tool(custom_leapp_host): + """Clean up the leapp working directory if an in-place upgrade already ran, + set the rhel release version, update the system and install leapp-upgrade""" + source_rhel = custom_leapp_host.os_version.base_version + custom_leapp_host.run('rm -rf /root/tmp_leapp_py3') + custom_leapp_host.run('yum repolist') + custom_leapp_host.run(f'subscription-manager release --set {source_rhel}') + assert custom_leapp_host.run('yum update -y').status == 0 + assert custom_leapp_host.run('yum install leapp-upgrade -y').status == 0 + if custom_leapp_host.run('needs-restarting -r').status == 1: + custom_leapp_host.power_control(state='reboot', ensure=True) + + +@pytest.mark.parametrize( + 'upgrade_path', + [ + # {'source_version': RHEL7_VER, 'target_version': RHEL8_VER}, + {'source_version': RHEL8_VER, 'target_version': RHEL9_VER}, + ], + ids=lambda upgrade_path: f'{upgrade_path["source_version"]}' + f'_to_{upgrade_path["target_version"]}', +) +def test_leapp_upgrade_rhel( + module_target_sat, + custom_leapp_host, + upgrade_path, + verify_target_repo_on_satellite, + precondition_check_upgrade_and_install_leapp_tool, +): + """Upgrade a RHEL host to the next major RHEL release using the Leapp preupgrade and Leapp upgrade + job templates + + :id: 8eccc689-3bea-4182-84f3-c121e95d54c3 + + :steps: + 1. Import a subscription manifest and enable, sync source & target repositories + 2. Create LCE, Create CV, add repositories to it, publish and promote CV, Create AK, etc. + 3. Register content host with AK + 4. Verify that target rhel repositories are enabled on Satellite + 5. Update all packages, install leapp tool and fix inhibitors + 6. Run Leapp Preupgrade and Leapp Upgrade job template + + :expectedresults: + 1. 
The RHEL host is upgraded to the target major RHEL version """ + # Fixing known inhibitors for source rhel version 8 + if custom_leapp_host.os_version.major == 8: + # Inhibitor - Firewalld Configuration AllowZoneDrifting Is Unsupported + custom_leapp_host.run( + 'sed -i "s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/" /etc/firewalld/firewalld.conf' + ) + # Run the Leapp preupgrade job template + template_id = ( + module_target_sat.api.JobTemplate() + .search(query={'search': 'name="Run preupgrade via Leapp"'})[0] + .id + ) + job = module_target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {custom_leapp_host.hostname}', + }, + ) + module_target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1800 + ) + result = module_target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + + # Run the Leapp upgrade job template + template_id = ( + module_target_sat.api.JobTemplate() + .search(query={'search': 'name="Run upgrade via Leapp"'})[0] + .id + ) + job = module_target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {custom_leapp_host.hostname}', + 'inputs': {'Reboot': 'true'}, + }, + ) + module_target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1800 + ) + result = module_target_sat.api.JobInvocation(id=job['id']).read() + assert result.succeeded == 1 + # Wait for the host to be rebooted and the SSH daemon to be started. + custom_leapp_host.wait_for_connection() + + custom_leapp_host.clean_cached_properties() + new_ver = str(custom_leapp_host.os_version) + assert new_ver == upgrade_path['target_version'] diff --git a/tests/foreman/cli/test_lifecycleenvironment.py b/tests/foreman/cli/test_lifecycleenvironment.py index fc05395c79c..75bf12c681b 100644 --- a/tests/foreman/cli/test_lifecycleenvironment.py +++ b/tests/foreman/cli/test_lifecycleenvironment.py @@ -4,33 +4,25 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: LifecycleEnvironments :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from math import ceil -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_org -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment from robottelo.constants import ENVIRONMENT +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='class') -def module_lce(module_org): - return make_lifecycle_environment( +def module_lce(module_org, class_target_sat): + return class_target_sat.cli_factory.make_lifecycle_environment( { 'name': module_org.name, 'organization-id': module_org.id, @@ -41,7 +33,7 @@ # Issues validation @pytest.mark.tier2 -def test_positive_list_subcommand(module_org): +def test_positive_list_subcommand(module_org, module_target_sat): """List subcommand returns standard output :id: cca249d0-fb77-422b-aae3-3361887269db @@ -56,12 +48,14 @@ # List available lifecycle environments using default Table # output cmd = 'lifecycle-environment list --organization-id="%s"' - result = LifecycleEnvironment.execute(cmd % module_org.id, None, None, False) + 
result = module_target_sat.cli.LifecycleEnvironment.execute( + cmd % module_org.id, None, None, False + ) assert len(result) > 0 @pytest.mark.tier2 -def test_positive_search_lce_via_UTF8(module_org): +def test_positive_search_lce_via_UTF8(module_org, module_target_sat): """Search lifecycle environment via its name containing UTF-8 chars @@ -75,15 +69,18 @@ """ test_data = {'name': gen_string('utf8', 15), 'organization-id': module_org.id} # Can we find the new object - result = LifecycleEnvironment.info( - {'name': make_lifecycle_environment(test_data)['name'], 'organization-id': module_org.id} + result = module_target_sat.cli.LifecycleEnvironment.info( + { + 'name': module_target_sat.cli_factory.make_lifecycle_environment(test_data)['name'], + 'organization-id': module_org.id, + } ) assert result['name'] == test_data['name'] # CRUD @pytest.mark.tier1 -def test_positive_lce_crud(module_org): +def test_positive_lce_crud(module_org, module_target_sat): """CRUD test case for lifecycle environment for name, description, label, registry name pattern, and unauthenticated pull @@ -104,7 +101,7 @@ ).format(gen_string('alpha', 5)) # create - lce = make_lifecycle_environment( + lce = module_target_sat.cli_factory.make_lifecycle_environment( { 'organization': org_name, 'organization-id': module_org.id, @@ -121,7 +118,7 @@ assert lce['organization'] == org_name # update - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'id': lce['id'], 'new-name': new_name, @@ -130,19 +127,23 @@ 'registry-name-pattern': registry_name_pattern, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['name'] == new_name assert lce['registry-name-pattern'] == registry_name_pattern assert lce['unauthenticated-pull'] == 'true' # delete - LifecycleEnvironment.delete({'id': lce['id']}) + module_target_sat.cli.LifecycleEnvironment.delete({'id': lce['id']}) with pytest.raises(CLIReturnCodeError): - LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) @pytest.mark.tier1 -def test_positive_create_with_organization_label(module_org): +def test_positive_create_with_organization_label(module_org, module_target_sat): """Create lifecycle environment, specifying organization label :id: eb5cfc71-c83d-45ca-ba34-9ef79197691d @@ -153,14 +154,14 @@ :CaseImportance: Critical """ - new_lce = make_lifecycle_environment( - {'name': gen_string('alpha'), 'organization-label': module_org.label} + new_lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'name': gen_string('alpha'), 'organization-id': module_org.id} ) assert new_lce['organization'] == module_org.label @pytest.mark.tier1 -def test_positve_list_paths(module_org): +def test_positive_list_paths(module_org, module_target_sat): """List the environment paths under a given organization :id: 71600d6b-1ef4-4b88-8e9b-eb2481ee1fe2 @@ -170,9 +171,11 @@ :CaseImportance: Critical """ - lc_env = make_lifecycle_environment({'organization-id': module_org.id}) + lc_env = 
module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Add paths to lifecycle environments - result = LifecycleEnvironment.paths( + result = module_target_sat.cli.LifecycleEnvironment.paths( {'organization-id': module_org.id, 'permission-type': 'readable'} ) assert f"Library >> {lc_env['name']}" in result @@ -181,27 +184,27 @@ class LifeCycleEnvironmentPaginationTestCase: """Test class for LifeCycle Environment pagination tests""" - @classmethod - def setUpClass(cls): + @pytest.fixture(scope='class', autouse=True) + def class_setup(self, target_sat): """Create organization and lifecycle environments to reuse in tests""" - super().setUpClass() - cls.lces_count = 25 - cls.org = make_org() + self.lces_count = 25 + self.org = target_sat.cli_factory.make_org() env_base_name = gen_string('alpha') last_env_name = ENVIRONMENT - cls.env_names = [last_env_name] - for env_index in range(cls.lces_count): + self.env_names = [last_env_name] + for env_index in range(self.lces_count): env_name = f'{env_base_name}-{env_index}' - make_lifecycle_environment( - {'name': env_name, 'organization-id': cls.org['id'], 'prior': last_env_name} + target_sat.cli_factory.make_lifecycle_environment( + {'name': env_name, 'organization-id': self.org['id'], 'prior': last_env_name} ) last_env_name = env_name - cls.env_names.append(env_name) + self.env_names.append(env_name) - cls.lces_count += 1 # include default 'Library' lce + self.lces_count += 1 # include default 'Library' lce @pytest.mark.tier2 - def test_positive_list_all_with_per_page(self): + def test_positive_list_all_with_per_page(self, target_sat): """Attempt to list more than 20 lifecycle environment with per-page option. @@ -214,7 +217,7 @@ """ per_page_count = self.lces_count + 5 - lifecycle_environments = LifecycleEnvironment.list( + lifecycle_environments = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'per-page': per_page_count} ) @@ -223,7 +226,7 @@ assert env_name_set == set(self.env_names) @pytest.mark.tier2 - def test_positive_list_with_pagination(self): + def test_positive_list_with_pagination(self, target_sat): """Make sure lces list can be displayed with different items per page value @@ -241,14 +244,14 @@ # Verify the first page contains exactly the same items count # as `per-page` value with self.subTest(per_page): - lces = LifecycleEnvironment.list( + lces = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'per-page': per_page} ) assert len(lces) == per_page # Verify pagination and total amount of pages by checking the # items count on the last page last_page = ceil(self.lces_count / per_page) - lces = LifecycleEnvironment.list( + lces = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'page': last_page, 'per-page': per_page} ) assert len(lces) == (self.lces_count % per_page or per_page) diff --git a/tests/foreman/cli/test_location.py b/tests/foreman/cli/test_location.py index 6f358baa166..9f835c1c581 100644 --- a/tests/foreman/cli/test_location.py +++ b/tests/foreman/cli/test_location.py @@ -4,42 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: OrganizationsandLocations :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import 
pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.domain import Domain -from robottelo.cli.environment import Environment -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_compute_resource -from robottelo.cli.factory import make_domain -from robottelo.cli.factory import make_environment -from robottelo.cli.factory import make_hostgroup -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_medium -from robottelo.cli.factory import make_subnet -from robottelo.cli.factory import make_template -from robottelo.cli.factory import make_user -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.location import Location -from robottelo.cli.medium import Medium -from robottelo.cli.proxy import Proxy -from robottelo.cli.subnet import Subnet -from robottelo.cli.template import Template -from robottelo.cli.user import User +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError def _proxy(request, target_sat): @@ -48,107 +23,112 @@ def _proxy(request, target_sat): @request.addfinalizer def _cleanup(): - if Proxy.exists(search=('name', proxy['name'])): - Proxy.delete(options={'id': proxy['id']}) + if target_sat.cli.Proxy.exists(search=('name', proxy['name'])): + target_sat.cli.Proxy.delete(options={'id': proxy['id']}) return proxy -def _location(request, options=None): - location = make_location(options=options) +def _location(request, target_sat, options=None): + location = target_sat.cli_factory.make_location(options) @request.addfinalizer def _cleanup(): - if Location.exists(search=('id', location['id'])): - Location.delete(options={'id': location['id']}) + if target_sat.cli.Location.exists(search=('id', location['id'])): + target_sat.cli.Location.delete(options={'id': location['id']}) return location -def _subnet(request): - subnet = make_subnet() +def _subnet(request, target_sat): + subnet = target_sat.cli_factory.make_subnet() @request.addfinalizer def _cleanup(): - if Subnet.exists(search=('name', subnet['name'])): - Subnet.delete(options={'id': subnet['id']}) + if target_sat.cli.Subnet.exists(search=('name', subnet['name'])): + target_sat.cli.Subnet.delete(options={'id': subnet['id']}) return subnet -def _environment(request): - environment = make_environment() +def _environment(request, target_sat): + environment = target_sat.cli_factory.make_environment() @request.addfinalizer def _cleanup(): - if Environment.exists(search=('name', environment['name'])): - Environment.delete(options={'id': environment['id']}) + if target_sat.cli.Environment.exists(search=('name', environment['name'])): + target_sat.cli.Environment.delete(options={'id': environment['id']}) return environment -def _domain(request): - domain = make_domain() +def _domain(request, target_sat): + domain = target_sat.cli_factory.make_domain() @request.addfinalizer def _cleanup(): - if Domain.exists(search=('name', domain['name'])): - Domain.delete(options={'id': domain['id']}) + if target_sat.cli.Domain.exists(search=('name', domain['name'])): + target_sat.cli.Domain.delete(options={'id': domain['id']}) return domain -def _medium(request): - medium = make_medium() +def _medium(request, target_sat): + medium = target_sat.cli_factory.make_medium() @request.addfinalizer def _cleanup(): - if Medium.exists(search=('name', medium['name'])): - Medium.delete(options={'id': medium['id']}) + if target_sat.cli.Medium.exists(search=('name', medium['name'])): 
+ target_sat.cli.Medium.delete(options={'id': medium['id']}) return medium -def _host_group(request): - host_group = make_hostgroup() +def _host_group(request, target_sat): + host_group = target_sat.cli_factory.hostgroup() @request.addfinalizer def _cleanup(): - if HostGroup.exists(search=('id', host_group['id'])): - HostGroup.delete(options={'id': host_group['id']}) + if target_sat.cli.HostGroup.exists(search=('id', host_group['id'])): + target_sat.cli.HostGroup.delete(options={'id': host_group['id']}) return host_group -def _compute_resource(request): - compute_resource = make_compute_resource() +def _compute_resource(request, target_sat): + compute_resource = target_sat.cli_factory.compute_resource( + { + 'provider': 'Libvirt', + 'url': 'qemu+tcp://localhost:16509/system', + } + ) @request.addfinalizer def _cleanup(): - if ComputeResource.exists(search=('id', compute_resource['id'])): - ComputeResource.delete(options={'id': compute_resource['id']}) + if target_sat.cli.ComputeResource.exists(search=('id', compute_resource['id'])): + target_sat.cli.ComputeResource.delete(options={'id': compute_resource['id']}) return compute_resource -def _template(request): - template = make_template() +def _template(request, target_sat): + template = target_sat.cli_factory.make_template() @request.addfinalizer def _cleanup(): - if Template.exists(search=('name', template['name'])): - Template.delete(options={'id': template['id']}) + if target_sat.cli.Template.exists(search=('name', template['name'])): + target_sat.cli.Template.delete(options={'id': template['id']}) return template -def _user(request): - user = make_user() +def _user(request, target_sat): + user = target_sat.cli_factory.user() @request.addfinalizer def _cleanup(): - if User.exists(search=('login', user['login'])): - User.delete(options={'id': user['id']}) + if target_sat.cli.User.exists(search=('login', user['login'])): + target_sat.cli.User.delete(options={'id': user['id']}) return user @@ -159,7 +139,7 @@ class TestLocation: @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_create_update_delete(self, request): + def test_positive_create_update_delete(self, request, target_sat): """Create new location with attributes, update and delete it :id: e1844d9d-ec4a-44b3-9743-e932cc70020d @@ -174,16 +154,17 @@ def test_positive_create_update_delete(self, request): """ # Create description = gen_string('utf8') - subnet = _subnet(request) - domains = [_domain(request) for _ in range(0, 2)] - host_groups = [_host_group(request) for _ in range(0, 3)] - medium = _medium(request) - compute_resource = _compute_resource(request) - template = _template(request) - user = _user(request) + subnet = _subnet(request, target_sat) + domains = [_domain(request, target_sat) for _ in range(0, 2)] + host_groups = [_host_group(request, target_sat) for _ in range(0, 3)] + medium = _medium(request, target_sat) + compute_resource = _compute_resource(request, target_sat) + template = _template(request, target_sat) + user = _user(request, target_sat) location = _location( request, + target_sat, { 'description': description, 'subnet-ids': subnet['id'], @@ -219,25 +200,25 @@ def test_positive_create_update_delete(self, request): assert location['users'][0] == user['login'] # Update - Location.update( + target_sat.cli.Location.update( { 'id': location['id'], 'domain-ids': domains[1]['id'], 'hostgroup-ids': [host_groups[1]['id'], host_groups[2]['id']], } ) - location = Location.info({'id': location['id']}) + location = 
target_sat.cli.Location.info({'id': location['id']}) assert host_groups[1]['name'] in location['hostgroups'] assert host_groups[2]['name'] in location['hostgroups'] assert location['domains'][0] == domains[1]['name'] # Delete - Location.delete({'id': location['id']}) + target_sat.cli.Location.delete({'id': location['id']}) with pytest.raises(CLIReturnCodeError): - Location.info({'id': location['id']}) + target_sat.cli.Location.info({'id': location['id']}) @pytest.mark.tier1 - def test_positive_create_with_parent(self, request): + def test_positive_create_with_parent(self, request, target_sat): """Create new location with parent location specified :id: 49b34733-103a-4fee-818b-6a3386253af1 @@ -250,12 +231,12 @@ def test_positive_create_with_parent(self, request): expected parent location set """ - parent_location = _location(request) - location = _location(request, {'parent-id': parent_location['id']}) + parent_location = _location(request, target_sat) + location = _location(request, target_sat, {'parent-id': parent_location['id']}) assert location['parent'] == parent_location['name'] @pytest.mark.tier1 - def test_negative_create_with_same_name(self, request): + def test_negative_create_with_same_name(self, request, target_sat): """Try to create location using same name twice :id: 4fbaea41-9775-40a2-85a5-4dc05cc95134 @@ -265,13 +246,13 @@ def test_negative_create_with_same_name(self, request): :CaseImportance: Critical """ name = gen_string('utf8') - location = _location(request, options={'name': name}) + location = _location(request, target_sat, options={'name': name}) assert location['name'] == name with pytest.raises(CLIFactoryError): - _location(request, options={'name': name}) + _location(request, target_sat, options={'name': name}) @pytest.mark.tier1 - def test_negative_create_with_user_by_name(self, request): + def test_negative_create_with_user_by_name(self, request, target_sat): """Try to create new location with incorrect user assigned to it Use user login as a parameter @@ -282,7 +263,7 @@ def test_negative_create_with_user_by_name(self, request): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - _location(request, options={'users': gen_string('utf8', 80)}) + _location(request, target_sat, options={'users': gen_string('utf8', 80)}) @pytest.mark.run_in_one_thread @pytest.mark.tier2 @@ -296,21 +277,24 @@ def test_positive_add_and_remove_capsule(self, request, target_sat): :BZ: 1398695 - :CaseLevel: Integration """ - location = _location(request) + location = _location(request, target_sat) proxy = _proxy(request, target_sat) - Location.add_smart_proxy({'name': location['name'], 'smart-proxy-id': proxy['id']}) - location = Location.info({'name': location['name']}) + target_sat.cli.Location.add_smart_proxy( + {'name': location['name'], 'smart-proxy-id': proxy['id']} + ) + location = target_sat.cli.Location.info({'name': location['name']}) assert proxy['name'] in location['smart-proxies'] - Location.remove_smart_proxy({'name': location['name'], 'smart-proxy': proxy['name']}) - location = Location.info({'name': location['name']}) + target_sat.cli.Location.remove_smart_proxy( + {'name': location['name'], 'smart-proxy': proxy['name']} + ) + location = target_sat.cli.Location.info({'name': location['name']}) assert proxy['name'] not in location['smart-proxies'] @pytest.mark.tier1 - def test_positive_add_update_remove_parameter(self, request): + def test_positive_add_update_remove_parameter(self, request, target_sat): """Add, update and remove parameter to location :id: 
61b564f2-a42a-48de-833d-bec3a127d0f5 @@ -323,30 +307,30 @@ def test_positive_add_update_remove_parameter(self, request): param_name = gen_string('alpha') param_value = gen_string('alpha') param_new_value = gen_string('alpha') - location = _location(request) - Location.set_parameter( + location = _location(request, target_sat) + target_sat.cli.Location.set_parameter( {'name': param_name, 'value': param_value, 'location-id': location['id']} ) - location = Location.info({'id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 1 assert param_value == location['parameters'][param_name.lower()] # Update - Location.set_parameter( + target_sat.cli.Location.set_parameter( {'name': param_name, 'value': param_new_value, 'location': location['name']} ) - location = Location.info({'id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 1 assert param_new_value == location['parameters'][param_name.lower()] # Remove - Location.delete_parameter({'name': param_name, 'location': location['name']}) - location = Location.info({'id': location['id']}) + target_sat.cli.Location.delete_parameter({'name': param_name, 'location': location['name']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 0 assert param_name.lower() not in location['parameters'] @pytest.mark.tier2 - def test_positive_update_parent(self, request): + def test_positive_update_parent(self, request, target_sat): """Update location's parent location :id: 34522d1a-1190-48d8-9285-fc9a9bcf6c6a @@ -359,16 +343,16 @@ def test_positive_update_parent(self, request): :CaseImportance: High """ - parent_location = _location(request) - location = _location(request, {'parent-id': parent_location['id']}) + parent_location = _location(request, target_sat) + location = _location(request, target_sat, {'parent-id': parent_location['id']}) - parent_location_2 = _location(request) - Location.update({'id': location['id'], 'parent-id': parent_location_2['id']}) - location = Location.info({'id': location['id']}) + parent_location_2 = _location(request, target_sat) + target_sat.cli.Location.update({'id': location['id'], 'parent-id': parent_location_2['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert location['parent'] == parent_location_2['name'] @pytest.mark.tier1 - def test_negative_update_parent_with_child(self, request): + def test_negative_update_parent_with_child(self, request, target_sat): """Attempt to set child location as a parent and vice versa :id: fd4cb1cf-377f-4b48-b7f4-d4f6ca56f544 @@ -381,41 +365,19 @@ def test_negative_update_parent_with_child(self, request): :CaseImportance: High """ - parent_location = _location(request) - location = _location(request, {'parent-id': parent_location['id']}) + parent_location = _location(request, target_sat) + location = _location(request, target_sat, {'parent-id': parent_location['id']}) # set parent as child with pytest.raises(CLIReturnCodeError): - Location.update({'id': parent_location['id'], 'parent-id': location['id']}) - parent_location = Location.info({'id': parent_location['id']}) + target_sat.cli.Location.update( + {'id': parent_location['id'], 'parent-id': location['id']} + ) + parent_location = target_sat.cli.Location.info({'id': parent_location['id']}) assert parent_location.get('parent') is None # set child as parent with pytest.raises(CLIReturnCodeError): - Location.update({'id': 
location['id'], 'parent-id': location['id']}) - location = Location.info({'id': location['id']}) + target_sat.cli.Location.update({'id': location['id'], 'parent-id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert location['parent'] == parent_location['name'] - - @pytest.mark.skip_if_open("BZ:1937009") - @pytest.mark.tier1 - def test_positive_nested_location(self, request, module_org): - """View nested location in an organization - - :id: cce4a33d-7743-47ef-b437-2539fdc73c44 - - :customerscenario: true - - :BZ: 1937009 - - :expectedresults: Nested location can be viewed - - :CaseImportance: Medium - """ - parent_location = _location(request, {'organization-id': module_org.id}) - location = _location( - request, {'parent-id': parent_location['id'], 'organization-id': module_org.id} - ) - loc_list = Location.list( - {'search': f'title={location["title"]}', 'organization-id': module_org.id} - ) - assert loc_list[0]['title'] == location['title'] diff --git a/tests/foreman/cli/test_logging.py b/tests/foreman/cli/test_logging.py index f926d96a7c8..8d315b3e59c 100644 --- a/tests/foreman/cli/test_logging.py +++ b/tests/foreman/cli/test_logging.py @@ -4,28 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Logging :Team: Rocket -:TestType: Functional - :CaseImportance: Medium -:Upstream: No """ import re -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.logging import logger @@ -227,8 +218,6 @@ def test_positive_logging_from_pulp3(module_org, target_sat): :id: 8d5718e6-3442-47d6-b541-0aa78d007e8b - :CaseLevel: Component - :CaseImportance: High """ source_log = '/var/log/foreman/production.log' @@ -239,10 +228,10 @@ def test_positive_logging_from_pulp3(module_org, target_sat): name = product_name label = product_name desc = product_name - product = make_product( + product = target_sat.cli_factory.make_product( {'description': desc, 'label': label, 'name': name, 'organization-id': module_org.id}, ) - repo = make_repository( + repo = target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], @@ -250,8 +239,8 @@ def test_positive_logging_from_pulp3(module_org, target_sat): }, ) # Synchronize the repository - Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Get the id of repository sync from task task_out = target_sat.execute( "hammer task list | grep -F \'Synchronize repository {\"text\"=>\"repository\'" diff --git a/tests/foreman/cli/test_medium.py b/tests/foreman/cli/test_medium.py index 8f3d7ad2400..26017b415ab 100644 --- a/tests/foreman/cli/test_medium.py +++ b/tests/foreman/cli/test_medium.py @@ -4,29 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_alphanumeric +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_location -from robottelo.cli.factory import 
make_medium -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_os -from robottelo.cli.medium import Medium -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import parametrized, valid_data_list URL = "http://mirror.fakeos.org/%s/$major.$minor/os/$arch" OSES = ['Archlinux', 'Debian', 'Gentoo', 'Redhat', 'Solaris', 'Suse', 'Windows'] @@ -37,7 +26,7 @@ class TestMedium: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list().values())) - def test_positive_crud_with_name(self, name): + def test_positive_crud_with_name(self, name, module_target_sat): """Check if Medium can be created, updated, deleted :id: 66b749b2-0248-47a8-b78f-3366f3804b29 @@ -49,18 +38,18 @@ def test_positive_crud_with_name(self, name): :CaseImportance: Critical """ - medium = make_medium({'name': name}) + medium = module_target_sat.cli_factory.make_medium({'name': name}) assert medium['name'] == name new_name = gen_alphanumeric(6) - Medium.update({'name': medium['name'], 'new-name': new_name}) - medium = Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.update({'name': medium['name'], 'new-name': new_name}) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert medium['name'] == new_name - Medium.delete({'id': medium['id']}) + module_target_sat.cli.Medium.delete({'id': medium['id']}) with pytest.raises(CLIReturnCodeError): - Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.info({'id': medium['id']}) @pytest.mark.tier1 - def test_positive_create_with_location(self): + def test_positive_create_with_location(self, module_target_sat): """Check if medium with location can be created :id: cbc6c586-fae7-4bb9-aeb1-e30158f16a98 @@ -70,12 +59,12 @@ def test_positive_create_with_location(self): :CaseImportance: Medium """ - location = make_location() - medium = make_medium({'location-ids': location['id']}) + location = module_target_sat.cli_factory.make_location() + medium = module_target_sat.cli_factory.make_medium({'location-ids': location['id']}) assert location['name'] in medium['locations'] @pytest.mark.tier1 - def test_positive_create_with_organization_by_id(self): + def test_positive_create_with_organization_by_id(self, module_target_sat): """Check if medium with organization can be created :id: 631bb6ed-e42b-482a-83f0-f6ce0f20729a @@ -85,13 +74,13 @@ def test_positive_create_with_organization_by_id(self): :CaseImportance: Medium """ - org = make_org() - medium = make_medium({'organization-ids': org['id']}) + org = module_target_sat.cli_factory.make_org() + medium = module_target_sat.cli_factory.make_medium({'organization-ids': org['id']}) assert org['name'] in medium['organizations'] @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_remove_os(self): + def test_positive_remove_os(self, module_target_sat): """Check if Medium can be associated with operating system and then removed from media :id: 23b5b55b-3624-440c-8001-75c7c5a5a004 @@ -99,13 +88,16 @@ def test_positive_remove_os(self): :expectedresults: Operating system removed - :CaseLevel: Integration """ - medium = make_medium() - os = make_os() - Medium.add_operating_system({'id': medium['id'], 'operatingsystem-id': os['id']}) - medium = Medium.info({'id': medium['id']}) + medium = module_target_sat.cli_factory.make_medium() + os = module_target_sat.cli_factory.make_os() + 
module_target_sat.cli.Medium.add_operating_system( + {'id': medium['id'], 'operatingsystem-id': os['id']} + ) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert os['title'] in medium['operating-systems'] - Medium.remove_operating_system({'id': medium['id'], 'operatingsystem-id': os['id']}) - medium = Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.remove_operating_system( + {'id': medium['id'], 'operatingsystem-id': os['id']} + ) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert os['name'] not in medium['operating-systems'] diff --git a/tests/foreman/cli/test_model.py b/tests/foreman/cli/test_model.py index aa2d5c8f704..8fc51be9f62 100644 --- a/tests/foreman/cli/test_model.py +++ b/tests/foreman/cli/test_model.py @@ -4,45 +4,42 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_model -from robottelo.cli.model import Model -from robottelo.utils.datafactory import invalid_id_list -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_id_list, + invalid_values_list, + parametrized, + valid_data_list, +) class TestModel: """Test class for Model CLI""" - @pytest.fixture() - def class_model(self): + @pytest.fixture + def class_model(self, target_sat): """Shared model for tests""" - return make_model() + return target_sat.cli_factory.make_model() @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', - **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) + ('name', 'new_name'), + **parametrized( + list(zip(valid_data_list().values(), valid_data_list().values(), strict=True)) + ) ) - def test_positive_crud_with_name(self, name, new_name): + def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Successfully creates, updates and deletes a Model. 
:id: 9ca9d5ff-750a-4d60-91b2-4c4375f0e35f @@ -53,17 +50,17 @@ def test_positive_crud_with_name(self, name, new_name): :CaseImportance: High """ - model = make_model({'name': name}) + model = module_target_sat.cli_factory.make_model({'name': name}) assert model['name'] == name - Model.update({'id': model['id'], 'new-name': new_name}) - model = Model.info({'id': model['id']}) + module_target_sat.cli.Model.update({'id': model['id'], 'new-name': new_name}) + model = module_target_sat.cli.Model.info({'id': model['id']}) assert model['name'] == new_name - Model.delete({'id': model['id']}) + module_target_sat.cli.Model.delete({'id': model['id']}) with pytest.raises(CLIReturnCodeError): - Model.info({'id': model['id']}) + module_target_sat.cli.Model.info({'id': model['id']}) @pytest.mark.tier1 - def test_positive_create_with_vendor_class(self): + def test_positive_create_with_vendor_class(self, module_target_sat): """Check if Model can be created with specific vendor class :id: c36d3490-cd12-4f5f-a453-2ae5d0404496 @@ -73,12 +70,12 @@ def test_positive_create_with_vendor_class(self): :CaseImportance: Medium """ vendor_class = gen_string('utf8') - model = make_model({'vendor-class': vendor_class}) + model = module_target_sat.cli_factory.make_model({'vendor-class': vendor_class}) assert model['vendor-class'] == vendor_class @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, name, module_target_sat): """Don't create an Model with invalid data. :id: b2eade66-b612-47e7-bfcc-6e363023f498 @@ -90,11 +87,11 @@ def test_negative_create_with_name(self, name): :CaseImportance: High """ with pytest.raises(CLIReturnCodeError): - Model.create({'name': name}) + module_target_sat.cli.Model.create({'name': name}) @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, class_model, new_name): + def test_negative_update_name(self, class_model, new_name, module_target_sat): """Fail to update shared model name :id: 98020a4a-1789-4df3-929c-6c132b57f5a1 @@ -106,13 +103,13 @@ def test_negative_update_name(self, class_model, new_name): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Model.update({'id': class_model['id'], 'new-name': new_name}) - result = Model.info({'id': class_model['id']}) + module_target_sat.cli.Model.update({'id': class_model['id'], 'new-name': new_name}) + result = module_target_sat.cli.Model.info({'id': class_model['id']}) assert class_model['name'] == result['name'] @pytest.mark.tier1 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) - def test_negative_delete_by_id(self, entity_id): + def test_negative_delete_by_id(self, entity_id, module_target_sat): """Delete model by wrong ID :id: f8b0d428-1b3d-4fc9-9ca1-1eb30c8ac20a @@ -124,4 +121,4 @@ def test_negative_delete_by_id(self, entity_id): :CaseImportance: High """ with pytest.raises(CLIReturnCodeError): - Model.delete({'id': entity_id}) + module_target_sat.cli.Model.delete({'id': entity_id}) diff --git a/tests/foreman/cli/test_operatingsystem.py b/tests/foreman/cli/test_operatingsystem.py index 5bcf6ec535c..48ed8af8099 100644 --- a/tests/foreman/cli/test_operatingsystem.py +++ b/tests/foreman/cli/test_operatingsystem.py @@ -4,35 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Provisioning :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ 
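The model and operating-system hunks above and below lean on the parametrized helper from robottelo.utils.datafactory. Judging by call sites such as @pytest.mark.parametrize('name', **parametrized(invalid_values_list())), its contract is to expand a collection of cases into the argvalues/ids keyword arguments that pytest.mark.parametrize accepts. A rough, hypothetical reimplementation under that assumption:

    def parametrized(cases):
        # Map a dict to named test ids, or a plain iterable to stringified
        # ids, so call sites can splat the result as **kwargs right after
        # the positional argnames.
        if isinstance(cases, dict):
            ids, argvalues = list(cases.keys()), list(cases.values())
        else:
            argvalues = list(cases)
            ids = [str(value)[:30] for value in argvalues]
        return {'argvalues': argvalues, 'ids': ids}

Under that reading, **parametrized(valid_data_list()) supplies both the values and stable, human-readable test ids in a single expression.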
diff --git a/tests/foreman/cli/test_operatingsystem.py b/tests/foreman/cli/test_operatingsystem.py
index 5bcf6ec535c..48ed8af8099 100644
--- a/tests/foreman/cli/test_operatingsystem.py
+++ b/tests/foreman/cli/test_operatingsystem.py
@@ -4,35 +4,24 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: Component
-
 :CaseComponent: Provisioning
 
 :Team: Rocket
 
-:TestType: Functional
-
 :CaseImportance: High
 
-:Upstream: No
 """
+from fauxfactory import gen_alphanumeric, gen_string
 import pytest
-from fauxfactory import gen_alphanumeric
-from fauxfactory import gen_string
-
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_architecture
-from robottelo.cli.factory import make_medium
-from robottelo.cli.factory import make_os
-from robottelo.cli.factory import make_partition_table
-from robottelo.cli.factory import make_template
-from robottelo.cli.operatingsys import OperatingSys
+
 from robottelo.constants import DEFAULT_ORG
-from robottelo.utils.datafactory import filtered_datapoint
-from robottelo.utils.datafactory import invalid_values_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
+from robottelo.exceptions import CLIReturnCodeError
+from robottelo.utils.datafactory import (
+    filtered_datapoint,
+    invalid_values_list,
+    parametrized,
+    valid_data_list,
+)
 
 
 @filtered_datapoint
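The helpers imported above come from robottelo's datafactory. As a rough sketch only (the real implementations may differ in detail): `parametrized()` reshapes a `{id: value}` mapping into keyword arguments for `pytest.mark.parametrize`, and `filtered_datapoint` can shrink a data set to a single entry for quick runs:

    def parametrized(data):
        # sketch: turn {'utf8': 'x', 'alpha': 'y'} into parametrize kwargs
        if isinstance(data, dict):
            return {'ids': list(data.keys()), 'argvalues': list(data.values())}
        return {'ids': [str(i) for i in range(len(data))], 'argvalues': list(data)}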
""" - os_list_before = OperatingSys.list() name = gen_string('alpha') - os = make_os({'name': name}) - os_list = OperatingSys.list({'search': 'name=%s' % name}) - os_info = OperatingSys.info({'id': os_list[0]['id']}) - assert os['id'] == os_info['id'] - os_list_after = OperatingSys.list() - assert len(os_list_after) > len(os_list_before) - - @pytest.mark.tier1 - def test_positive_info_by_id(self): - """Displays info for operating system by its ID - - :id: b8f23b53-439a-4726-9757-164d99d5ed05 - - :expectedresults: Operating System is created and can be looked up by - its ID - - :CaseImportance: Critical - """ - os = make_os() - os_info = OperatingSys.info({'id': os['id']}) - # Info does not return major or minor but a concat of name, - # major and minor - assert os['id'] == os_info['id'] - assert os['name'] == os_info['name'] - assert str(os['major-version']) == os_info['major-version'] - assert str(os['minor-version']) == os_info['minor-version'] + desc = gen_string('alpha') + os_family = 'Redhat' + pass_hash = 'SHA256' + minor_version = gen_string('numeric', 1) + major_version = gen_string('numeric', 1) + architecture = target_sat.cli_factory.make_architecture() + medium = target_sat.cli_factory.make_medium() + ptable = target_sat.cli_factory.make_partition_table() + template = target_sat.cli_factory.make_template() + # Create OS + os = target_sat.cli.OperatingSys.create( + { + 'name': name, + 'description': desc, + 'family': os_family, + 'password-hash': pass_hash, + 'major': major_version, + 'minor': minor_version, + 'architecture-ids': architecture['id'], + 'medium-ids': medium['id'], + 'partition-table-ids': ptable['id'], + 'provisioning-template-ids': template['id'], + } + ) + assert os['name'] == name + assert os['title'] == desc + assert os['family'] == os_family + assert str(os['major-version']) == major_version + assert str(os['minor-version']) == minor_version + assert os['architectures'][0] == architecture['name'] + assert os['installation-media'][0] == medium['name'] + assert ptable['name'] in os['partition-tables'] + assert template['name'] in str(os['templates']) + # Read OS + os = target_sat.cli.OperatingSys.list({'search': f'name={name}'}) + assert os[0]['title'] == desc + os = target_sat.cli.OperatingSys.info({'id': os[0]['id']}) + assert os['name'] == name + assert os['title'] == desc + assert os['family'] == os_family + assert str(os['major-version']) == major_version + assert str(os['minor-version']) == minor_version + assert os['architectures'][0] == architecture['name'] + assert ptable['name'] in os['partition-tables'] + assert template['name'] in str(os['templates']) + new_name = gen_string('alpha') + new_desc = gen_string('alpha') + new_os_family = 'Redhat' + new_pass_hash = 'SHA256' + new_minor_version = gen_string('numeric', 1) + new_major_version = gen_string('numeric', 1) + new_architecture = target_sat.cli_factory.make_architecture() + new_medium = target_sat.cli_factory.make_medium() + new_ptable = target_sat.cli_factory.make_partition_table() + new_template = target_sat.cli_factory.make_template() + os = target_sat.cli.OperatingSys.update( + { + 'id': os['id'], + 'name': new_name, + 'description': new_desc, + 'family': new_os_family, + 'password-hash': new_pass_hash, + 'major': new_major_version, + 'minor': new_minor_version, + 'architecture-ids': new_architecture['id'], + 'medium-ids': new_medium['id'], + 'partition-table-ids': new_ptable['id'], + 'provisioning-template-ids': new_template['id'], + } + ) + os = target_sat.cli.OperatingSys.list({'search': 
 
     @pytest.mark.tier1
     @pytest.mark.parametrize('name', **parametrized(valid_data_list()))
-    def test_positive_create_with_name(self, name):
+    def test_positive_create_with_name(self, name, target_sat):
         """Create Operating System for all variations of name
 
         :id: d36eba9b-ccf6-4c9d-a07f-c74eebada89b
@@ -132,39 +150,12 @@ def test_positive_create_with_name(self, name):
 
         :CaseImportance: Critical
         """
-        os = make_os({'name': name})
+        os = target_sat.cli_factory.make_os({'name': name})
         assert os['name'] == name
 
-    @pytest.mark.tier1
-    def test_positive_create_with_arch_medium_ptable(self):
-        """Create an OS pointing to an arch, medium and partition table.
-
-        :id: 05bdb2c6-0d2e-4141-9e07-3ada3933b577
-
-        :expectedresults: An operating system is created.
-
-        :CaseImportance: Critical
-        """
-        architecture = make_architecture()
-        medium = make_medium()
-        ptable = make_partition_table()
-        operating_system = make_os(
-            {
-                'architecture-ids': architecture['id'],
-                'medium-ids': medium['id'],
-                'partition-table-ids': ptable['id'],
-            }
-        )
-
-        for attr in ('architectures', 'installation-media', 'partition-tables'):
-            assert len(operating_system[attr]) == 1
-        assert operating_system['architectures'][0] == architecture['name']
-        assert operating_system['installation-media'][0] == medium['name']
-        assert operating_system['partition-tables'][0] == ptable['name']
-
     @pytest.mark.tier1
     @pytest.mark.parametrize('name', **parametrized(invalid_values_list()))
-    def test_negative_create_with_name(self, name):
+    def test_negative_create_with_name(self, name, target_sat):
         """Create Operating System using invalid names
 
         :id: 848a20ce-292a-47d8-beea-da5916c43f11
@@ -175,48 +166,12 @@ def test_negative_create_with_name(self, name):
 
         :CaseImportance: Critical
         """
-        with pytest.raises(CLIFactoryError):
-            make_os({'name': name})
-
-    @pytest.mark.tier1
-    @pytest.mark.parametrize('new_name', **parametrized(valid_data_list()))
-    def test_positive_update_name(self, new_name):
-        """Positive update of operating system name
-
-        :id: 49b655f7-ba9b-4bb9-b09d-0f7140969a40
-
-        :parametrized: yes
-
-        :expectedresults: Operating System name is updated
-
-        :CaseImportance: Critical
-        """
-        os = make_os({'name': gen_alphanumeric()})
-        OperatingSys.update({'id': os['id'], 'name': new_name})
-        result = OperatingSys.info({'id': os['id']})
-        assert result['id'] == os['id']
-        assert result['name'] != os['name']
-
-    @pytest.mark.tier1
-    def test_positive_update_major_version(self):
-        """Update an Operating System's major version.
-
-        :id: 38a89dbe-6d1c-4602-a4c1-664425668de8
-
-        :expectedresults: Operating System major version is updated
-
-        :CaseImportance: Critical
-        """
-        os = make_os()
-        # New value for major
-        major = int(os['major-version']) + 1
-        OperatingSys.update({'id': os['id'], 'major': major})
-        os = OperatingSys.info({'id': os['id']})
-        assert int(os['major-version']) == major
+        with pytest.raises(CLIReturnCodeError):
+            target_sat.cli.OperatingSys.create({'name': name})
 
     @pytest.mark.tier1
     @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list()))
-    def test_negative_update_name(self, new_name):
+    def test_negative_update_name(self, new_name, target_sat):
         """Negative update of system name
 
         :id: 4b18ff6d-7728-4245-a1ce-38e62c05f454
@@ -227,34 +182,15 @@ def test_negative_update_name(self, new_name):
 
         :CaseImportance: Critical
         """
-        os = make_os({'name': gen_alphanumeric()})
+        os = target_sat.cli_factory.make_os({'name': gen_alphanumeric()})
         with pytest.raises(CLIReturnCodeError):
-            OperatingSys.update({'id': os['id'], 'name': new_name})
-        result = OperatingSys.info({'id': os['id']})
+            target_sat.cli.OperatingSys.update({'id': os['id'], 'name': new_name})
+        result = target_sat.cli.OperatingSys.info({'id': os['id']})
         assert result['name'] == os['name']
 
-    @pytest.mark.tier1
-    @pytest.mark.upgrade
-    @pytest.mark.parametrize('name', **parametrized(valid_data_list()))
-    def test_positive_delete_by_id(self, name):
-        """Successfully deletes Operating System by its ID
-
-        :id: a67a7b01-081b-42f8-a9ab-1f41166d649e
-
-        :parametrized: yes
-
-        :expectedresults: Operating System is deleted
-
-        :CaseImportance: Critical
-        """
-        os = make_os({'name': name})
-        OperatingSys.delete({'id': os['id']})
-        with pytest.raises(CLIReturnCodeError):
-            OperatingSys.info({'id': os['id']})
-
     @pytest.mark.tier1
     @pytest.mark.parametrize('test_data', **parametrized(negative_delete_data()))
-    def test_negative_delete_by_id(self, test_data):
+    def test_negative_delete_by_id(self, test_data, target_sat):
test_positive_add_template(self, target_sat): """Add provisioning template to operating system :id: 0ea9eb88-2d27-423d-a9d3-fdd788b4e28a :expectedresults: Provisioning template is added to Operating System - :CaseLevel: Integration """ - template = make_template() - os = make_os() + template = target_sat.cli_factory.make_template() + os = target_sat.cli_factory.make_os() default_template_name = os['default-templates'][0] - OperatingSys.add_provisioning_template( + target_sat.cli.OperatingSys.add_provisioning_template( {'provisioning-template': template['name'], 'id': os['id']} ) - os = OperatingSys.info({'id': os['id']}) + os = target_sat.cli.OperatingSys.info({'id': os['id']}) assert len(os['templates']) == 2 provision_template_name = f"{template['name']} ({template['type']})" assert default_template_name in os['templates'] assert provision_template_name in os['templates'] @pytest.mark.tier2 - def test_positive_add_ptable(self): + def test_positive_add_ptable(self, target_sat): """Add partition table to operating system :id: beba676f-b4e4-48e1-bb0c-18ad91847566 :expectedresults: Partition table is added to Operating System - :CaseLevel: Integration """ # Create a partition table. - ptable_name = make_partition_table()['name'] + ptable_name = target_sat.cli_factory.make_partition_table()['name'] # Create an operating system. - os_id = make_os()['id'] + os_id = target_sat.cli_factory.make_os()['id'] # Add the partition table to the operating system. - OperatingSys.add_ptable({'id': os_id, 'partition-table': ptable_name}) + target_sat.cli.OperatingSys.add_ptable({'id': os_id, 'partition-table': ptable_name}) # Verify that the operating system has a partition table. - os = OperatingSys.info({'id': os_id}) + os = target_sat.cli.OperatingSys.info({'id': os_id}) assert len(os['partition-tables']) == 1 assert os['partition-tables'][0] == ptable_name @pytest.mark.tier2 - def test_positive_update_parameters_attributes(self): + def test_positive_update_parameters_attributes(self, target_sat): """Update os-parameters-attributes to operating system :id: 5d566eea-b323-4128-9356-3bf39943e4d4 @@ -347,8 +282,8 @@ def test_positive_update_parameters_attributes(self): """ param_name = gen_string('alpha') param_value = gen_string('alpha') - os_id = make_os()['id'] - OperatingSys.update( + os_id = target_sat.cli_factory.make_os()['id'] + target_sat.cli.OperatingSys.update( { 'id': os_id, 'os-parameters-attributes': 'name={}, value={}'.format( @@ -356,14 +291,13 @@ def test_positive_update_parameters_attributes(self): ), } ) - os = OperatingSys.info({'id': os_id}, output_format='json') + os = target_sat.cli.OperatingSys.info({'id': os_id}, output_format='json') assert param_name == os['parameters'][0]['name'] assert param_value == os['parameters'][0]['value'] @pytest.mark.tier2 -@pytest.mark.skip_if_open("BZ:1649011") -def test_positive_os_list_with_default_organization_set(satellite_host): +def test_positive_os_list_with_default_organization_set(target_sat): """list operating systems when the default organization is set :id: 2c1ba416-a5d5-4031-b154-54794569a85b @@ -375,19 +309,19 @@ def test_positive_os_list_with_default_organization_set(satellite_host): :expectedresults: os list should list operating systems when the default organization is set """ - satellite_host.api.OperatingSystem().create() - os_list_before_default = satellite_host.cli.OperatingSys.list() + target_sat.api.OperatingSystem().create() + os_list_before_default = target_sat.cli.OperatingSys.list() assert len(os_list_before_default) > 0 try: 
 
 
 @pytest.mark.tier2
-@pytest.mark.skip_if_open("BZ:1649011")
-def test_positive_os_list_with_default_organization_set(satellite_host):
+def test_positive_os_list_with_default_organization_set(target_sat):
     """list operating systems when the default organization is set
 
     :id: 2c1ba416-a5d5-4031-b154-54794569a85b
@@ -375,19 +309,19 @@ def test_positive_os_list_with_default_organization_set(satellite_host):
 
     :expectedresults: os list should list operating systems when the
         default organization is set
     """
-    satellite_host.api.OperatingSystem().create()
-    os_list_before_default = satellite_host.cli.OperatingSys.list()
+    target_sat.api.OperatingSystem().create()
+    os_list_before_default = target_sat.cli.OperatingSys.list()
     assert len(os_list_before_default) > 0
     try:
-        satellite_host.cli.Defaults.add({'param-name': 'organization', 'param-value': DEFAULT_ORG})
-        result = satellite_host.execute('hammer defaults list')
+        target_sat.cli.Defaults.add({'param-name': 'organization', 'param-value': DEFAULT_ORG})
+        result = target_sat.execute('hammer defaults list')
         assert result.status == 0
         assert DEFAULT_ORG in result.stdout
-        os_list_after_default = satellite_host.cli.OperatingSys.list()
+        os_list_after_default = target_sat.cli.OperatingSys.list()
         assert len(os_list_after_default) > 0
     finally:
-        satellite_host.cli.Defaults.delete({'param-name': 'organization'})
-        result = satellite_host.execute('hammer defaults list')
+        target_sat.cli.Defaults.delete({'param-name': 'organization'})
+        result = target_sat.execute('hammer defaults list')
         assert result.status == 0
         assert DEFAULT_ORG not in result.stdout
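The try/finally above is the standard idiom whenever a test mutates global hammer state (here, the default organization). Reduced to its skeleton, under the same fixture assumptions as the test:

    target_sat.cli.Defaults.add({'param-name': 'organization', 'param-value': DEFAULT_ORG})
    try:
        assert len(target_sat.cli.OperatingSys.list()) > 0
    finally:
        # restore global state even when the assertions above fail
        target_sat.cli.Defaults.delete({'param-name': 'organization'})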
diff --git a/tests/foreman/cli/test_organization.py b/tests/foreman/cli/test_organization.py
index d0095c1974e..da79144f141 100644
--- a/tests/foreman/cli/test_organization.py
+++ b/tests/foreman/cli/test_organization.py
@@ -4,43 +4,26 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: Acceptance
-
 :CaseComponent: OrganizationsandLocations
 
 :Team: Endeavour
 
-:TestType: Functional
-
 :CaseImportance: High
 
-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_string
+import pytest
 
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_compute_resource
-from robottelo.cli.factory import make_domain
-from robottelo.cli.factory import make_hostgroup
-from robottelo.cli.factory import make_lifecycle_environment
-from robottelo.cli.factory import make_location
-from robottelo.cli.factory import make_medium
-from robottelo.cli.factory import make_org
-from robottelo.cli.factory import make_subnet
-from robottelo.cli.factory import make_template
-from robottelo.cli.factory import make_user
-from robottelo.cli.lifecycleenvironment import LifecycleEnvironment
-from robottelo.cli.org import Org
-from robottelo.cli.user import User
 from robottelo.config import settings
 from robottelo.constants import FOREMAN_PROVIDERS
-from robottelo.utils.datafactory import filtered_datapoint
-from robottelo.utils.datafactory import invalid_values_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
-from robottelo.utils.datafactory import valid_org_names_list
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
+from robottelo.utils.datafactory import (
+    filtered_datapoint,
+    invalid_values_list,
+    parametrized,
+    valid_data_list,
+    valid_org_names_list,
+)
 
 
 @filtered_datapoint
@@ -69,7 +52,7 @@ def proxy(target_sat):
 
 
 @pytest.mark.tier2
-def test_positive_no_duplicate_lines():
+def test_positive_no_duplicate_lines(module_target_sat):
     """hammer organization --help types information doubled
 
@@ -82,7 +65,7 @@ def test_positive_no_duplicate_lines(module_target_sat):
 
     :CaseImportance: Low
     """
     # org list --help:
-    result = Org.list({'help': True}, output_format=None)
+    result = module_target_sat.cli.Org.list({'help': True}, output_format=None)
     # get list of lines and check they all are unique
     lines = [
         line
@@ -94,7 +77,7 @@
 
 
 @pytest.mark.e2e
 @pytest.mark.tier1
-def test_positive_CRD():
+def test_positive_CRD(module_target_sat):
     """Create organization with valid name, label and description
 
     :id: 35840da7-668e-4f78-990a-738aa688d586
@@ -109,14 +92,16 @@ def test_positive_CRD(module_target_sat):
     name = valid_org_names_list()[0]
     label = valid_labels_list()[0]
     desc = list(valid_data_list().values())[0]
 
-    org = make_org({'name': name, 'label': label, 'description': desc})
+    org = module_target_sat.cli_factory.make_org(
+        {'name': name, 'label': label, 'description': desc}
+    )
     assert org['name'] == name
     assert org['label'] == label
     assert org['description'] == desc
 
     # List
-    result = Org.list({'search': f'name={name}'})
+    result = module_target_sat.cli.Org.list({'search': f'name={name}'})
     assert len(result) == 1
     assert result[0]['name'] == name
 
@@ -126,30 +111,30 @@
         f'description ~ {desc[:-5]}',
         f'name ^ "{name}"',
     ]:
-        result = Org.list({'search': query})
+        result = module_target_sat.cli.Org.list({'search': query})
         assert len(result) == 1
         assert result[0]['name'] == name
 
     # Search by name and label
-    result = Org.exists(search=('name', name))
+    result = module_target_sat.cli.Org.exists(search=('name', name))
     assert result['name'] == name
-    result = Org.exists(search=('label', label))
+    result = module_target_sat.cli.Org.exists(search=('label', label))
    assert result['name'] == name
 
     # Info by name and label
-    result = Org.info({'label': label})
+    result = module_target_sat.cli.Org.info({'label': label})
     assert result['id'] == org['id']
-    result = Org.info({'name': name})
+    result = module_target_sat.cli.Org.info({'name': name})
     assert org['id'] == result['id']
 
     # Delete
-    Org.delete({'id': org['id']})
+    module_target_sat.cli.Org.delete({'id': org['id']})
     with pytest.raises(CLIReturnCodeError):
-        result = Org.info({'id': org['id']})
+        result = module_target_sat.cli.Org.info({'id': org['id']})
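Note the two lookup styles exercised above: `exists` takes a `(field, value)` tuple while `info` takes an option dict and raises `CLIReturnCodeError` for a missing record. A brief sketch (the org name is illustrative):

    result = module_target_sat.cli.Org.exists(search=('name', 'Example Org'))
    info = module_target_sat.cli.Org.info({'name': 'Example Org'})  # raises if absent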
 
 
 @pytest.mark.tier2
-def test_positive_create_with_system_admin_user():
+def test_positive_create_with_system_admin_user(module_target_sat):
     """Create organization using user with system admin role
 
     :id: 1482ab6e-18c7-4a62-81a2-cc969ac373fe
@@ -161,16 +146,16 @@ def test_positive_create_with_system_admin_user():
     login = gen_string('alpha')
     password = gen_string('alpha')
     org_name = gen_string('alpha')
-    make_user({'login': login, 'password': password})
-    User.add_role({'login': login, 'role': 'System admin'})
-    make_org({'user': login, 'password': password, 'name': org_name})
-    result = Org.info({'name': org_name})
+    module_target_sat.cli_factory.user({'login': login, 'password': password})
+    module_target_sat.cli.User.add_role({'login': login, 'role': 'System admin'})
+    module_target_sat.cli_factory.make_org({'users': login, 'name': org_name})
+    result = module_target_sat.cli.Org.info({'name': org_name})
     assert result['name'] == org_name
 
 
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_add_and_remove_subnets(module_org):
+def test_positive_add_and_remove_subnets(module_org, module_target_sat):
     """add and remove a subnet from organization
 
     :id: adb5310b-76c5-4aca-8220-fdf0fe605cb0
@@ -182,22 +167,22 @@ def test_positive_add_and_remove_subnets(module_org):
     :steps:
         1. add and remove subnet by name
         2. add and remove subnet by id
 
     :expectedresults: Subnets are handled as expected
 
     :BZ: 1395229
-
-    :CaseLevel: Integration
     """
-    subnets = [make_subnet() for _ in range(0, 2)]
-    Org.add_subnet({'name': module_org.name, 'subnet': subnets[0]['name']})
-    Org.add_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']})
-    org_info = Org.info({'id': module_org.id})
+    subnets = [module_target_sat.cli_factory.make_subnet() for _ in range(0, 2)]
+    module_target_sat.cli.Org.add_subnet({'name': module_org.name, 'subnet': subnets[0]['name']})
+    module_target_sat.cli.Org.add_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['subnets']) == 2, "Failed to add subnets"
-    Org.remove_subnet({'name': module_org.name, 'subnet': subnets[0]['name']})
-    Org.remove_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.remove_subnet({'name': module_org.name, 'subnet': subnets[0]['name']})
+    module_target_sat.cli.Org.remove_subnet(
+        {'name': module_org.name, 'subnet-id': subnets[1]['id']}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['subnets']) == 0, "Failed to remove subnets"
 
 
 @pytest.mark.tier2
-def test_positive_add_and_remove_users(module_org):
+def test_positive_add_and_remove_users(module_org, module_target_sat):
     """Add and remove (admin) user to organization
 
     :id: c35b2e88-a65f-4eea-ba55-89cef59f30be
@@ -211,42 +196,40 @@ def test_positive_add_and_remove_users(module_org):
         4. create and delete admin user by id
 
     :BZ: 1395229
-
-    :CaseLevel: Integration
     """
-    user = make_user()
-    admin_user = make_user({'admin': '1'})
+    user = module_target_sat.cli_factory.user()
+    admin_user = module_target_sat.cli_factory.user({'admin': '1'})
     assert admin_user['admin'] == 'yes'
     # add and remove user and admin user by name
-    Org.add_user({'name': module_org.name, 'user': user['login']})
-    Org.add_user({'name': module_org.name, 'user': admin_user['login']})
-    org_info = Org.info({'name': module_org.name})
+    module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': user['login']})
+    module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': admin_user['login']})
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert user['login'] in org_info['users'], "Failed to add user by name"
     assert admin_user['login'] in org_info['users'], "Failed to add admin user by name"
-    Org.remove_user({'name': module_org.name, 'user': user['login']})
-    Org.remove_user({'name': module_org.name, 'user': admin_user['login']})
-    org_info = Org.info({'name': module_org.name})
+    module_target_sat.cli.Org.remove_user({'name': module_org.name, 'user': user['login']})
+    module_target_sat.cli.Org.remove_user({'name': module_org.name, 'user': admin_user['login']})
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert user['login'] not in org_info['users'], "Failed to remove user by name"
     assert admin_user['login'] not in org_info['users'], "Failed to remove admin user by name"
     # add and remove user and admin user by id
-    Org.add_user({'id': module_org.id, 'user-id': user['id']})
-    Org.add_user({'id': module_org.id, 'user-id': admin_user['id']})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.add_user({'id': module_org.id, 'user-id': user['id']})
+    module_target_sat.cli.Org.add_user({'id': module_org.id, 'user-id': admin_user['id']})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert user['login'] in org_info['users'], "Failed to add user by id"
     assert admin_user['login'] in org_info['users'], "Failed to add admin user by id"
-    Org.remove_user({'id': module_org.id, 'user-id': user['id']})
-    Org.remove_user({'id': module_org.id, 'user-id': admin_user['id']})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': user['id']})
+    module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': admin_user['id']})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert user['login'] not in org_info['users'], "Failed to remove user by id"
     assert admin_user['login'] not in org_info['users'], "Failed to remove admin user by id"
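Every `add_and_remove_*` test in this file follows one shape: attach by id and by name, read back `Org.info`, detach, verify again. A generic sketch of that loop, assuming the `add_<kind>`/`remove_<kind>` method naming seen above (helper name hypothetical):

    def attach_detach_roundtrip(sat, org, kind, entity, info_key):
        getattr(sat.cli.Org, f'add_{kind}')({'id': org.id, f'{kind}-id': entity['id']})
        assert entity['name'] in sat.cli.Org.info({'id': org.id})[info_key]
        getattr(sat.cli.Org, f'remove_{kind}')({'id': org.id, f'{kind}-id': entity['id']})
        assert entity['name'] not in sat.cli.Org.info({'id': org.id})[info_key]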
"Failed to remove user by id" assert admin_user['login'] not in org_info['users'], "Failed to remove admin user by id" @pytest.mark.tier2 -def test_positive_add_and_remove_hostgroups(module_org): +def test_positive_add_and_remove_hostgroups(module_org, module_target_sat): """add and remove a hostgroup from an organization :id: 34e2c7c8-dc20-4709-a5a9-83c0dee9d84d @@ -258,19 +241,25 @@ def test_positive_add_and_remove_hostgroups(module_org): :steps: 1. add and remove hostgroup by name 2. add and remove hostgroup by id - - :CaseLevel: Integration """ - hostgroups = [make_hostgroup() for _ in range(0, 2)] + hostgroups = [module_target_sat.cli_factory.hostgroup() for _ in range(0, 2)] - Org.add_hostgroup({'hostgroup-id': hostgroups[0]['id'], 'id': module_org.id}) - Org.add_hostgroup({'hostgroup': hostgroups[1]['name'], 'name': module_org.name}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.add_hostgroup( + {'hostgroup-id': hostgroups[0]['id'], 'id': module_org.id} + ) + module_target_sat.cli.Org.add_hostgroup( + {'hostgroup': hostgroups[1]['name'], 'name': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert hostgroups[0]['name'] in org_info['hostgroups'], "Failed to add hostgroup by id" assert hostgroups[1]['name'] in org_info['hostgroups'], "Failed to add hostgroup by name" - Org.remove_hostgroup({'hostgroup-id': hostgroups[1]['id'], 'id': module_org.id}) - Org.remove_hostgroup({'hostgroup': hostgroups[0]['name'], 'name': module_org.name}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_hostgroup( + {'hostgroup-id': hostgroups[1]['id'], 'id': module_org.id} + ) + module_target_sat.cli.Org.remove_hostgroup( + {'hostgroup': hostgroups[0]['name'], 'name': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert hostgroups[0]['name'] not in org_info['hostgroups'], "Failed to remove hostgroup by name" assert hostgroups[1]['name'] not in org_info['hostgroups'], "Failed to remove hostgroup by id" @@ -279,7 +268,7 @@ def test_positive_add_and_remove_hostgroups(module_org): @pytest.mark.tier2 @pytest.mark.libvirt_discovery @pytest.mark.upgrade -def test_positive_add_and_remove_compute_resources(module_org): +def test_positive_add_and_remove_compute_resources(module_org, module_target_sat): """Add and remove a compute resource from organization :id: 415c14ab-f879-4ed8-9ba7-8af4ada2e277 @@ -291,11 +280,9 @@ def test_positive_add_and_remove_compute_resources(module_org): :steps: 1. Add and remove compute resource by id 2. 
         2. Add and remove compute resource by name
-
-    :CaseLevel: Integration
     """
     compute_resources = [
-        make_compute_resource(
+        module_target_sat.cli_factory.compute_resource(
             {
                 'provider': FOREMAN_PROVIDERS['libvirt'],
                 'url': f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system',
@@ -303,21 +290,21 @@ def test_positive_add_and_remove_compute_resources(module_org):
         )
         for _ in range(0, 2)
     ]
-    Org.add_compute_resource(
+    module_target_sat.cli.Org.add_compute_resource(
         {'compute-resource-id': compute_resources[0]['id'], 'id': module_org.id}
     )
-    Org.add_compute_resource(
+    module_target_sat.cli.Org.add_compute_resource(
         {'compute-resource': compute_resources[1]['name'], 'name': module_org.name}
     )
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['compute-resources']) == 2, "Failed to add compute resources"
-    Org.remove_compute_resource(
+    module_target_sat.cli.Org.remove_compute_resource(
         {'compute-resource-id': compute_resources[0]['id'], 'id': module_org.id}
     )
-    Org.remove_compute_resource(
+    module_target_sat.cli.Org.remove_compute_resource(
         {'compute-resource': compute_resources[1]['name'], 'name': module_org.name}
     )
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert (
         compute_resources[0]['name'] not in org_info['compute-resources']
     ), "Failed to remove compute resource by id"
@@ -327,7 +314,7 @@
 
 
 @pytest.mark.tier2
-def test_positive_add_and_remove_media(module_org):
+def test_positive_add_and_remove_media(module_org, module_target_sat):
     """Add and remove medium to organization
 
     :id: c2943a81-c8f7-44c4-926b-388055d7c290
@@ -339,18 +326,16 @@ def test_positive_add_and_remove_media(module_org):
     :steps:
         1. add and remove medium by id
         2. add and remove medium by name
-
-    :CaseLevel: Integration
     """
-    media = [make_medium() for _ in range(0, 2)]
-    Org.add_medium({'id': module_org.id, 'medium-id': media[0]['id']})
-    Org.add_medium({'name': module_org.name, 'medium': media[1]['name']})
-    org_info = Org.info({'id': module_org.id})
+    media = [module_target_sat.cli_factory.make_medium() for _ in range(0, 2)]
+    module_target_sat.cli.Org.add_medium({'id': module_org.id, 'medium-id': media[0]['id']})
+    module_target_sat.cli.Org.add_medium({'name': module_org.name, 'medium': media[1]['name']})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert media[0]['name'] in org_info['installation-media'], "Failed to add medium by id"
     assert media[1]['name'] in org_info['installation-media'], "Failed to add medium by name"
-    Org.remove_medium({'name': module_org.name, 'medium': media[0]['name']})
-    Org.remove_medium({'id': module_org.id, 'medium-id': media[1]['id']})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.remove_medium({'name': module_org.name, 'medium': media[0]['name']})
+    module_target_sat.cli.Org.remove_medium({'id': module_org.id, 'medium-id': media[1]['id']})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert media[0]['name'] not in org_info['installation-media'], "Failed to remove medium by name"
     assert media[1]['name'] not in org_info['installation-media'], "Failed to remove medium by id"
 
 
 @pytest.mark.tier2
 @pytest.mark.skip_if_open("BZ:1845860")
 @pytest.mark.skip_if_open("BZ:1886876")
-def test_positive_add_and_remove_templates(module_org):
+def test_positive_add_and_remove_templates(module_org, module_target_sat):
     """Add and remove provisioning templates to organization
 
     :id: bd46a192-488f-4da0-bf47-1f370ae5f55c
@@ -370,49 +355,51 @@ def test_positive_add_and_remove_templates(module_org):
     :steps:
         1. Add and remove template by id
         2. Add and remove template by name
-
-    :CaseLevel: Integration
     """
     # create and remove templates by name
     name = list(valid_data_list().values())[0]
-    template = make_template({'content': gen_string('alpha'), 'name': name})
+    template = module_target_sat.cli_factory.make_template(
+        {'content': gen_string('alpha'), 'name': name}
+    )
     # Add provisioning-template
-    Org.add_provisioning_template(
+    module_target_sat.cli.Org.add_provisioning_template(
         {'name': module_org.name, 'provisioning-template': template['name']}
     )
-    org_info = Org.info({'name': module_org.name})
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert (
         f"{template['name']} ({template['type']})" in org_info['templates']
     ), "Failed to add template by name"
     # Remove provisioning-template
-    Org.remove_provisioning_template(
+    module_target_sat.cli.Org.remove_provisioning_template(
         {'provisioning-template': template['name'], 'name': module_org.name}
     )
-    org_info = Org.info({'name': module_org.name})
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert (
         f"{template['name']} ({template['type']})" not in org_info['templates']
     ), "Failed to remove template by name"
     # add and remove templates by id
     # Add provisioning-template
-    Org.add_provisioning_template({'provisioning-template-id': template['id'], 'id': module_org.id})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.add_provisioning_template(
+        {'provisioning-template-id': template['id'], 'id': module_org.id}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert (
         f"{template['name']} ({template['type']})" in org_info['templates']
     ), "Failed to add template by name"
     # Remove provisioning-template
-    Org.remove_provisioning_template(
+    module_target_sat.cli.Org.remove_provisioning_template(
         {'provisioning-template-id': template['id'], 'id': module_org.id}
     )
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert (
         f"{template['name']} ({template['type']})" not in org_info['templates']
     ), "Failed to remove template by id"
 
 
 @pytest.mark.tier2
-def test_positive_add_and_remove_domains(module_org):
+def test_positive_add_and_remove_domains(module_org, module_target_sat):
     """Add and remove domains to organization
 
     :id: 97359ffe-4ce6-4e44-9e3f-583d3fdebbc8
@@ -424,25 +411,23 @@ def test_positive_add_and_remove_domains(module_org):
     :steps:
         1. Add and remove domain by name
         2. Add and remove domain by id
-
-    :CaseLevel: Integration
     """
-    domains = [make_domain() for _ in range(0, 2)]
-    Org.add_domain({'domain-id': domains[0]['id'], 'name': module_org.name})
-    Org.add_domain({'domain': domains[1]['name'], 'name': module_org.name})
-    org_info = Org.info({'id': module_org.id})
+    domains = [module_target_sat.cli_factory.make_domain() for _ in range(0, 2)]
+    module_target_sat.cli.Org.add_domain({'domain-id': domains[0]['id'], 'name': module_org.name})
+    module_target_sat.cli.Org.add_domain({'domain': domains[1]['name'], 'name': module_org.name})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['domains']) == 2, "Failed to add domains"
     assert domains[0]['name'] in org_info['domains']
     assert domains[1]['name'] in org_info['domains']
-    Org.remove_domain({'domain': domains[0]['name'], 'name': module_org.name})
-    Org.remove_domain({'domain-id': domains[1]['id'], 'id': module_org.id})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.remove_domain({'domain': domains[0]['name'], 'name': module_org.name})
+    module_target_sat.cli.Org.remove_domain({'domain-id': domains[1]['id'], 'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['domains']) == 0, "Failed to remove domains"
 
 
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_add_and_remove_lce(module_org):
+def test_positive_add_and_remove_lce(module_org, module_target_sat):
     """Remove a lifecycle environment from organization
 
     :id: bfa9198e-6078-4f10-b79a-3d7f51b835fd
@@ -452,27 +437,27 @@ def test_positive_add_and_remove_lce(module_org):
     :steps:
         1. create and add lce to org
         2. remove lce from org
-
-    :CaseLevel: Integration
     """
     # Create a lifecycle environment.
-    lc_env_name = make_lifecycle_environment({'organization-id': module_org.id})['name']
+    lc_env_name = module_target_sat.cli_factory.make_lifecycle_environment(
+        {'organization-id': module_org.id}
+    )['name']
     lc_env_attrs = {'name': lc_env_name, 'organization-id': module_org.id}
     # Read back information about the lifecycle environment. Verify the
     # sanity of that information.
-    response = LifecycleEnvironment.list(lc_env_attrs)
+    response = module_target_sat.cli.LifecycleEnvironment.list(lc_env_attrs)
     assert response[0]['name'] == lc_env_name
     # Delete it.
-    LifecycleEnvironment.delete(lc_env_attrs)
+    module_target_sat.cli.LifecycleEnvironment.delete(lc_env_attrs)
     # We should get a zero-length response when searching for the LC env.
-    response = LifecycleEnvironment.list(lc_env_attrs)
+    response = module_target_sat.cli.LifecycleEnvironment.list(lc_env_attrs)
     assert len(response) == 0
 
 
 @pytest.mark.run_in_one_thread
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_add_and_remove_capsules(proxy, module_org):
+def test_positive_add_and_remove_capsules(proxy, module_org, module_target_sat):
     """Add and remove a capsule from organization
 
     :id: 71af64ec-5cbb-4dd8-ba90-652e302305ec
@@ -482,26 +467,30 @@ def test_positive_add_and_remove_capsules(proxy, module_org):
     :steps:
         1. add and remove capsule by ip
         2. add and remove capsule by name
-
-    :CaseLevel: Integration
     """
-    Org.add_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']})
-    org_info = Org.info({'name': module_org.name})
+    module_target_sat.cli.Org.add_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']})
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert proxy['name'] in org_info['smart-proxies'], "Failed to add capsule by id"
-    Org.remove_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.remove_smart_proxy(
+        {'id': module_org.id, 'smart-proxy-id': proxy['id']}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert proxy['name'] not in org_info['smart-proxies'], "Failed to remove capsule by id"
-    Org.add_smart_proxy({'name': module_org.name, 'smart-proxy': proxy['name']})
-    org_info = Org.info({'name': module_org.name})
+    module_target_sat.cli.Org.add_smart_proxy(
+        {'name': module_org.name, 'smart-proxy': proxy['name']}
+    )
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert proxy['name'] in org_info['smart-proxies'], "Failed to add capsule by name"
-    Org.remove_smart_proxy({'name': module_org.name, 'smart-proxy': proxy['name']})
-    org_info = Org.info({'name': module_org.name})
+    module_target_sat.cli.Org.remove_smart_proxy(
+        {'name': module_org.name, 'smart-proxy': proxy['name']}
+    )
+    org_info = module_target_sat.cli.Org.info({'name': module_org.name})
     assert proxy['name'] not in org_info['smart-proxies'], "Failed to add capsule by name"
 
 
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_add_and_remove_locations(module_org):
+def test_positive_add_and_remove_locations(module_org, module_target_sat):
     """Add and remove locations from an organization
 
     :id: 37b63e5c-8fd5-439c-9540-972b597b590a
@@ -513,25 +502,36 @@ def test_positive_add_and_remove_locations(module_org):
     :steps:
         1. add and remove locations by name
        2. add and remove locations by id
-
-    :CaseLevel: Integration
     """
-    locations = [make_location() for _ in range(0, 2)]
-    Org.add_location({'location-id': locations[0]['id'], 'name': module_org.name})
-    Org.add_location({'location': locations[1]['name'], 'name': module_org.name})
-    org_info = Org.info({'id': module_org.id})
-    assert len(org_info['locations']) == 2, "Failed to add locations"
+    locations = [module_target_sat.cli_factory.make_location() for _ in range(0, 2)]
+    module_target_sat.cli.Org.add_location(
+        {'location-id': locations[0]['id'], 'name': module_org.name}
+    )
+    module_target_sat.cli.Org.add_location(
+        {'location': locations[1]['name'], 'name': module_org.name}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
    assert locations[0]['name'] in org_info['locations']
    assert locations[1]['name'] in org_info['locations']
-    Org.remove_location({'location-id': locations[0]['id'], 'id': module_org.id})
-    Org.remove_location({'location': locations[1]['name'], 'id': module_org.id})
-    org_info = Org.info({'id': module_org.id})
-    assert not org_info.get('locations'), "Failed to remove locations"
+    module_target_sat.cli.Org.remove_location(
+        {'location-id': locations[0]['id'], 'id': module_org.id}
+    )
+    module_target_sat.cli.Org.remove_location(
+        {'location': locations[1]['name'], 'id': module_org.id}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
+    found_locations = (
+        org_info.get('locations')
+        if isinstance(org_info.get('locations'), list)
+        else [org_info.get('locations')]
+    )
+    assert locations[0]['name'] not in found_locations, "Failed to remove locations"
+    assert locations[1]['name'] not in found_locations, "Failed to remove locations"
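The `found_locations` expression added above guards against hammer's parsed output collapsing a single remaining value to a bare string instead of a list. The same normalization as a reusable helper (a sketch, not part of the patch):

    def as_list(value):
        # wrap scalar hammer output in a list so membership checks behave uniformly
        if value is None:
            return []
        return value if isinstance(value, list) else [value]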
 
 
 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_add_and_remove_parameter(module_org):
+def test_positive_add_and_remove_parameter(module_org, module_target_sat):
     """Add, update, and remove a parameter from an organization
 
     :id: e4099279-4e73-4c14-9e7c-912b3787b99f
@@ -543,34 +543,36 @@ def test_positive_add_and_remove_parameter(module_org):
     """
     param_name = gen_string('alpha')
     param_new_value = gen_string('alpha')
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['parameters']) == 0
     # Create parameter
-    Org.set_parameter(
+    module_target_sat.cli.Org.set_parameter(
         {'name': param_name, 'value': gen_string('alpha'), 'organization-id': module_org.id}
     )
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['parameters']) == 1
     # Update
-    Org.set_parameter(
+    module_target_sat.cli.Org.set_parameter(
         {'name': param_name, 'value': param_new_value, 'organization': module_org.name}
     )
-    org_info = Org.info({'id': module_org.id})
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['parameters']) == 1
     assert param_new_value == org_info['parameters'][param_name.lower()]
     # Delete parameter
-    Org.delete_parameter({'name': param_name, 'organization': module_org.name})
-    org_info = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.delete_parameter(
+        {'name': param_name, 'organization': module_org.name}
+    )
+    org_info = module_target_sat.cli.Org.info({'id': module_org.id})
     assert len(org_info['parameters']) == 0
     assert param_name.lower() not in org_info['parameters']
 
 
 @pytest.mark.tier1
 @pytest.mark.parametrize('name', **parametrized(invalid_values_list()))
-def test_negative_create_with_invalid_name(name):
+def test_negative_create_with_invalid_name(name, module_target_sat):
     """Try to create an organization with invalid name, but valid label and
     description
 
@@ -579,10 +581,9 @@ def test_negative_create_with_invalid_name(name, module_target_sat):
 
     :parametrized: yes
 
     :expectedresults: organization is not created
-
     """
     with pytest.raises(CLIFactoryError):
-        make_org(
+        module_target_sat.cli_factory.make_org(
             {
                 'description': gen_string('alpha'),
                 'label': gen_string('alpha'),
@@ -592,7 +593,7 @@
 
 
 @pytest.mark.tier1
-def test_negative_create_same_name(module_org):
+def test_negative_create_same_name(module_org, module_target_sat):
     """Create a new organization with same name, description, and label.
 
     :id: 07924e1f-1eff-4bae-b0db-e41b84966bc1
@@ -602,7 +603,7 @@ def test_negative_create_same_name(module_org, module_target_sat):
 
     :CaseImportance: Critical
     """
     with pytest.raises(CLIFactoryError):
-        make_org(
+        module_target_sat.cli_factory.make_org(
             {
                 'description': module_org.description,
                 'label': module_org.label,
@@ -612,7 +613,7 @@
 
 
 @pytest.mark.tier1
-def test_positive_update(module_org):
+def test_positive_update(module_org, module_target_sat):
     """Update organization name and description
 
     :id: 66581003-f5d9-443c-8cd6-00f68087e8e9
@@ -626,19 +627,19 @@ def test_positive_update(module_org, module_target_sat):
     new_desc = list(valid_data_list().values())[0]
 
     # upgrade name
-    Org.update({'id': module_org.id, 'new-name': new_name})
-    org = Org.info({'id': module_org.id})
+    module_target_sat.cli.Org.update({'id': module_org.id, 'new-name': new_name})
+    org = module_target_sat.cli.Org.info({'id': module_org.id})
     assert org['name'] == new_name
 
     # upgrade description
-    Org.update({'description': new_desc, 'id': org['id']})
-    org = Org.info({'id': org['id']})
+    module_target_sat.cli.Org.update({'description': new_desc, 'id': org['id']})
+    org = module_target_sat.cli.Org.info({'id': org['id']})
     assert org['description'] == new_desc
 
 
 @pytest.mark.tier1
 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list()))
-def test_negative_update_name(new_name, module_org):
+def test_negative_update_name(new_name, module_org, module_target_sat):
     """Fail to update organization name for invalid values.
 
     :id: 582d41b8-370d-45ed-9b7b-8096608e1324
@@ -646,14 +647,13 @@ def test_negative_update_name(new_name, module_org, module_target_sat):
 
     :parametrized: yes
 
     :expectedresults: organization name is not updated
-
     """
     with pytest.raises(CLIReturnCodeError):
-        Org.update({'id': module_org.id, 'new-name': new_name})
+        module_target_sat.cli.Org.update({'id': module_org.id, 'new-name': new_name})
 
 
 @pytest.mark.tier2
-def test_positive_create_user_with_timezone(module_org):
+def test_positive_create_user_with_timezone(module_org, module_target_sat):
     """Create and remove user with valid timezone in an organization
 
     :id: b9b92c00-ee99-4da2-84c5-0a576a862100
@@ -662,8 +662,6 @@ def test_positive_create_user_with_timezone(module_org, module_target_sat):
 
     :BZ: 1733269
 
-    :CaseLevel: Integration
-
     :CaseImportance: Medium
 
     :steps:
@@ -672,7 +670,6 @@ def test_positive_create_user_with_timezone(module_org, module_target_sat):
         3. Remove user from organization and validate
 
     :expectedresults: User created and removed successfully with valid timezone
-
     """
     users_timezones = [
         'Pacific Time (US & Canada)',
@@ -682,11 +679,11 @@ def test_positive_create_user_with_timezone(module_org, module_target_sat):
         'Samoa',
     ]
     for timezone in users_timezones:
-        user = make_user({'timezone': timezone, 'admin': '1'})
-        Org.add_user({'name': module_org.name, 'user': user['login']})
-        org_info = Org.info({'name': module_org.name})
+        user = module_target_sat.cli_factory.user({'timezone': timezone, 'admin': '1'})
+        module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': user['login']})
+        org_info = module_target_sat.cli.Org.info({'name': module_org.name})
         assert user['login'] in org_info['users']
         assert user['timezone'] == timezone
-        Org.remove_user({'id': module_org.id, 'user-id': user['id']})
-        org_info = Org.info({'name': module_org.name})
+        module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': user['id']})
+        org_info = module_target_sat.cli.Org.info({'name': module_org.name})
         assert user['login'] not in org_info['users']
diff --git a/tests/foreman/cli/test_oscap.py b/tests/foreman/cli/test_oscap.py
index 25307107b25..c198e5fa636 100644
--- a/tests/foreman/cli/test_oscap.py
+++ b/tests/foreman/cli/test_oscap.py
@@ -2,66 +2,47 @@
 
 :Requirement: Oscap
 
-:CaseLevel: Acceptance
-
 :CaseComponent: SCAPPlugin
 
-:Team: Rocket
-
-:TestType: Functional
+:Team: Endeavour
 
 :CaseImportance: High
 
 :CaseAutomation: Automated
 
-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_string
 from nailgun import entities
+import pytest
 
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_hostgroup
-from robottelo.cli.factory import make_scap_policy
-from robottelo.cli.factory import make_scapcontent
-from robottelo.cli.factory import make_tailoringfile
-from robottelo.cli.host import Host
-from robottelo.cli.scap_policy import Scappolicy
-from robottelo.cli.scapcontent import Scapcontent
 from robottelo.config import settings
-from robottelo.constants import OSCAP_DEFAULT_CONTENT
-from robottelo.constants import OSCAP_PERIOD
-from robottelo.constants import OSCAP_PROFILE
-from robottelo.constants import OSCAP_WEEKDAY
-from robottelo.utils.datafactory import invalid_names_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
+from robottelo.constants import OSCAP_DEFAULT_CONTENT, OSCAP_PERIOD, OSCAP_WEEKDAY
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
+from robottelo.utils.datafactory import (
+    invalid_names_list,
+    parametrized,
+    valid_data_list,
+)
 
 
 class TestOpenScap:
     """Tests related to the oscap cli hammer plugin"""
 
     @classmethod
-    def fetch_scap_and_profile_id(cls, scap_name, scap_profile):
+    def fetch_scap_and_profile_id(cls, scap_name, sat):
         """Extracts the scap ID and scap profile id
 
         :param scap_name: Scap title
-        :param scap_profile: Scap profile you want to select
 
         :returns: scap_id and scap_profile_id
         """
-        default_content = Scapcontent.info({'title': scap_name}, output_format='json')
+        default_content = sat.cli.Scapcontent.info({'title': scap_name}, output_format='json')
         scap_id = default_content['id']
-        scap_profile_ids = [
-            profile['id']
-            for profile in default_content['scap-content-profiles']
-            if scap_profile in profile['title']
-        ]
+        scap_profile_ids = default_content['scap-content-profiles'][0]['id']
         return scap_id, scap_profile_ids
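After this change the helper pins the first profile of the named content instead of matching a profile title, and callers pass the Satellite object rather than a profile name. Rough usage, with an illustrative content title:

    scap_id, profile_id = TestOpenScap.fetch_scap_and_profile_id(
        'Red Hat rhel8 default content', module_target_sat
    )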
 
     @pytest.mark.tier1
-    def test_positive_list_default_content_with_admin(self):
+    def test_positive_list_default_content_with_admin(self, module_target_sat):
         """List the default scap content with admin account
 
         :id: 32c41c22-6aef-424e-8e69-a65c00f1c811
@@ -85,13 +66,13 @@ def test_positive_list_default_content_with_admin(self):
 
         :CaseImportance: Medium
         """
-        scap_contents = [content['title'] for content in Scapcontent.list()]
+        scap_contents = [content['title'] for content in module_target_sat.cli.Scapcontent.list()]
         for title in OSCAP_DEFAULT_CONTENT.values():
             assert title in scap_contents
 
     @pytest.mark.tier1
     def test_negative_list_default_content_with_viewer_role(
-        self, scap_content, default_viewer_role
+        self, scap_content, default_viewer_role, module_target_sat
     ):
         """List the default scap content by user with viewer role
 
@@ -114,17 +95,17 @@ def test_negative_list_default_content_with_viewer_role(
 
         :CaseImportance: Medium
         """
-        result = Scapcontent.with_user(
+        result = module_target_sat.cli.Scapcontent.with_user(
             default_viewer_role.login, default_viewer_role.password
         ).list()
         assert len(result) == 0
         with pytest.raises(CLIReturnCodeError):
-            Scapcontent.with_user(default_viewer_role.login, default_viewer_role.password).info(
-                {'title': scap_content['title']}
-            )
+            module_target_sat.cli.Scapcontent.with_user(
+                default_viewer_role.login, default_viewer_role.password
+            ).info({'title': scap_content['title']})
 
     @pytest.mark.tier1
-    def test_positive_view_scap_content_info_admin(self):
+    def test_positive_view_scap_content_info_admin(self, module_target_sat):
         """View info of scap content with admin account
 
         :id: 539ea982-0701-43f5-bb91-e566e6687e35
@@ -146,12 +127,14 @@ def test_positive_view_scap_content_info_admin(self):
 
         :CaseImportance: Medium
         """
         title = gen_string('alpha')
-        make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path})
-        result = Scapcontent.info({'title': title})
+        module_target_sat.cli_factory.scapcontent(
+            {'title': title, 'scap-file': settings.oscap.content_path}
+        )
+        result = module_target_sat.cli.Scapcontent.info({'title': title})
         assert result['title'] == title
 
     @pytest.mark.tier1
-    def test_negative_info_scap_content(self):
+    def test_negative_info_scap_content(self, module_target_sat):
         """View info of scap content with invalid ID as parameter
 
         :id: 86f44fb1-2e2b-4004-83c1-4a62162ebea9
@@ -174,11 +157,11 @@ def test_negative_info_scap_content(self):
 
         """
         invalid_scap_id = gen_string('alpha')
         with pytest.raises(CLIReturnCodeError):
-            Scapcontent.info({'id': invalid_scap_id})
+            module_target_sat.cli.Scapcontent.info({'id': invalid_scap_id})
 
     @pytest.mark.parametrize('title', **parametrized(valid_data_list()))
     @pytest.mark.tier1
-    def test_positive_create_scap_content_with_valid_title(self, title):
+    def test_positive_create_scap_content_with_valid_title(self, title, module_target_sat):
         """Create scap-content with valid title
 
         :id: 68e9fbe2-e3c3-48e7-a774-f1260a3b7f4f
@@ -203,11 +186,13 @@ def test_positive_create_scap_content_with_valid_title(self, title):
 
         :CaseImportance: Medium
         """
-        scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path})
+        scap_content = module_target_sat.cli_factory.scapcontent(
+            {'title': title, 'scap-file': settings.oscap.content_path}
+        )
         assert scap_content['title'] == title
 
     @pytest.mark.tier1
-    def test_negative_create_scap_content_with_same_title(self):
+    def test_negative_create_scap_content_with_same_title(self, module_target_sat):
         """Create scap-content with same title
 
         :id: a8cbacc9-456a-4f6f-bd0e-4d1167a8b401
@@ -235,14 +220,18 @@ def test_negative_create_scap_content_with_same_title(self):
 
         :CaseImportance: Medium
         """
         title = gen_string('alpha')
-        scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path})
+        scap_content = module_target_sat.cli_factory.scapcontent(
+            {'title': title, 'scap-file': settings.oscap.content_path}
+        )
         assert scap_content['title'] == title
         with pytest.raises(CLIFactoryError):
-            make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path})
+            module_target_sat.cli_factory.scapcontent(
+                {'title': title, 'scap-file': settings.oscap.content_path}
+            )
 
     @pytest.mark.parametrize('title', **parametrized(invalid_names_list()))
     @pytest.mark.tier1
-    def test_negative_create_scap_content_with_invalid_title(self, title):
+    def test_negative_create_scap_content_with_invalid_title(self, title, module_target_sat):
         """Create scap-content with invalid title
 
         :id: 90a2590e-a6ff-41f1-9e0a-67d4b16435c0
@@ -266,11 +255,15 @@ def test_negative_create_scap_content_with_invalid_title(self, title):
 
         :CaseImportance: Medium
         """
         with pytest.raises(CLIFactoryError):
-            make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path})
+            module_target_sat.cli_factory.scapcontent(
+                {'title': title, 'scap-file': settings.oscap.content_path}
+            )
 
     @pytest.mark.parametrize('name', **parametrized(valid_data_list()))
     @pytest.mark.tier1
-    def test_positive_create_scap_content_with_valid_originalfile_name(self, name):
+    def test_positive_create_scap_content_with_valid_originalfile_name(
+        self, name, module_target_sat
+    ):
         """Create scap-content with valid original file name
 
         :id: 25441174-11cb-4d9b-9ec5-b1c69411b5bc
@@ -293,14 +286,17 @@ def test_positive_create_scap_content_with_valid_originalfile_name(self, name):
 
         :CaseImportance: Medium
         """
-        scap_content = make_scapcontent(
-            {'original-filename': name, 'scap-file': settings.oscap.content_path}
+        title = gen_string('alpha')
+        scap_content = module_target_sat.cli_factory.scapcontent(
+            {'original-filename': name, 'scap-file': settings.oscap.content_path, 'title': title}
         )
         assert scap_content['original-filename'] == name
 
     @pytest.mark.parametrize('name', **parametrized(invalid_names_list()))
     @pytest.mark.tier1
-    def test_negative_create_scap_content_with_invalid_originalfile_name(self, name):
+    def test_negative_create_scap_content_with_invalid_originalfile_name(
+        self, name, module_target_sat
+    ):
         """Create scap-content with invalid original file name
 
         :id: 83feb67a-a6bf-4a99-923d-889e8d1013fa
@@ -326,11 +322,13 @@ def test_negative_create_scap_content_with_invalid_originalfile_name(self, name)
 
         :BZ: 1482395
         """
         with pytest.raises(CLIFactoryError):
-            make_scapcontent({'original-filename': name, 'scap-file': settings.oscap.content_path})
+            module_target_sat.cli_factory.scapcontent(
+                {'original-filename': name, 'scap-file': settings.oscap.content_path}
+            )
 
     @pytest.mark.parametrize('title', **parametrized(valid_data_list()))
     @pytest.mark.tier1
-    def test_negative_create_scap_content_without_dsfile(self, title):
+    def test_negative_create_scap_content_without_dsfile(self, title, module_target_sat):
         """Create scap-content without scap data stream xml file
 
         :id: ea811994-12cd-4382-9382-37fa806cc26f
@@ -353,10 +351,10 @@ def test_negative_create_scap_content_without_dsfile(self, title):
 
         :CaseImportance: Medium
         """
         with pytest.raises(CLIFactoryError):
-            make_scapcontent({'title': title})
+            module_target_sat.cli_factory.scapcontent({'title': title})
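Two failure modes recur in these tests: factory helpers wrap a failed `hammer ... create` in `CLIFactoryError`, while direct wrapper calls surface `CLIReturnCodeError`. A compact sketch of the distinction, using the imports from this file's header (values illustrative):

    with pytest.raises(CLIFactoryError):       # creation-helper path
        module_target_sat.cli_factory.scapcontent({'title': 'missing-ds-file'})
    with pytest.raises(CLIReturnCodeError):    # raw hammer-wrapper path
        module_target_sat.cli.Scapcontent.info({'id': 'bogus-id'})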
test_positive_update_scap_content_with_newtitle(self): + def test_positive_update_scap_content_with_newtitle(self, module_target_sat): """Update scap content title :id: 2c32e94a-237d-40b9-8a3b-fca2ef26fe79 @@ -380,14 +378,16 @@ def test_positive_update_scap_content_with_newtitle(self): """ title = gen_string('alpha') new_title = gen_string('alpha') - scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + scap_content = module_target_sat.cli_factory.scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) assert scap_content['title'] == title - Scapcontent.update({'title': title, 'new-title': new_title}) - result = Scapcontent.info({'title': new_title}, output_format='json') + module_target_sat.cli.Scapcontent.update({'title': title, 'new-title': new_title}) + result = module_target_sat.cli.Scapcontent.info({'title': new_title}, output_format='json') assert result['title'] == new_title @pytest.mark.tier1 - def test_positive_delete_scap_content_with_id(self): + def test_positive_delete_scap_content_with_id(self, module_target_sat): """Delete a scap content with id as parameter :id: 11ae7652-65e0-4751-b1e0-246b27919238 @@ -407,13 +407,16 @@ def test_positive_delete_scap_content_with_id(self): :CaseImportance: Medium """ - scap_content = make_scapcontent({'scap-file': settings.oscap.content_path}) - Scapcontent.delete({'id': scap_content['id']}) + title = gen_string('alpha') + scap_content = module_target_sat.cli_factory.scapcontent( + {'scap-file': settings.oscap.content_path, 'title': title} + ) + module_target_sat.cli.Scapcontent.delete({'id': scap_content['id']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'id': scap_content['id']}) + module_target_sat.cli.Scapcontent.info({'id': scap_content['id']}) @pytest.mark.tier1 - def test_positive_delete_scap_content_with_title(self): + def test_positive_delete_scap_content_with_title(self, module_target_sat): """Delete a scap content with title as parameter :id: aa4ca830-3250-4517-b40c-0256cdda5e0a @@ -435,14 +438,19 @@ def test_positive_delete_scap_content_with_title(self): :CaseImportance: Medium """ - scap_content = make_scapcontent({'scap-file': settings.oscap.content_path}) - Scapcontent.delete({'title': scap_content['title']}) + title = gen_string('alpha') + scap_content = module_target_sat.cli_factory.scapcontent( + {'scap-file': settings.oscap.content_path, 'title': title} + ) + module_target_sat.cli.Scapcontent.delete({'title': scap_content['title']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'title': scap_content['title']}) + module_target_sat.cli.Scapcontent.info({'title': scap_content['title']}) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier2 - def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): + def test_postive_create_scap_policy_with_valid_name( + self, name, scap_content, module_target_sat + ): """Create scap policy with valid name :id: c9327675-62b2-4e22-933a-02818ef68c11 @@ -464,7 +472,7 @@ def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): :CaseImportance: Medium """ - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -475,10 +483,16 @@ def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): } ) assert scap_policy['name'] == name + # Deleting policy which created for all valid input (ex- latin1, cjk, utf-8, etc.) 
+ module_target_sat.cli.Scappolicy.delete({'name': scap_policy['name']}) + with pytest.raises(CLIReturnCodeError): + module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']}) @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier2 - def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content): + def test_negative_create_scap_policy_with_invalid_name( + self, name, scap_content, module_target_sat + ): """Create scap policy with invalid name :id: 0d163968-7759-4cfd-9c4d-98533d8db925 @@ -501,7 +515,7 @@ def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content) :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scap_policy( + module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -513,7 +527,7 @@ def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content) ) @pytest.mark.tier2 - def test_negative_create_scap_policy_without_content(self, scap_content): + def test_negative_create_scap_policy_without_content(self, scap_content, module_target_sat): """Create scap policy without scap content :id: 88a8fba3-f45a-4e22-9ee1-f0d701f1135f @@ -534,7 +548,7 @@ def test_negative_create_scap_policy_without_content(self, scap_content): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scap_policy( + module_target_sat.cli_factory.make_scap_policy( { 'deploy-by': 'ansible', 'scap-content-profile-id': scap_content["scap_profile_id"], @@ -544,7 +558,7 @@ def test_negative_create_scap_policy_without_content(self, scap_content): ) @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): + def test_positive_associate_scap_policy_with_hostgroups(self, scap_content, module_target_sat): """Associate hostgroups to scap policy :id: 916403a0-572d-4cf3-9155-3e3d0373577f @@ -566,9 +580,9 @@ def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): :CaseImportance: Medium """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -582,7 +596,9 @@ def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): assert scap_policy['hostgroups'][0] == hostgroup['name'] @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_content): + def test_positive_associate_scap_policy_with_hostgroup_via_ansible( + self, scap_content, module_target_sat + ): """Associate hostgroup to scap policy via ansible :id: 2df303c6-bff5-4977-a865-a3afabfb8726 @@ -604,9 +620,9 @@ def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_co :expectedresults: The policy is created via ansible deploy option and associated successfully. 
""" - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -624,7 +640,7 @@ def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_co @pytest.mark.upgrade @pytest.mark.tier2 def test_positive_associate_scap_policy_with_tailoringfiles( - self, deploy, scap_content, tailoring_file_path + self, deploy, scap_content, tailoring_file_path, module_target_sat ): """Associate tailoring file by name/id to scap policy with all deployments @@ -641,12 +657,16 @@ def test_positive_associate_scap_policy_with_tailoringfiles( :expectedresults: The policy is created and associated successfully. """ - tailoring_file_a = make_tailoringfile({'scap-file': tailoring_file_path['satellite']}) + tailoring_file_a = module_target_sat.cli_factory.tailoringfile( + {'scap-file': tailoring_file_path['satellite']} + ) tailoring_file_profile_a_id = tailoring_file_a['tailoring-file-profiles'][0]['id'] - tailoring_file_b = make_tailoringfile({'scap-file': tailoring_file_path['satellite']}) + tailoring_file_b = module_target_sat.cli_factory.tailoringfile( + {'scap-file': tailoring_file_path['satellite']} + ) tailoring_file_profile_b_id = tailoring_file_b['tailoring-file-profiles'][0]['id'] - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_content["scap_id"], 'deploy-by': deploy, @@ -661,22 +681,22 @@ def test_positive_associate_scap_policy_with_tailoringfiles( assert scap_policy['tailoring-file-id'] == tailoring_file_a['id'] assert scap_policy['tailoring-file-profile-id'] == tailoring_file_profile_a_id - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'name': scap_policy['name'], 'tailoring-file': tailoring_file_b['name'], 'tailoring-file-profile-id': tailoring_file_profile_b_id, } ) - scap_info = Scappolicy.info({'name': scap_policy['name']}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']}) assert scap_info['tailoring-file-id'] == tailoring_file_b['id'] assert scap_info['tailoring-file-profile-id'] == tailoring_file_profile_b_id - Scappolicy.delete({'name': scap_policy['name']}) + module_target_sat.cli.Scappolicy.delete({'name': scap_policy['name']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'name': scap_policy['name']}) + module_target_sat.cli.Scapcontent.info({'name': scap_policy['name']}) - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_content["scap_id"], 'deploy-by': deploy, @@ -691,26 +711,26 @@ def test_positive_associate_scap_policy_with_tailoringfiles( assert scap_policy['tailoring-file-id'] == tailoring_file_a['id'] assert scap_policy['tailoring-file-profile-id'] == tailoring_file_profile_a_id - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'tailoring-file-id': tailoring_file_b['id'], 'tailoring-file-profile-id': tailoring_file_profile_b_id, } ) - scap_info = Scappolicy.info({'id': scap_policy['id']}) + scap_info = module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']}) assert scap_info['tailoring-file-id'] == tailoring_file_b['id'] assert scap_info['tailoring-file-profile-id'] == tailoring_file_profile_b_id - Scappolicy.delete({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.delete({'id': scap_policy['id']}) 
with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'name': scap_policy['name']}) + module_target_sat.cli.Scapcontent.info({'name': scap_policy['name']}) @pytest.mark.parametrize('deploy', **parametrized(['manual', 'ansible'])) @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.e2e - def test_positive_scap_policy_end_to_end(self, deploy, scap_content): + def test_positive_scap_policy_end_to_end(self, deploy, scap_content, module_target_sat): """List all scap policies and read info using id, name :id: d14ab43e-c7a9-4eee-b61c-420b07ca1da9 @@ -735,9 +755,9 @@ def test_positive_scap_policy_end_to_end(self, deploy, scap_content): :CaseImportance: Critical """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': deploy, @@ -748,28 +768,31 @@ def test_positive_scap_policy_end_to_end(self, deploy, scap_content): 'hostgroups': hostgroup['name'], } ) - result = Scappolicy.list() + result = module_target_sat.cli.Scappolicy.list() assert name in [policy['name'] for policy in result] - assert Scappolicy.info({'id': scap_policy['id']})['id'] == scap_policy['id'] - assert Scappolicy.info({'name': scap_policy['name']})['name'] == name + assert ( + module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']})['id'] + == scap_policy['id'] + ) + assert module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']})['name'] == name - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'period': OSCAP_PERIOD['monthly'].lower(), 'day-of-month': 15, } ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['period'] == OSCAP_PERIOD['monthly'].lower() assert scap_info['day-of-month'] == '15' - Scappolicy.delete({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.delete({'id': scap_policy['id']}) with pytest.raises(CLIReturnCodeError): - Scappolicy.info({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']}) @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_update_scap_policy_with_hostgroup(self, scap_content): + def test_positive_update_scap_policy_with_hostgroup(self, scap_content, module_target_sat): """Update scap policy by addition of hostgroup :id: 21b9b82b-7c6c-4944-bc2f-67631e1d4086 @@ -790,9 +813,9 @@ def test_positive_update_scap_policy_with_hostgroup(self, scap_content): :CaseImportance: Medium """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -805,17 +828,17 @@ def test_positive_update_scap_policy_with_hostgroup(self, scap_content): ) assert scap_policy['hostgroups'][0] == hostgroup['name'] assert scap_policy['deployment-option'] == 'ansible' - new_hostgroup = make_hostgroup() - Scappolicy.update( + new_hostgroup = module_target_sat.cli_factory.hostgroup() + module_target_sat.cli.Scappolicy.update( {'id': scap_policy['id'], 'deploy-by': 'ansible', 'hostgroups': new_hostgroup['name']} ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['hostgroups'][0] == new_hostgroup['name'] # Assert if the deployment is updated assert 
scap_info['deployment-option'] == 'ansible' @pytest.mark.tier2 - def test_positive_update_scap_policy_period(self, scap_content): + def test_positive_update_scap_policy_period(self, scap_content, module_target_sat): """Update scap policy by updating the period strategy from monthly to weekly @@ -838,7 +861,7 @@ def test_positive_update_scap_policy_period(self, scap_content): :CaseImportance: Medium """ name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -849,20 +872,20 @@ def test_positive_update_scap_policy_period(self, scap_content): } ) assert scap_policy['period'] == OSCAP_PERIOD['weekly'].lower() - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'period': OSCAP_PERIOD['monthly'].lower(), 'day-of-month': 15, } ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['period'] == OSCAP_PERIOD['monthly'].lower() assert scap_info['day-of-month'] == '15' @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_update_scap_policy_with_content(self, scap_content): + def test_positive_update_scap_policy_with_content(self, scap_content, module_target_sat): """Update the scap policy by updating the scap content associated with the policy @@ -885,7 +908,7 @@ def test_positive_update_scap_policy_with_content(self, scap_content): :CaseImportance: Medium """ name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -897,17 +920,19 @@ def test_positive_update_scap_policy_with_content(self, scap_content): ) assert scap_policy['scap-content-id'] == scap_content["scap_id"] scap_id, scap_profile_id = self.fetch_scap_and_profile_id( - OSCAP_DEFAULT_CONTENT['rhel_firefox'], OSCAP_PROFILE['firefox'] + OSCAP_DEFAULT_CONTENT['rhel_firefox'], module_target_sat ) - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( {'name': name, 'scap-content-id': scap_id, 'scap-content-profile-id': scap_profile_id} ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['scap-content-id'] == scap_id - assert scap_info['scap-content-profile-id'] == scap_profile_id[0] + assert scap_info['scap-content-profile-id'] == scap_profile_id @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_single_server(self, scap_content): + def test_positive_associate_scap_policy_with_single_server( + self, scap_content, module_target_sat + ): """Assign an audit policy to a single server :id: 30566c27-f466-4b4d-beaf-0a5bfda98b89 @@ -931,7 +956,7 @@ def test_positive_associate_scap_policy_with_single_server(self, scap_content): host = entities.Host() host.create() name = gen_string('alpha') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -942,8 +967,10 @@ def test_positive_associate_scap_policy_with_single_server(self, scap_content): } ) host_name = host.name + "." 
+ host.domain.name - Scappolicy.update({'id': scap_policy['id'], 'hosts': host_name}) - hosts = Host.list({'search': 'compliance_policy_id = {}'.format(scap_policy['id'])}) + module_target_sat.cli.Scappolicy.update({'id': scap_policy['id'], 'hosts': host_name}) + hosts = module_target_sat.cli.Host.list( + {'search': 'compliance_policy_id = {}'.format(scap_policy['id'])} + ) assert host_name in [host['name'] for host in hosts] @pytest.mark.stubbed diff --git a/tests/foreman/cli/test_oscap_tailoringfiles.py b/tests/foreman/cli/test_oscap_tailoringfiles.py index 97c9675f053..e95cfb2891a 100644 --- a/tests/foreman/cli/test_oscap_tailoringfiles.py +++ b/tests/foreman/cli/test_oscap_tailoringfiles.py @@ -2,32 +2,25 @@ :Requirement: tailoringfiles -:CaseLevel: Acceptance - :CaseComponent: SCAPPlugin -:Team: Rocket - -:TestType: Functional +:Team: Endeavour :CaseImportance: High :CaseAutomation: Automated -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_tailoringfile -from robottelo.cli.scap_tailoring_files import TailoringFiles -from robottelo.constants import DataFile -from robottelo.constants import SNIPPET_DATA_FILE -from robottelo.utils.datafactory import invalid_names_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.constants import SNIPPET_DATA_FILE, DataFile +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + invalid_names_list, + parametrized, + valid_data_list, +) class TestTailoringFiles: @@ -35,7 +28,7 @@ class TestTailoringFiles: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create(self, tailoring_file_path, name): + def test_positive_create(self, tailoring_file_path, name, module_target_sat): """Create new Tailoring Files using different values types as name :id: e1bb4de2-1b64-4904-bc7c-f0befa9dbd6f @@ -48,13 +41,17 @@ def test_positive_create(self, tailoring_file_path, name): :parametrized: yes """ - tailoring_file = make_tailoringfile( + tailoring_file = module_target_sat.cli_factory.tailoringfile( {'name': name, 'scap-file': tailoring_file_path['satellite']} ) assert tailoring_file['name'] == name + # Delete tailoring files which created for all valid input (ex- latin1, cjk, utf-8, etc.) 
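The mechanical change running through test_oscap.py above is that module-level CLI helpers (make_scapcontent, make_scap_policy, Scapcontent, Scappolicy, ...) become attributes of a satellite fixture, so every call is explicitly bound to one Satellite. A minimal sketch of the resulting call shape, with `sat` standing in for the module_target_sat fixture; the helper function itself is illustrative and not part of the patch:

def scapcontent_roundtrip(sat, title, scap_file):
    # Create a scap content record through the factory namespace...
    sat.cli_factory.scapcontent({'title': title, 'scap-file': scap_file})
    # ...and read it back through the hammer CLI namespace on the same satellite.
    return sat.cli.Scapcontent.info({'title': title})['title'] == title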
diff --git a/tests/foreman/cli/test_oscap_tailoringfiles.py b/tests/foreman/cli/test_oscap_tailoringfiles.py
index 97c9675f053..e95cfb2891a 100644
--- a/tests/foreman/cli/test_oscap_tailoringfiles.py
+++ b/tests/foreman/cli/test_oscap_tailoringfiles.py
@@ -2,32 +2,25 @@

 :Requirement: tailoringfiles

-:CaseLevel: Acceptance
-
 :CaseComponent: SCAPPlugin

-:Team: Rocket
-
-:TestType: Functional
+:Team: Endeavour

 :CaseImportance: High

 :CaseAutomation: Automated

-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_string
+import pytest

-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_tailoringfile
-from robottelo.cli.scap_tailoring_files import TailoringFiles
-from robottelo.constants import DataFile
-from robottelo.constants import SNIPPET_DATA_FILE
-from robottelo.utils.datafactory import invalid_names_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
+from robottelo.constants import SNIPPET_DATA_FILE, DataFile
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
+from robottelo.utils.datafactory import (
+    invalid_names_list,
+    parametrized,
+    valid_data_list,
+)


 class TestTailoringFiles:
@@ -35,7 +28,7 @@ class TestTailoringFiles:

    @pytest.mark.parametrize('name', **parametrized(valid_data_list()))
    @pytest.mark.tier1
-    def test_positive_create(self, tailoring_file_path, name):
+    def test_positive_create(self, tailoring_file_path, name, module_target_sat):
        """Create new Tailoring Files using different values types as name

        :id: e1bb4de2-1b64-4904-bc7c-f0befa9dbd6f

@@ -48,13 +41,17 @@ def test_positive_create(self, tailoring_file_path, name):

        :parametrized: yes
        """
-        tailoring_file = make_tailoringfile(
+        tailoring_file = module_target_sat.cli_factory.tailoringfile(
            {'name': name, 'scap-file': tailoring_file_path['satellite']}
        )
        assert tailoring_file['name'] == name
+        # Delete the tailoring file created for each valid input (e.g. latin1, cjk, utf-8, etc.)
+        module_target_sat.cli.TailoringFiles.delete({'id': tailoring_file['id']})
+        with pytest.raises(CLIReturnCodeError):
+            module_target_sat.cli.TailoringFiles.info({'id': tailoring_file['id']})

    @pytest.mark.tier1
-    def test_positive_create_with_space(self, tailoring_file_path):
+    def test_positive_create_with_space(self, tailoring_file_path, module_target_sat):
        """Create tailoring files with space in name

        :id: c98ef4e7-41c5-4a8b-8a0b-8d53100b75a8

@@ -68,13 +65,13 @@ def test_positive_create_with_space(self, tailoring_file_path):

        :CaseImportance: Medium
        """
        name = gen_string('alphanumeric') + ' ' + gen_string('alphanumeric')
-        tailoring_file = make_tailoringfile(
+        tailoring_file = module_target_sat.cli_factory.tailoringfile(
            {'name': name, 'scap-file': tailoring_file_path['satellite']}
        )
        assert tailoring_file['name'] == name

    @pytest.mark.tier1
-    def test_positive_get_info_of_tailoring_file(self, tailoring_file_path):
+    def test_positive_get_info_of_tailoring_file(self, tailoring_file_path, module_target_sat):
        """Get information of tailoring file

        :id: bc201194-e8c8-4385-a577-09f3455f5a4d

@@ -92,12 +89,14 @@ def test_positive_get_info_of_tailoring_file(self, tailoring_file_path):

        :CaseImportance: Medium
        """
        name = gen_string('alphanumeric')
-        make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']})
-        result = TailoringFiles.info({'name': name})
+        module_target_sat.cli_factory.tailoringfile(
+            {'name': name, 'scap-file': tailoring_file_path['satellite']}
+        )
+        result = module_target_sat.cli.TailoringFiles.info({'name': name})
        assert result['name'] == name

    @pytest.mark.tier1
-    def test_positive_list_tailoring_file(self, tailoring_file_path):
+    def test_positive_list_tailoring_file(self, tailoring_file_path, module_target_sat):
        """List all created tailoring files

        :id: 2ea63c4b-eebe-468d-8153-807e86d1b6a2

@@ -114,8 +113,10 @@ def test_positive_list_tailoring_file(self, tailoring_file_path):

        :CaseImportance: Medium
        """
        name = gen_string('utf8', length=5)
-        make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']})
-        result = TailoringFiles.list()
+        module_target_sat.cli_factory.tailoringfile(
+            {'name': name, 'scap-file': tailoring_file_path['satellite']}
+        )
+        result = module_target_sat.cli.TailoringFiles.list()
        assert name in [tailoringfile['name'] for tailoringfile in result]

    @pytest.mark.tier1
@@ -135,11 +136,13 @@ def test_negative_create_with_invalid_file(self, target_sat):
        target_sat.put(DataFile.SNIPPET_DATA_FILE, f'/tmp/{SNIPPET_DATA_FILE}')
        name = gen_string('alphanumeric')
        with pytest.raises(CLIFactoryError):
-            make_tailoringfile({'name': name, 'scap-file': f'/tmp/{SNIPPET_DATA_FILE}'})
+            target_sat.cli_factory.tailoringfile(
+                {'name': name, 'scap-file': f'/tmp/{SNIPPET_DATA_FILE}'}
+            )

    @pytest.mark.parametrize('name', **parametrized(invalid_names_list()))
    @pytest.mark.tier1
-    def test_negative_create_with_invalid_name(self, tailoring_file_path, name):
+    def test_negative_create_with_invalid_name(self, tailoring_file_path, name, module_target_sat):
        """Create Tailoring files with invalid name

        :id: 973eee82-9735-49bb-b534-0de619aa0279

@@ -155,7 +158,9 @@ def test_negative_create_with_invalid_name(self, tailoring_file_path, name):

        :CaseImportance: Medium
        """
        with pytest.raises(CLIFactoryError):
-            make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']})
+            module_target_sat.cli_factory.tailoringfile(
+                {'name': name, 'scap-file': tailoring_file_path['satellite']}
+            )

    @pytest.mark.stubbed
    @pytest.mark.tier2
@@ -198,11 +203,13 @@ def test_positive_download_tailoring_file(self, tailoring_file_path, target_sat)
        """
        name = gen_string('alphanumeric')
        file_path = f'/var{tailoring_file_path["satellite"]}'
-        tailoring_file = make_tailoringfile(
+        tailoring_file = target_sat.cli_factory.tailoringfile(
            {'name': name, 'scap-file': tailoring_file_path['satellite']}
        )
        assert tailoring_file['name'] == name
-        result = TailoringFiles.download_tailoring_file({'name': name, 'path': '/var/tmp/'})
+        result = target_sat.cli.TailoringFiles.download_tailoring_file(
+            {'name': name, 'path': '/var/tmp/'}
+        )
        assert file_path in result
        result = target_sat.execute(f'find {file_path} 2> /dev/null')
        assert result.status == 0
@@ -210,7 +217,7 @@ def test_positive_download_tailoring_file(self, tailoring_file_path, target_sat)

    @pytest.mark.tier1
    @pytest.mark.upgrade
-    def test_positive_delete_tailoring_file(self, tailoring_file_path):
+    def test_positive_delete_tailoring_file(self, tailoring_file_path, module_target_sat):
        """Delete tailoring file

        :id: 8bab5478-1ef1-484f-aafd-98e5cba7b1e7

@@ -224,10 +231,12 @@ def test_positive_delete_tailoring_file(self, tailoring_file_path):

        :CaseImportance: Medium
        """
-        tailoring_file = make_tailoringfile({'scap-file': tailoring_file_path['satellite']})
-        TailoringFiles.delete({'id': tailoring_file['id']})
+        tailoring_file = module_target_sat.cli_factory.tailoringfile(
+            {'scap-file': tailoring_file_path['satellite']}
+        )
+        module_target_sat.cli.TailoringFiles.delete({'id': tailoring_file['id']})
        with pytest.raises(CLIReturnCodeError):
-            TailoringFiles.info({'id': tailoring_file['id']})
+            module_target_sat.cli.TailoringFiles.info({'id': tailoring_file['id']})

    @pytest.mark.stubbed
    @pytest.mark.tier4
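A pattern this patch adds to the parametrized create tests (here and in test_oscap.py) is create-verify-delete-verify, so entities generated for every name flavor (latin1, cjk, utf8, ...) do not accumulate between runs. Condensed to a skeleton under the same imports the file already uses; the helper name is illustrative:

import pytest
from robottelo.exceptions import CLIReturnCodeError

def create_then_clean(sat, name, scap_file):
    entity = sat.cli_factory.tailoringfile({'name': name, 'scap-file': scap_file})
    assert entity['name'] == name
    sat.cli.TailoringFiles.delete({'id': entity['id']})
    # hammer exits non-zero for a missing entity, surfacing as CLIReturnCodeError
    with pytest.raises(CLIReturnCodeError):
        sat.cli.TailoringFiles.info({'id': entity['id']})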
diff --git a/tests/foreman/cli/test_ostreebranch.py b/tests/foreman/cli/test_ostreebranch.py
index 9f44e365e3a..d768b3f2d70 100644
--- a/tests/foreman/cli/test_ostreebranch.py
+++ b/tests/foreman/cli/test_ostreebranch.py
@@ -4,30 +4,18 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Repositories

 :team: Phoenix-content

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import random

-import pytest
 from nailgun import entities
+import pytest

-from robottelo.cli.contentview import ContentView
-from robottelo.cli.factory import make_content_view
-from robottelo.cli.factory import make_org_with_credentials
-from robottelo.cli.factory import make_product_with_credentials
-from robottelo.cli.factory import make_repository_with_credentials
-from robottelo.cli.ostreebranch import OstreeBranch
-from robottelo.cli.repository import Repository
 from robottelo.config import settings
 from robottelo.constants.repos import OSTREE_REPO

@@ -47,15 +35,17 @@ def ostree_user_credentials():

 @pytest.fixture(scope='module')
-def ostree_repo_with_user(ostree_user_credentials):
+def ostree_repo_with_user(ostree_user_credentials, module_target_sat):
    """Create an user, organization, product and ostree repo,
    sync ostree repo for particular user,
    create content view and publish it for particular user
    """
-    org = make_org_with_credentials(credentials=ostree_user_credentials)
-    product = make_product_with_credentials({'organization-id': org['id']}, ostree_user_credentials)
+    org = module_target_sat.cli_factory.org_with_credentials(credentials=ostree_user_credentials)
+    product = module_target_sat.cli_factory.product_with_credentials(
+        {'organization-id': org['id']}, ostree_user_credentials
+    )
    # Create new custom ostree repo
-    ostree_repo = make_repository_with_credentials(
+    ostree_repo = module_target_sat.cli_factory.repository_with_credentials(
        {
            'product-id': product['id'],
            'content-type': 'ostree',
@@ -64,97 +54,105 @@
        },
        ostree_user_credentials,
    )
-    Repository.with_user(*ostree_user_credentials).synchronize({'id': ostree_repo['id']})
-    cv = make_content_view(
+    module_target_sat.cli.Repository.with_user(*ostree_user_credentials).synchronize(
+        {'id': ostree_repo['id']}
+    )
+    cv = module_target_sat.cli_factory.make_content_view(
        {'organization-id': org['id'], 'repository-ids': [ostree_repo['id']]},
        ostree_user_credentials,
    )
-    ContentView.with_user(*ostree_user_credentials).publish({'id': cv['id']})
-    cv = ContentView.with_user(*ostree_user_credentials).info({'id': cv['id']})
+    module_target_sat.cli.ContentView.with_user(*ostree_user_credentials).publish({'id': cv['id']})
+    cv = module_target_sat.cli.ContentView.with_user(*ostree_user_credentials).info(
+        {'id': cv['id']}
+    )
    return {'cv': cv, 'org': org, 'ostree_repo': ostree_repo, 'product': product}


 @pytest.mark.skip_if_open("BZ:1625783")
-def test_positive_list(ostree_user_credentials, ostree_repo_with_user):
+def test_positive_list(ostree_user_credentials, ostree_repo_with_user, module_target_sat):
    """List Ostree Branches

    :id: 0f5e7e63-c0e3-43fc-8238-caf19a478a46

    :expectedresults: Ostree Branch List is displayed
    """
-    result = OstreeBranch.with_user(*ostree_user_credentials).list()
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list()
    assert len(result) > 0


 @pytest.mark.upgrade
-def test_positive_list_by_repo_id(ostree_repo_with_user, ostree_user_credentials):
+def test_positive_list_by_repo_id(
+    ostree_repo_with_user, ostree_user_credentials, module_target_sat
+):
    """List Ostree branches by repo id

    :id: 8cf1a973-031c-4c02-af14-0faba22ab60b

    :expectedresults: Ostree Branch List is displayed
-
    """
-    branch = OstreeBranch.with_user(*ostree_user_credentials)
+    branch = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials)
    result = branch.list({'repository-id': ostree_repo_with_user['ostree_repo']['id']})
    assert len(result) > 0


 @pytest.mark.skip_if_open("BZ:1625783")
-def test_positive_list_by_product_id(ostree_repo_with_user, ostree_user_credentials):
+def test_positive_list_by_product_id(
+    ostree_repo_with_user, ostree_user_credentials, module_target_sat
+):
    """List Ostree branches by product id

    :id: e7b9d04d-cace-4271-b166-214017200c53

    :expectedresults: Ostree Branch List is displayed
    """
-    result = OstreeBranch.with_user(*ostree_user_credentials).list(
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list(
        {'product-id': ostree_repo_with_user['product']['id']}
    )
    assert len(result) > 0


 @pytest.mark.skip_if_open("BZ:1625783")
-def test_positive_list_by_org_id(ostree_repo_with_user, ostree_user_credentials):
+def test_positive_list_by_org_id(ostree_repo_with_user, ostree_user_credentials, module_target_sat):
    """List Ostree branches by org id

    :id: 5b169619-305f-4934-b363-068193330701

    :expectedresults: Ostree Branch List is displayed
    """
-    result = OstreeBranch.with_user(*ostree_user_credentials).list(
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list(
        {'organization-id': ostree_repo_with_user['org']['id']}
    )
    assert len(result) > 0


 @pytest.mark.skip_if_open("BZ:1625783")
-def test_positive_list_by_cv_id(ostree_repo_with_user, ostree_user_credentials):
+def test_positive_list_by_cv_id(ostree_repo_with_user, ostree_user_credentials, module_target_sat):
    """List Ostree branches by cv id

    :id: 3654f107-44ee-4af2-a9e4-f9fd8c68491e

    :expectedresults: Ostree Branch List is displayed
-
    """
-    result = OstreeBranch.with_user(*ostree_user_credentials).list(
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list(
        {'content-view-id': ostree_repo_with_user['cv']['id']}
    )
    assert len(result) > 0


 @pytest.mark.skip_if_open("BZ:1625783")
-def test_positive_info_by_id(ostree_user_credentials, ostree_repo_with_user):
+def test_positive_info_by_id(ostree_user_credentials, ostree_repo_with_user, module_target_sat):
    """Get info for Ostree branch by id

    :id: 7838c9a8-56da-44de-883c-28571ecfa75c

    :expectedresults: Ostree Branch Info is displayed
    """
-    result = OstreeBranch.with_user(*ostree_user_credentials).list()
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list()
    assert len(result) > 0
    # Grab a random branch
    branch = random.choice(result)
-    result = OstreeBranch.with_user(*ostree_user_credentials).info({'id': branch['id']})
+    result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).info(
+        {'id': branch['id']}
+    )
    assert branch['id'] == result['id']
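Every OstreeBranch call above is funneled through .with_user(*ostree_user_credentials) so it runs as the repo-owning user rather than admin. Robottelo's actual implementation lives in its CLI base class; one plausible way to get that chainable behaviour, sketched purely for illustration (names and defaults hypothetical):

class Base:
    # Default credentials used when no override is supplied (values illustrative).
    username = 'admin'
    password = 'changeme'

    @classmethod
    def with_user(cls, username, password):
        # Build a throwaway subclass carrying the override, so it never
        # leaks back into the class-level admin defaults.
        return type(f'{cls.__name__}AsUser', (cls,), {'username': username, 'password': password})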
diff --git a/tests/foreman/cli/test_partitiontable.py b/tests/foreman/cli/test_partitiontable.py
index 95854b1f0f1..35e52cbbe65 100644
--- a/tests/foreman/cli/test_partitiontable.py
+++ b/tests/foreman/cli/test_partitiontable.py
@@ -4,29 +4,20 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Hosts

 :Team: Endeavour

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 from random import randint

-import pytest
 from fauxfactory import gen_string
+import pytest

-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import make_os
-from robottelo.cli.factory import make_partition_table
-from robottelo.cli.partitiontable import PartitionTable
-from robottelo.utils.datafactory import generate_strings_list
-from robottelo.utils.datafactory import parametrized
+from robottelo.exceptions import CLIReturnCodeError
+from robottelo.utils.datafactory import generate_strings_list, parametrized


 class TestPartitionTable:
@@ -34,7 +25,7 @@ class TestPartitionTable:

    @pytest.mark.tier1
    @pytest.mark.parametrize('name', **parametrized(generate_strings_list(length=1)))
-    def test_positive_create_with_one_character_name(self, name):
+    def test_positive_create_with_one_character_name(self, name, target_sat):
        """Create Partition table with 1 character in name

        :id: cfec857c-ed6e-4472-93bb-70e1d4f39bae

@@ -47,23 +38,24 @@ def test_positive_create_with_one_character_name(self, name):

        :CaseImportance: Medium
        """
-        ptable = make_partition_table({'name': name})
+        ptable = target_sat.cli_factory.make_partition_table({'name': name})
        assert ptable['name'] == name

    @pytest.mark.tier1
    @pytest.mark.upgrade
    @pytest.mark.parametrize(
-        'name, new_name',
+        ('name', 'new_name'),
        **parametrized(
            list(
                zip(
                    generate_strings_list(length=randint(4, 30)),
                    generate_strings_list(length=randint(4, 30)),
+                    strict=True,
                )
            )
        )
    )
-    def test_positive_crud_with_name(self, name, new_name):
+    def test_positive_crud_with_name(self, name, new_name, module_target_sat):
        """Create, read, update and delete Partition Tables with different names

        :id: ce512fef-fbf2-4365-b70b-d30221111d96

@@ -74,17 +66,17 @@ def test_positive_crud_with_name(self, name, new_name):

        :CaseImportance: Critical
        """
-        ptable = make_partition_table({'name': name})
+        ptable = module_target_sat.cli_factory.make_partition_table({'name': name})
        assert ptable['name'] == name
-        PartitionTable.update({'id': ptable['id'], 'new-name': new_name})
-        ptable = PartitionTable.info({'id': ptable['id']})
+        module_target_sat.cli.PartitionTable.update({'id': ptable['id'], 'new-name': new_name})
+        ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']})
        assert ptable['name'] == new_name
-        PartitionTable.delete({'name': ptable['name']})
+        module_target_sat.cli.PartitionTable.delete({'name': ptable['name']})
        with pytest.raises(CLIReturnCodeError):
-            PartitionTable.info({'name': ptable['name']})
+            module_target_sat.cli.PartitionTable.info({'name': ptable['name']})

    @pytest.mark.tier1
-    def test_positive_create_with_content(self):
+    def test_positive_create_with_content(self, module_target_sat):
        """Create a Partition Table with content

        :id: 28bfbd8b-2ada-44d0-89f3-63885cfb3495

@@ -94,13 +86,13 @@ def test_positive_create_with_content(self):

        :CaseImportance: Critical
        """
        content = 'Fake ptable'
-        ptable = make_partition_table({'content': content})
-        ptable_content = PartitionTable().dump({'id': ptable['id']})
+        ptable = module_target_sat.cli_factory.make_partition_table({'content': content})
+        ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']})
        assert content in ptable_content

    @pytest.mark.tier1
    @pytest.mark.upgrade
-    def test_positive_create_with_content_length(self):
+    def test_positive_create_with_content_length(self, module_target_sat):
        """Create a Partition Table with content length more than 4096 chars

        :id: 59e6f9ef-85c2-4229-8831-00edb41b19f4

@@ -110,12 +102,12 @@ def test_positive_create_with_content_length(self):

        :BZ: 1270181
        """
        content = gen_string('alpha', 5000)
-        ptable = make_partition_table({'content': content})
-        ptable_content = PartitionTable().dump({'id': ptable['id']})
+        ptable = module_target_sat.cli_factory.make_partition_table({'content': content})
+        ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']})
        assert content in ptable_content

    @pytest.mark.tier1
-    def test_positive_delete_by_id(self):
+    def test_positive_delete_by_id(self, module_target_sat):
        """Create a Partition Table then delete it by its ID

        :id: 4d2369eb-4dc1-4ab5-96d4-c872c39f4ff5

@@ -124,13 +116,13 @@ def test_positive_delete_by_id(self):

        :CaseImportance: Critical
        """
-        ptable = make_partition_table()
-        PartitionTable.delete({'id': ptable['id']})
+        ptable = module_target_sat.cli_factory.make_partition_table()
+        module_target_sat.cli.PartitionTable.delete({'id': ptable['id']})
        with pytest.raises(CLIReturnCodeError):
-            PartitionTable.info({'id': ptable['id']})
+            module_target_sat.cli.PartitionTable.info({'id': ptable['id']})

    @pytest.mark.tier2
-    def test_positive_add_remove_os_by_id(self):
+    def test_positive_add_remove_os_by_id(self, module_target_sat):
        """Create a partition table then add and remove an operating system to it using
        IDs for association

@@ -138,20 +130,23 @@

        :expectedresults: Operating system is added to partition table

-        :CaseLevel: Integration
        """
-        ptable = make_partition_table()
-        os = make_os()
-        PartitionTable.add_operating_system({'id': ptable['id'], 'operatingsystem-id': os['id']})
-        ptable = PartitionTable.info({'id': ptable['id']})
+        ptable = module_target_sat.cli_factory.make_partition_table()
+        os = module_target_sat.cli_factory.make_os()
+        module_target_sat.cli.PartitionTable.add_operating_system(
+            {'id': ptable['id'], 'operatingsystem-id': os['id']}
+        )
+        ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']})
        assert os['title'] in ptable['operating-systems']
-        PartitionTable.remove_operating_system({'id': ptable['id'], 'operatingsystem-id': os['id']})
-        ptable = PartitionTable.info({'id': ptable['id']})
+        module_target_sat.cli.PartitionTable.remove_operating_system(
+            {'id': ptable['id'], 'operatingsystem-id': os['id']}
+        )
+        ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']})
        assert os['title'] not in ptable['operating-systems']

    @pytest.mark.tier2
    @pytest.mark.upgrade
-    def test_positive_add_remove_os_by_name(self):
+    def test_positive_add_remove_os_by_name(self, module_target_sat):
        """Create a partition table then add and remove an operating system to it using
        names for association

@@ -159,17 +154,16 @@

        :expectedresults: Operating system is added to partition table

-        :CaseLevel: Integration
        """
-        ptable = make_partition_table()
-        os = make_os()
-        PartitionTable.add_operating_system(
+        ptable = module_target_sat.cli_factory.make_partition_table()
+        os = module_target_sat.cli_factory.make_os()
+        module_target_sat.cli.PartitionTable.add_operating_system(
            {'name': ptable['name'], 'operatingsystem': os['title']}
        )
-        ptable = PartitionTable.info({'name': ptable['name']})
+        ptable = module_target_sat.cli.PartitionTable.info({'name': ptable['name']})
        assert os['title'] in ptable['operating-systems']
-        PartitionTable.remove_operating_system(
+        module_target_sat.cli.PartitionTable.remove_operating_system(
            {'name': ptable['name'], 'operatingsystem': os['title']}
        )
-        ptable = PartitionTable.info({'name': ptable['name']})
+        ptable = module_target_sat.cli.PartitionTable.info({'name': ptable['name']})
        assert os['title'] not in ptable['operating-systems']
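The crud_with_name parametrization above pairs two independently generated name lists with zip(..., strict=True), available since Python 3.10: a length mismatch now fails test collection loudly instead of silently dropping the unpaired tail. A self-contained illustration:

names = ['a', 'bb', 'ccc']
new_names = ['x', 'yy']
list(zip(names, new_names))               # [('a', 'x'), ('bb', 'yy')] -- tail silently dropped
list(zip(names, new_names, strict=True))  # ValueError: zip() argument 2 is shorter than argument 1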
diff --git a/tests/foreman/cli/test_ping.py b/tests/foreman/cli/test_ping.py
index 1993ad01049..0a288143233 100644
--- a/tests/foreman/cli/test_ping.py
+++ b/tests/foreman/cli/test_ping.py
@@ -4,24 +4,18 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: Hammer

 :Team: Endeavour

-:TestType: Functional
-
 :CaseImportance: Critical

-:Upstream: No
 """
 import pytest

 pytestmark = [pytest.mark.tier1, pytest.mark.upgrade]


-@pytest.mark.build_sanity
 @pytest.mark.parametrize('switch_user', [False, True], ids=['root', 'non-root'])
 def test_positive_ping(target_sat, switch_user):
    """hammer ping return code
@@ -53,7 +47,7 @@ def test_positive_ping(target_sat, switch_user):
    # iterate over the lines grouping every 3 lines
    # example [1, 2, 3, 4, 5, 6] will return [(1, 2, 3), (4, 5, 6)]
    # only the status line is relevant for this test
-    for _, status, _ in zip(*[iter(result.stdout)] * 3):
+    for _, status, _ in zip(*[iter(result.stdout)] * 3, strict=False):  # should this be strict?
        status_count += 1
        if status.split(':')[1].strip().lower() == 'ok':
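The hammer ping parser above relies on zip(*[iter(result.stdout)] * 3, strict=False) to walk stdout in 3-line records. All three zip arguments are the same iterator object, so each tuple advances it by three lines; strict=False simply tolerates a trailing partial record. A self-contained illustration with fabricated ping output:

lines = [
    'candlepin', 'Status: ok', 'Server Response: Duration: 22ms',
    'pulp', 'Status: ok', 'Server Response: Duration: 9ms',
]
# One iterator, referenced three times: every zip step consumes three lines.
records = list(zip(*[iter(lines)] * 3))
statuses = [status.split(':')[1].strip().lower() for _, status, _ in records]
assert statuses == ['ok', 'ok']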
diff --git a/tests/foreman/cli/test_product.py b/tests/foreman/cli/test_product.py
index be4eedbdf50..3d413209db3 100644
--- a/tests/foreman/cli/test_product.py
+++ b/tests/foreman/cli/test_product.py
@@ -4,41 +4,25 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: Repositories

 :team: Phoenix-content

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
+from fauxfactory import gen_alphanumeric, gen_integer, gen_string, gen_url
 import pytest
-from fauxfactory import gen_alphanumeric
-from fauxfactory import gen_integer
-from fauxfactory import gen_string
-from fauxfactory import gen_url
-
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.defaults import Defaults
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_content_credential
-from robottelo.cli.factory import make_org
-from robottelo.cli.factory import make_product
-from robottelo.cli.factory import make_repository
-from robottelo.cli.factory import make_sync_plan
-from robottelo.cli.package import Package
-from robottelo.cli.product import Product
-from robottelo.cli.repository import Repository
+
 from robottelo.config import settings
 from robottelo.constants import FAKE_0_YUM_REPO_PACKAGES_COUNT
-from robottelo.utils.datafactory import invalid_values_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
-from robottelo.utils.datafactory import valid_labels_list
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
+from robottelo.utils.datafactory import (
+    invalid_values_list,
+    parametrized,
+    valid_data_list,
+    valid_labels_list,
+)


 @pytest.mark.tier1
@@ -56,11 +40,11 @@ def test_positive_CRUD(module_org, target_sat):

    :CaseImportance: Critical
    """
    desc = list(valid_data_list().values())[0]
-    gpg_key = make_content_credential({'organization-id': module_org.id})
+    gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id})
    name = list(valid_data_list().values())[0]
    label = valid_labels_list()[0]
-    sync_plan = make_sync_plan({'organization-id': module_org.id})
-    product = make_product(
+    sync_plan = target_sat.cli_factory.sync_plan({'organization-id': module_org.id})
+    product = target_sat.cli_factory.make_product(
        {
            'description': desc,
            'gpg-key-id': gpg_key['id'],
@@ -79,10 +63,10 @@

    # update
    desc = list(valid_data_list().values())[0]
-    new_gpg_key = make_content_credential({'organization-id': module_org.id})
-    new_sync_plan = make_sync_plan({'organization-id': module_org.id})
+    new_gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id})
+    new_sync_plan = target_sat.cli_factory.sync_plan({'organization-id': module_org.id})
    new_prod_name = gen_string('alpha', 8)
-    Product.update(
+    target_sat.cli.Product.update(
        {
            'description': desc,
            'id': product['id'],
@@ -91,7 +75,7 @@
            'name': new_prod_name,
        }
    )
-    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
+    product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert product['name'] == new_prod_name
    assert product['description'] == desc
    assert product['gpg']['gpg-key-id'] == new_gpg_key['id']
@@ -100,36 +84,36 @@
    assert product['sync-plan-id'] != sync_plan['id']

    # synchronize
-    repo = make_repository(
+    repo = target_sat.cli_factory.make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
            'url': settings.repos.yum_0.url,
        },
    )
-    Product.synchronize({'id': product['id'], 'organization-id': module_org.id})
-    packages = Package.list({'product-id': product['id']})
-    repo = Repository.info({'id': repo['id']})
+    target_sat.cli.Product.synchronize({'id': product['id'], 'organization-id': module_org.id})
+    packages = target_sat.cli.Package.list({'product-id': product['id']})
+    repo = target_sat.cli.Repository.info({'id': repo['id']})
    assert int(repo['content-counts']['packages']) == len(packages)
    assert len(packages) == FAKE_0_YUM_REPO_PACKAGES_COUNT

    # delete
-    Product.remove_sync_plan({'id': product['id']})
-    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
+    target_sat.cli.Product.remove_sync_plan({'id': product['id']})
+    product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert len(product['sync-plan-id']) == 0
-    Product.delete({'id': product['id']})
+    target_sat.cli.Product.delete({'id': product['id']})
    target_sat.wait_for_tasks(
        search_query="label = Actions::Katello::Product::Destroy"
        f" and resource_id = {product['id']}",
        max_tries=10,
    )
    with pytest.raises(CLIReturnCodeError):
-        Product.info({'id': product['id'], 'organization-id': module_org.id})
+        target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id})


 @pytest.mark.tier2
 @pytest.mark.parametrize('name', **parametrized(invalid_values_list()))
-def test_negative_create_with_name(name, module_org):
+def test_negative_create_with_name(name, module_org, module_target_sat):
    """Check that only valid names can be used

    :id: 2da26ab2-8d79-47ea-b4d2-defcd98a0649

@@ -141,14 +125,14 @@

    :CaseImportance: High
    """
    with pytest.raises(CLIFactoryError):
-        make_product({'name': name, 'organization-id': module_org.id})
+        module_target_sat.cli_factory.make_product({'name': name, 'organization-id': module_org.id})


 @pytest.mark.tier2
 @pytest.mark.parametrize(
    'label', **parametrized([gen_string(e, 15) for e in ('latin1', 'utf8', 'html')])
 )
-def test_negative_create_with_label(label, module_org):
+def test_negative_create_with_label(label, module_org, module_target_sat):
    """Check that only valid labels can be used

    :id: 7cf970aa-48dc-425b-ae37-1e15dfab0626

@@ -160,7 +144,7 @@

    :CaseImportance: High
    """
    with pytest.raises(CLIFactoryError):
-        make_product(
+        module_target_sat.cli_factory.make_product(
            {
                'label': label,
                'name': gen_alphanumeric(),
@@ -183,18 +167,19 @@ def test_product_list_with_default_settings(module_org, target_sat):

    :customerscenario: true

    :expectedresults: product/repository list should work as expected.
-
    """
    org_id = str(module_org.id)
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
-    non_default_org = make_org()
-    default_product = make_product({'name': default_product_name, 'organization-id': org_id})
-    non_default_product = make_product(
+    non_default_org = target_sat.cli_factory.make_org()
+    default_product = target_sat.cli_factory.make_product(
+        {'name': default_product_name, 'organization-id': org_id}
+    )
+    non_default_product = target_sat.cli_factory.make_product(
        {'name': non_default_product_name, 'organization-id': non_default_org['id']}
    )
    for product in default_product, non_default_product:
-        make_repository(
+        target_sat.cli_factory.make_repository(
            {
                'organization-id': org_id,
                'product-id': product['id'],
@@ -202,7 +187,7 @@
            },
        )

-    Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
+    target_sat.cli.Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
    result = target_sat.cli.Defaults.list(per_page=False)
    assert any([res['value'] == org_id for res in result if res['parameter'] == 'organization_id'])

@@ -214,20 +199,20 @@
        assert any([res['product'] == default_product_name for res in result])

        # verify that defaults setting should not affect other entities
-        product_list = Product.list({'organization-id': non_default_org['id']})
+        product_list = target_sat.cli.Product.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == product_list[0]['name']
-        repository_list = Repository.list({'organization-id': non_default_org['id']})
+        repository_list = target_sat.cli.Repository.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == repository_list[0]['product']

    finally:
-        Defaults.delete({'param-name': 'organization_id'})
+        target_sat.cli.Defaults.delete({'param-name': 'organization_id'})
        result = target_sat.cli.Defaults.list(per_page=False)
        assert not [res for res in result if res['parameter'] == 'organization_id']


 @pytest.mark.tier2
 @pytest.mark.skip_if_open('BZ:1999541')
-def test_positive_product_sync_state(module_org):
+def test_positive_product_sync_state(module_org, module_target_sat):
    """hammer product info shows correct sync state.

    :id: 58af6239-85d7-4b8b-bd2d-ab4cd4f29840

@@ -236,7 +221,7 @@

    :customerscenario: true

-    :Steps:
+    :steps:
        1. Sync a custom repository that fails.
        2. Run `hammer product info --product-id <id>`.
        3. Successfully sync another repository under the same product.
        4. Run `hammer product info --product-id <id>` again.

    :expectedresults: hammer should show 'Sync Incomplete' in both cases.
    """
-    product = make_product({'organization-id': module_org.id})
-    repo_a1 = make_repository(
+    product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id})
+    repo_a1 = module_target_sat.cli_factory.make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
@@ -256,13 +241,15 @@
    )

    with pytest.raises(CLIReturnCodeError):
-        Repository.synchronize({'id': repo_a1['id']})
+        module_target_sat.cli.Repository.synchronize({'id': repo_a1['id']})

-    product_info = Product.info({'id': product['id'], 'organization-id': module_org.id})
-    product_list = Product.list({'organization-id': module_org.id})
+    product_info = module_target_sat.cli.Product.info(
+        {'id': product['id'], 'organization-id': module_org.id}
+    )
+    product_list = module_target_sat.cli.Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list]

-    repo_a2 = make_repository(
+    repo_a2 = module_target_sat.cli_factory.make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
@@ -271,7 +258,9 @@
        },
    )

-    Repository.synchronize({'id': repo_a2['id']})
-    product_info = Product.info({'id': product['id'], 'organization-id': module_org.id})
-    product_list = Product.list({'organization-id': module_org.id})
+    module_target_sat.cli.Repository.synchronize({'id': repo_a2['id']})
+    product_info = module_target_sat.cli.Product.info(
+        {'id': product['id'], 'organization-id': module_org.id}
+    )
+    product_list = module_target_sat.cli.Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list]
diff --git a/tests/foreman/cli/test_provisioning.py b/tests/foreman/cli/test_provisioning.py
index 8e67bdcbb48..1c7799edc53 100644
--- a/tests/foreman/cli/test_provisioning.py
+++ b/tests/foreman/cli/test_provisioning.py
@@ -4,17 +4,12 @@

 :CaseAutomation: NotAutomated

-:CaseLevel: System
-
 :CaseComponent: Provisioning

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: Critical

-:Upstream: No
 """
 import pytest
diff --git a/tests/foreman/cli/test_provisioningtemplate.py b/tests/foreman/cli/test_provisioningtemplate.py
index e379ecfaa9c..59aabc2d2c4 100644
--- a/tests/foreman/cli/test_provisioningtemplate.py
+++ b/tests/foreman/cli/test_provisioningtemplate.py
@@ -4,39 +4,33 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: ProvisioningTemplates

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import random
 from random import randint

-import pytest
 from fauxfactory import gen_string
-from nailgun import entities
+import pytest

 from robottelo import constants
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import make_template
-from robottelo.cli.template import Template
-from robottelo.cli.user import User
+from robottelo.exceptions import CLIReturnCodeError


 @pytest.fixture(scope='module')
-def module_os_with_minor():
-    return entities.OperatingSystem(minor=randint(0, 10)).create()
+def module_os_with_minor(module_target_sat):
+    return module_target_sat.api.OperatingSystem(minor=randint(0, 10)).create()


 @pytest.mark.e2e
-def test_positive_end_to_end_crud(module_org, module_location, module_os, target_sat):
+@pytest.mark.upgrade
+def test_positive_end_to_end_crud(
+    module_org, module_location, module_os_with_minor, module_target_sat
+):
    """Create a new provisioning template with several attributes, list, update them,
    clone the provisioning template and then delete it

@@ -59,14 +53,14 @@
    cloned_template_name = gen_string('alpha')
    template_type = random.choice(constants.TEMPLATE_TYPES)
    # create
-    template = target_sat.cli_factory.make_template(
+    template = module_target_sat.cli_factory.make_template(
        {
            'name': name,
            'audit-comment': gen_string('alpha'),
            'description': gen_string('alpha'),
            'locked': 'no',
            'type': template_type,
-            'operatingsystem-ids': module_os.id,
+            'operatingsystem-ids': module_os_with_minor.id,
            'organization-ids': module_org.id,
            'location-ids': module_location.id,
        }
@@ -74,22 +68,22 @@
    assert template['name'] == name
    assert template['locked'] == 'no'
    assert template['type'] == template_type
-    assert module_os.title in template['operating-systems']
+    assert module_os_with_minor.title in template['operating-systems']
    assert module_org.name in template['organizations']
    assert module_location.name in template['locations']
    # list
-    template_list = target_sat.cli.Template.list({'search': f'name={name}'})
+    template_list = module_target_sat.cli.Template.list({'search': f'name={name}'})
    assert template_list[0]['name'] == name
    assert template_list[0]['type'] == template_type
    # update
-    updated_pt = target_sat.cli.Template.update({'id': template['id'], 'name': new_name})
-    template = target_sat.cli.Template.info({'id': updated_pt[0]['id']})
+    updated_pt = module_target_sat.cli.Template.update({'id': template['id'], 'name': new_name})
+    template = module_target_sat.cli.Template.info({'id': updated_pt[0]['id']})
    assert new_name == template['name'], "The Provisioning template wasn't properly renamed"
    # clone
-    template_clone = target_sat.cli.Template.clone(
+    template_clone = module_target_sat.cli.Template.clone(
        {'id': template['id'], 'new-name': cloned_template_name}
    )
-    new_template = target_sat.cli.Template.info({'id': template_clone[0]['id']})
+    new_template = module_target_sat.cli.Template.info({'id': template_clone[0]['id']})
    assert new_template['name'] == cloned_template_name
    assert new_template['locked'] == template['locked']
    assert new_template['type'] == template['type']
@@ -97,47 +91,16 @@
    assert new_template['organizations'] == template['organizations']
    assert new_template['locations'] == template['locations']
    # delete
-    target_sat.cli.Template.delete({'id': template['id']})
+    module_target_sat.cli.Template.delete({'id': template['id']})
    with pytest.raises(CLIReturnCodeError):
-        target_sat.cli.Template.info({'id': template['id']})
-
-
-@pytest.mark.tier1
-def test_positive_create_with_name():
-    """Check if Template can be created
-
-    :id: 77deaae8-447b-47cc-8af3-8b17476c905f
-
-    :expectedresults: Template is created
-
-    :CaseImportance: Critical
-    """
-    name = gen_string('alpha')
-    template = make_template({'name': name})
-    assert template['name'] == name
+        module_target_sat.cli.Template.info({'id': template['id']})


 @pytest.mark.tier1
-def test_positive_update_name():
-    """Check if Template can be updated
-
-    :id: 99bdab7b-1279-4349-a655-4294395ecbe1
-
-    :expectedresults: Template is updated
-
-    :CaseImportance: Critical
-    """
-    template = make_template()
-    updated_name = gen_string('alpha')
-    Template.update({'id': template['id'], 'name': updated_name})
-    template = Template.info({'id': template['id']})
-    assert updated_name == template['name']
-
-
-@pytest.mark.tier1
-def test_positive_update_with_manager_role(module_location, module_org):
+@pytest.mark.parametrize('role_name', ['Manager', 'Organization admin'])
+def test_positive_update_with_role(module_target_sat, module_location, module_org, role_name):
    """Create template providing the initial name, then update its name
-    with manager user role.
+    with manager/organization admin user roles.

    :id: 28c4357a-93cb-4b01-a445-5db50435bcc0

@@ -147,47 +110,35 @@

    :CaseImportance: Medium

    :BZ: 1277308
+
+    :parametrized: yes
    """
    new_name = gen_string('alpha')
    username = gen_string('alpha')
    password = gen_string('alpha')
-    template = make_template(
+    template = module_target_sat.cli_factory.make_template(
        {'organization-ids': module_org.id, 'location-ids': module_location.id}
    )
-    # Create user with Manager role
-    user = entities.User(
+    # Create user with Manager/Organization admin role
+    user = module_target_sat.api.User(
        login=username,
        password=password,
        admin=False,
        organization=[module_org.id],
        location=[module_location.id],
    ).create()
-    User.add_role({'id': user.id, 'role': "Manager"})
+    module_target_sat.cli.User.add_role({'id': user.id, 'role': role_name})
    # Update template name with that user
-    Template.with_user(username=username, password=password).update(
+    module_target_sat.cli.Template.with_user(username=username, password=password).update(
        {'id': template['id'], 'name': new_name}
    )
-    template = Template.info({'id': template['id']})
+    template = module_target_sat.cli.Template.info({'id': template['id']})
    assert new_name == template['name']


 @pytest.mark.tier1
-def test_positive_create_with_loc(module_location):
-    """Check if Template with Location can be created
-
-    :id: 263aba0e-4f54-4227-af97-f4bc8f5c0788
-
-    :expectedresults: Template is created and new Location has been
-        assigned
-
-    :CaseImportance: Medium
-    """
-    new_template = make_template({'location-ids': module_location.id})
-    assert module_location.name in new_template['locations']
-
-
-@pytest.mark.tier1
-def test_positive_create_locked():
+@pytest.mark.upgrade
+def test_positive_create_locked(module_target_sat):
    """Check that locked Template can be created

    :id: ff10e369-85c6-45f3-9cda-7e1c17a6632d

@@ -197,80 +148,39 @@

    :CaseImportance: Medium
    """
-    new_template = make_template({'locked': 'true', 'name': gen_string('alpha')})
+    new_template = module_target_sat.cli_factory.make_template(
+        {'locked': 'true', 'name': gen_string('alpha')}
+    )
    assert new_template['locked'] == 'yes'


-@pytest.mark.tier2
-def test_positive_create_with_org(module_org):
-    """Check if Template with Organization can be created
-
-    :id: 5de5ca76-1a39-46ac-8dd4-5d41b4b49076
-
-    :expectedresults: Template is created and new Organization has been
-        assigned
-
-    :CaseImportance: Medium
-    """
-    new_template = make_template({'name': gen_string('alpha'), 'organization-ids': module_org.id})
-    assert module_org.name in new_template['organizations']
-
-
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_add_os_by_id(module_os_with_minor):
-    """Check if operating system can be added to a template
+def test_positive_add_remove_os_by_id(module_target_sat, module_os_with_minor):
+    """Check if operating system can be added and removed to a template

    :id: d9f481b3-9757-4208-b451-baf4792d4d70

-    :expectedresults: Operating system is added to the template
-
-    :CaseLevel: Integration
+    :expectedresults: Operating system is added/removed from the template
    """
-    new_template = make_template()
-    Template.add_operatingsystem(
-        {'id': new_template['id'], 'operatingsystem-id': module_os_with_minor.id}
-    )
-    new_template = Template.info({'id': new_template['id']})
-    os_string = (
-        f'{module_os_with_minor.name} {module_os_with_minor.major}.{module_os_with_minor.minor}'
+    os = module_os_with_minor
+    os_string = f'{os.name} {os.major}.{os.minor}'
+    new_template = module_target_sat.cli_factory.make_template()
+    module_target_sat.cli.Template.add_operatingsystem(
+        {'id': new_template['id'], 'operatingsystem-id': os.id}
    )
+    new_template = module_target_sat.cli.Template.info({'id': new_template['id']})
    assert os_string in new_template['operating-systems']
-
-
-@pytest.mark.tier2
-def test_positive_remove_os_by_id(module_os_with_minor):
-    """Check if operating system can be removed from a template
-
-    :id: b5362565-6dce-4770-81e1-4fe3ec6f6cee
-
-    :expectedresults: Operating system is removed from template
-
-    :CaseLevel: Integration
-
-    :CaseImportance: Medium
-
-    :BZ: 1395229
-    """
-    template = make_template()
-    Template.add_operatingsystem(
-        {'id': template['id'], 'operatingsystem-id': module_os_with_minor.id}
-    )
-    template = Template.info({'id': template['id']})
-    os_string = (
-        f'{module_os_with_minor.name} {module_os_with_minor.major}.{module_os_with_minor.minor}'
+    module_target_sat.cli.Template.remove_operatingsystem(
+        {'id': new_template['id'], 'operatingsystem-id': os.id}
    )
-    assert os_string in template['operating-systems']
-    Template.remove_operatingsystem(
-        {'id': template['id'], 'operatingsystem-id': module_os_with_minor.id}
-    )
-    template = Template.info({'id': template['id']})
-    assert os_string not in template['operating-systems']
+    new_template = module_target_sat.cli.Template.info({'id': new_template['id']})
+    assert os_string not in new_template['operating-systems']


 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_create_with_content():
+def test_positive_create_with_content(module_target_sat):
    """Check if Template can be created with specific content

    :id: 0fcfc46d-5e97-4451-936a-e8684acac275

@@ -281,42 +191,25 @@

    """
    content = gen_string('alpha')
    name = gen_string('alpha')
-    template = make_template({'content': content, 'name': name})
+    template = module_target_sat.cli_factory.make_template({'content': content, 'name': name})
    assert template['name'] == name
-    template_content = Template.dump({'id': template['id']})
+    template_content = module_target_sat.cli.Template.dump({'id': template['id']})
    assert content in template_content


-@pytest.mark.tier1
-@pytest.mark.upgrade
-def test_positive_delete_by_id():
-    """Check if Template can be deleted
-
-    :id: 8e5245ee-13dd-44d4-8111-d4382cacf005
-
-    :expectedresults: Template is deleted
-
-    :CaseImportance: Critical
-    """
-    template = make_template()
-    Template.delete({'id': template['id']})
-    with pytest.raises(CLIReturnCodeError):
-        Template.info({'id': template['id']})
-
-
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_clone():
+def test_positive_clone(module_target_sat):
    """Assure ability to clone a provisioning template

    :id: 27d69c1e-0d83-4b99-8a3c-4f1bdec3d261

    :expectedresults: The template is cloned successfully
-
-    :CaseLevel: Integration
    """
    cloned_template_name = gen_string('alpha')
-    template = make_template()
-    result = Template.clone({'id': template['id'], 'new-name': cloned_template_name})
-    new_template = Template.info({'id': result[0]['id']})
+    template = module_target_sat.cli_factory.make_template()
+    result = module_target_sat.cli.Template.clone(
+        {'id': template['id'], 'new-name': cloned_template_name}
+    )
+    new_template = module_target_sat.cli.Template.info({'id': result[0]['id']})
    assert new_template['name'] == cloned_template_name
diff --git a/tests/foreman/cli/test_puppetclass.py b/tests/foreman/cli/test_puppetclass.py
index 9d849ff0ff1..3a8b50856bb 100644
--- a/tests/foreman/cli/test_puppetclass.py
+++ b/tests/foreman/cli/test_puppetclass.py
@@ -4,17 +4,12 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Puppet

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import pytest
diff --git a/tests/foreman/cli/test_realm.py b/tests/foreman/cli/test_realm.py
index 1d7eb9c4b20..c05dc1291f8 100644
--- a/tests/foreman/cli/test_realm.py
+++ b/tests/foreman/cli/test_realm.py
@@ -4,31 +4,23 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Authentication

 :Team: Endeavour

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import random

-import pytest
 from fauxfactory import gen_string
+import pytest

-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_realm
-from robottelo.cli.realm import Realm
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError


 @pytest.mark.tier1
-def test_negative_create_name_only():
+def test_negative_create_name_only(module_target_sat):
    """Create a realm with just a name parameter

    :id: 5606279f-0707-4d36-a307-b204ebb981ad

    :expectedresults: Realm creation fails, requires proxy_id and type
    """
    with pytest.raises(CLIFactoryError):
-        make_realm({'name': gen_string('alpha', random.randint(1, 30))})
+        module_target_sat.cli_factory.realm({'name': gen_string('alpha', random.randint(1, 30))})


 @pytest.mark.tier1
-def test_negative_create_invalid_id():
+def test_negative_create_invalid_id(module_target_sat):
    """Create a realm with an invalid proxy ID

    :id: 916bd1fb-4649-469c-b511-b0b07301a990

    :expectedresults: Realm creation fails, proxy_id must be numeric
    """
    with pytest.raises(CLIFactoryError):
-        make_realm(
+        module_target_sat.cli_factory.realm(
            {
                'name': gen_string('alpha', random.randint(1, 30)),
                'realm-proxy-id': gen_string('alphanumeric'),
@@ -58,7 +50,7 @@


 @pytest.mark.tier1
-def test_negative_create_invalid_realm_type():
+def test_negative_create_invalid_realm_type(module_target_sat):
    """Create a realm with an invalid type

    :id: 423a0969-9311-48d2-9220-040a42159a89

@@ -67,7 +59,7 @@
    e.g.
Red Hat Identity Management or Active Directory """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -77,7 +69,7 @@ def test_negative_create_invalid_realm_type(): @pytest.mark.tier1 -def test_negative_create_invalid_location(): +def test_negative_create_invalid_location(module_target_sat): """Create a realm with an invalid location :id: 95335c3a-413f-4156-b727-91b525738171 @@ -85,7 +77,7 @@ def test_negative_create_invalid_location(): :expectedresults: Realm creation fails, location not found """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -96,7 +88,7 @@ def test_negative_create_invalid_location(): @pytest.mark.tier1 -def test_negative_create_invalid_organization(): +def test_negative_create_invalid_organization(module_target_sat): """Create a realm with an invalid organization :id: c0ffbc6d-a2da-484b-9627-5454687a3abb @@ -104,7 +96,7 @@ def test_negative_create_invalid_organization(): :expectedresults: Realm creation fails, organization not found """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -115,7 +107,7 @@ def test_negative_create_invalid_organization(): @pytest.mark.tier2 -def test_negative_delete_nonexistent_realm_name(): +def test_negative_delete_nonexistent_realm_name(module_target_sat): """Delete a realm with a name that does not exist :id: 616db509-9643-4817-ba6b-f05cdb1cecb0 @@ -123,11 +115,11 @@ def test_negative_delete_nonexistent_realm_name(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.delete({'name': gen_string('alpha', random.randint(1, 30))}) + module_target_sat.cli.Realm.delete({'name': gen_string('alpha', random.randint(1, 30))}) @pytest.mark.tier2 -def test_negative_delete_nonexistent_realm_id(): +def test_negative_delete_nonexistent_realm_id(module_target_sat): """Delete a realm with an ID that does not exist :id: 70bb9d4e-7e71-479a-8c82-e6fcff88ea14 @@ -135,11 +127,11 @@ def test_negative_delete_nonexistent_realm_id(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.delete({'id': 0}) + module_target_sat.cli.Realm.delete({'id': 0}) @pytest.mark.tier2 -def test_negative_info_nonexistent_realm_name(): +def test_negative_info_nonexistent_realm_name(module_target_sat): """Get info for a realm with a name that does not exist :id: 24e4fbfa-7141-4f90-8c5d-eb88b162bd64 @@ -147,11 +139,11 @@ def test_negative_info_nonexistent_realm_name(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.info({'name': gen_string('alpha', random.randint(1, 30))}) + module_target_sat.cli.Realm.info({'name': gen_string('alpha', random.randint(1, 30))}) @pytest.mark.tier2 -def test_negative_info_nonexistent_realm_id(): +def test_negative_info_nonexistent_realm_id(module_target_sat): """Get info for a realm with an ID that does not exist :id: db8382eb-6d0b-4d6a-a9bf-38a462389f7b @@ -159,4 +151,4 @@ def test_negative_info_nonexistent_realm_id(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.info({'id': 0}) + module_target_sat.cli.Realm.info({'id': 0}) diff --git a/tests/foreman/cli/test_registration.py b/tests/foreman/cli/test_registration.py index dfd26184d35..89d560ab862 
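One consequence of the realm hunks above: `CLIFactoryError` and `CLIReturnCodeError` now live together in `robottelo.exceptions` instead of `robottelo.cli.factory` and `robottelo.cli.base`. A hedged compatibility sketch for out-of-tree code that has to import these classes across both layouts; the legacy paths are exactly the ones removed above:

```python
# Try the new layout first; fall back to the pre-refactor import paths.
try:
    from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
except ImportError:  # older robottelo releases
    from robottelo.cli.base import CLIReturnCodeError
    from robottelo.cli.factory import CLIFactoryError
```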
100644 --- a/tests/foreman/cli/test_registration.py +++ b/tests/foreman/cli/test_registration.py @@ -2,8 +2,6 @@ :Requirement: Registration -:CaseLevel: Acceptance - :CaseComponent: Registration :CaseAutomation: Automated @@ -12,14 +10,12 @@ :Team: Rocket -:TestType: Functional - -:Upstream: No """ import pytest from robottelo.config import settings from robottelo.constants import CLIENT_PORT +from robottelo.exceptions import CLIReturnCodeError pytestmark = pytest.mark.tier1 @@ -27,9 +23,9 @@ @pytest.mark.e2e @pytest.mark.no_containers def test_host_registration_end_to_end( - module_org, + module_entitlement_manifest_org, module_location, - module_ak_with_synced_repo, + module_activation_key, module_target_sat, module_capsule_configured, rhel_contenthost, @@ -47,8 +43,9 @@ def test_host_registration_end_to_end( :customerscenario: true """ + org = module_entitlement_manifest_org result = rhel_contenthost.register( - module_org, module_location, module_ak_with_synced_repo.name, module_target_sat + org, module_location, [module_activation_key.name], module_target_sat ) rc = 1 if rhel_contenthost.os_version.major == 6 else 0 @@ -56,20 +53,20 @@ def test_host_registration_end_to_end( # Verify server.hostname and server.port from subscription-manager config assert module_target_sat.hostname == rhel_contenthost.subscription_config['server']['hostname'] - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] + assert rhel_contenthost.subscription_config['server']['port'] == CLIENT_PORT # Update module_capsule_configured to include module_org/module_location module_target_sat.cli.Capsule.update( { 'name': module_capsule_configured.hostname, - 'organization-ids': module_org.id, + 'organization-ids': org.id, 'location-ids': module_location.id, } ) result = rhel_contenthost.register( - module_org, + org, module_location, - module_ak_with_synced_repo.name, + [module_activation_key.name], module_capsule_configured, force=True, ) @@ -81,7 +78,7 @@ def test_host_registration_end_to_end( module_capsule_configured.hostname == rhel_contenthost.subscription_config['server']['hostname'] ) - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] + assert rhel_contenthost.subscription_config['server']['port'] == CLIENT_PORT def test_upgrade_katello_ca_consumer_rpm( @@ -118,7 +115,7 @@ def test_upgrade_katello_ca_consumer_rpm( f'rpm -Uvh "http://{target_sat.hostname}/pub/{consumer_cert_name}-1.0-1.noarch.rpm"' ) # Check server URL is not Red Hat CDN's "subscription.rhsm.redhat.com" - assert 'subscription.rhsm.redhat.com' != vm.subscription_config['server']['hostname'] + assert vm.subscription_config['server']['hostname'] != 'subscription.rhsm.redhat.com' assert target_sat.hostname == vm.subscription_config['server']['hostname'] # Get consumer cert source file @@ -139,7 +136,7 @@ def test_upgrade_katello_ca_consumer_rpm( # Install new rpmbuild/RPMS/noarch/katello-ca-consumer-*-2.noarch.rpm assert vm.execute(f'yum install -y rpmbuild/RPMS/noarch/{new_consumer_cert_rpm}') # Check server URL is not Red Hat CDN's "subscription.rhsm.redhat.com" - assert 'subscription.rhsm.redhat.com' != vm.subscription_config['server']['hostname'] + assert vm.subscription_config['server']['hostname'] != 'subscription.rhsm.redhat.com' assert target_sat.hostname == vm.subscription_config['server']['hostname'] # Register as final check @@ -147,3 +144,40 @@ def test_upgrade_katello_ca_consumer_rpm( result = vm.execute('subscription-manager identity') # Result will be 0 if registered assert 
result.status == 0 + + +@pytest.mark.rhel_ver_match('[^6]') +@pytest.mark.tier3 +def test_negative_register_twice(module_ak_with_cv, module_org, rhel_contenthost, target_sat): + """Attempt to register a host twice to Satellite + + :id: 0af81129-cd69-4fa7-a128-9e8fcf2d03b1 + + :expectedresults: host cannot be registered twice + + :parametrized: yes + """ + rhel_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) + assert rhel_contenthost.subscribed + result = rhel_contenthost.register( + module_org, None, module_ak_with_cv.name, target_sat, force=False + ) + # host being already registered. + assert result.status == 1 + assert 'This system is already registered' in str(result.stderr) + + +@pytest.mark.tier1 +def test_negative_global_registration_without_ak(module_target_sat): + """Attempt to register a host without ActivationKey + + :id: e48a6260-97e0-4234-a69c-77bbbcde85df + + :expectedresults: Generate command is disabled without ActivationKey + """ + with pytest.raises(CLIReturnCodeError) as context: + module_target_sat.cli.HostRegistration.generate_command(options=None) + assert ( + 'Failed to generate registration command:\n Missing activation key!' + in context.value.message + ) diff --git a/tests/foreman/cli/test_remoteexecution.py b/tests/foreman/cli/test_remoteexecution.py index e00ff969a35..6c2e2bab0a5 100644 --- a/tests/foreman/cli/test_remoteexecution.py +++ b/tests/foreman/cli/test_remoteexecution.py @@ -4,115 +4,69 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from datetime import datetime -from datetime import timedelta +from calendar import monthrange +from datetime import datetime, timedelta from time import sleep -import pytest -from broker import Broker +from dateutil.relativedelta import FR, relativedelta from fauxfactory import gen_string -from nailgun import entities +import pytest from wait_for import wait_for -from robottelo import constants -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_job_invocation -from robottelo.cli.factory import make_job_invocation_with_credentials -from robottelo.cli.factory import make_job_template -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.filter import Filter -from robottelo.cli.globalparam import GlobalParameter from robottelo.cli.host import Host -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.recurring_logic import RecurringLogic -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.task import Task -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET -from robottelo.hosts import ContentHost -from robottelo.logging import logger from robottelo.utils import ohsnap -@pytest.fixture() -def fixture_sca_vmsetup(request, module_sca_manifest_org, target_sat): - """Create VM and register content host to Simple Content Access organization""" - if '_count' in request.param.keys(): - with Broker( - nick=request.param['nick'], - host_class=ContentHost, - _count=request.param['_count'], - ) as clients: - for client in clients: - client.configure_rex(satellite=target_sat, org=module_sca_manifest_org) - yield clients - else: - with 
Broker(nick=request.param['nick'], host_class=ContentHost) as client: - client.configure_rex(satellite=target_sat, org=module_sca_manifest_org) - yield client - - -@pytest.fixture() -def fixture_enable_receptor_repos(request, target_sat): - """Enable RHSCL repo required by receptor installer""" - target_sat.enable_repo(constants.REPOS['rhscl7']['id']) - target_sat.enable_repo(constants.REPOS['rhae2']['id']) - target_sat.enable_repo(constants.REPOS['rhs7']['id']) - - -@pytest.fixture() +@pytest.fixture def infra_host(request, target_sat, module_capsule_configured): infra_hosts = {'target_sat': target_sat, 'module_capsule_configured': module_capsule_configured} - yield infra_hosts[request.param] + return infra_hosts[request.param] -def assert_job_invocation_result(invocation_command_id, client_hostname, expected_result='success'): +def assert_job_invocation_result( + sat, invocation_command_id, client_hostname, expected_result='success' +): """Asserts the job invocation finished with the expected result and fetches job output when error occurs. Result is one of: success, pending, error, warning""" - result = JobInvocation.info({'id': invocation_command_id}) + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) try: assert result[expected_result] == '1' - except AssertionError: + except AssertionError as err: raise AssertionError( 'host output: {}'.format( ' '.join( - JobInvocation.get_output({'id': invocation_command_id, 'host': client_hostname}) + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) ) ) - ) + ) from err -def assert_job_invocation_status(invocation_command_id, client_hostname, status): +def assert_job_invocation_status(sat, invocation_command_id, client_hostname, status): """Asserts the job invocation status and fetches job output when error occurs. 
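Besides threading the `sat` handle through, the helper rewrite above switches to `raise ... from err`, which makes the link to the original assertion failure explicit rather than leaving it implicit. A standalone illustration of that behavior, independent of robottelo:

```python
def check(result: dict) -> None:
    """Mimic the helpers above: re-raise with job output attached."""
    try:
        assert result['success'] == '1'
    except AssertionError as err:
        # `from err` keeps the original failure reachable as __cause__,
        # so pytest prints both tracebacks.
        raise AssertionError(f"host output: {result.get('output', '')}") from err


try:
    check({'success': '0', 'output': 'boom'})
except AssertionError as exc:
    assert isinstance(exc.__cause__, AssertionError)
    assert 'boom' in str(exc)
```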
Status is one of: queued, stopped, running, paused""" - result = JobInvocation.info({'id': invocation_command_id}) + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) try: assert result['status'] == status - except AssertionError: + except AssertionError as err: raise AssertionError( 'host output: {}'.format( ' '.join( - JobInvocation.get_output({'id': invocation_command_id, 'host': client_hostname}) + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) ) ) - ) + ) from err class TestRemoteExecution: @@ -122,7 +76,9 @@ class TestRemoteExecution: @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.rhel_ver_list([8]) - def test_positive_run_default_job_template_by_ip(self, rex_contenthost): + def test_positive_run_default_job_template_by_ip( + self, module_org, rex_contenthost, module_target_sat + ): """Run default template on host connected by ip and list task :id: 811c7747-bec6-4a2d-8e5c-b5045d3fbc0d @@ -130,7 +86,7 @@ def test_positive_run_default_job_template_by_ip(self, rex_contenthost): :expectedresults: Verify the job was successfully ran against the host and task can be listed by name and ID - :BZ: 1647582 + :BZ: 1647582, 1896628 :customerscenario: true @@ -138,23 +94,32 @@ def test_positive_run_default_job_template_by_ip(self, rex_contenthost): """ client = rex_contenthost command = f'echo {gen_string("alpha")}' - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', 'search-query': f"name ~ {client.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - task = Task.list_tasks({'search': command})[0] - search = Task.list_tasks({'search': f'id={task["id"]}'}) + assert_job_invocation_result(module_target_sat, invocation_command['id'], client.hostname) + task = module_target_sat.cli.Task.list_tasks({'search': command})[0] + search = module_target_sat.cli.Task.list_tasks({'search': f'id={task["id"]}'}) assert search[0]['action'] == task['action'] + out = module_target_sat.cli.JobInvocation.get_output( + { + 'id': invocation_command['id'], + 'host': client.hostname, + 'organization-id': module_org.id, + } + ) + assert 'Exit' in out + assert 'Internal Server Error' not in out @pytest.mark.tier3 @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.rhel_ver_list([7, 8, 9]) - def test_positive_run_job_effective_user_by_ip(self, rex_contenthost): + def test_positive_run_job_effective_user_by_ip(self, rex_contenthost, module_target_sat): """Run default job template as effective user on a host by ip :id: 0cd75cab-f699-47e6-94d3-4477d2a94bb7 @@ -170,16 +135,16 @@ def test_positive_run_job_effective_user_by_ip(self, rex_contenthost): # create a user on client via remote job username = gen_string('alpha') filename = gen_string('alpha') - make_user_job = make_job_invocation( + make_user_job = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=useradd -m {username}", 'search-query': f"name ~ {client.hostname}", } ) - assert_job_invocation_result(make_user_job['id'], client.hostname) + assert_job_invocation_result(module_target_sat, make_user_job['id'], client.hostname) # create a file as new user - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=touch 
/home/{username}/{filename}", @@ -187,7 +152,7 @@ def test_positive_run_job_effective_user_by_ip(self, rex_contenthost): 'effective-user': f'{username}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(module_target_sat, invocation_command['id'], client.hostname) # check the file owner result = client.execute( f'''stat -c '%U' /home/{username}/{filename}''', @@ -218,20 +183,19 @@ def test_positive_run_custom_job_template_by_ip(self, rex_contenthost, module_or template_file = 'template_file.txt' target_sat.execute(f'echo "echo Enforcing" > {template_file}') template_name = gen_string('alpha', 7) - make_job_template( + target_sat.cli_factory.job_template( {'organizations': self.org.name, 'name': template_name, 'file': template_file} ) - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.job_invocation( {'job-template': template_name, 'search-query': f'name ~ {client.hostname}'} ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) @pytest.mark.tier3 @pytest.mark.upgrade - @pytest.mark.no_containers @pytest.mark.rhel_ver_list([8]) def test_positive_run_default_job_template_multiple_hosts_by_ip( - self, registered_hosts, module_org + self, rex_contenthosts, module_target_sat ): """Run default job template against multiple hosts by ip @@ -241,8 +205,8 @@ def test_positive_run_default_job_template_multiple_hosts_by_ip( :parametrized: yes """ - clients = registered_hosts - invocation_command = make_job_invocation( + clients = rex_contenthosts + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -256,21 +220,24 @@ def test_positive_run_default_job_template_multiple_hosts_by_ip( 'host output from {}: {}'.format( vm.hostname, ' '.join( - JobInvocation.get_output( + module_target_sat.cli.JobInvocation.get_output( {'id': invocation_command['id'], 'host': vm.hostname} ) ), ) ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '2', output_msgs @pytest.mark.tier3 + @pytest.mark.no_containers @pytest.mark.rhel_ver_list([8]) @pytest.mark.skipif( (not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url' ) - def test_positive_install_multiple_packages_with_a_job_by_ip(self, rex_contenthost, module_org): + def test_positive_install_multiple_packages_with_a_job_by_ip( + self, rhel_contenthost, module_org, module_ak_with_cv, target_sat + ): """Run job to install several packages on host by ip :id: 8b73033f-83c9-4024-83c3-5e442a79d320 @@ -280,44 +247,57 @@ def test_positive_install_multiple_packages_with_a_job_by_ip(self, rex_contentho :parametrized: yes """ - self.org = module_org - client = rex_contenthost + client = rhel_contenthost packages = ['monkey', 'panda', 'seal'] - # Create a custom repo - repo = entities.Repository( - content_type='yum', - product=entities.Product(organization=self.org).create(), - url=settings.repos.yum_3.url, - ).create() - repo.sync() - prod = repo.product.read() - subs = entities.Subscription(organization=self.org).search( - query={'search': f'name={prod.name}'} + client.register( + module_org, + None, + module_ak_with_cv.name, + target_sat, + repo=settings.repos.yum_3.url, ) - assert len(subs) > 0, 'No subscriptions matching the product 
returned' - - ak = entities.ActivationKey( - organization=self.org, - content_view=self.org.default_content_view, - environment=self.org.library, - ).create() - ak.add_subscriptions(data={'subscriptions': [{'id': subs[0].id}]}) - client.register_contenthost(org=self.org.label, activation_key=ak.name) - - invocation_command = make_job_invocation( + # Install packages + invocation_command = target_sat.cli_factory.job_invocation( { 'job-template': 'Install Package - Katello Script Default', 'inputs': 'package={} {} {}'.format(*packages), 'search-query': f'name ~ {client.hostname}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run(f'rpm -q {" ".join(packages)}') assert result.status == 0 + # Update packages + pre_versions = result.stdout.splitlines() + result = client.run(f'dnf -y downgrade {" ".join(packages)}') + assert result.status == 0 + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Update Package - Katello Script Default', + 'inputs': f'package={" ".join(packages)}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + post_versions = client.run(f'rpm -q {" ".join(packages)}').stdout.splitlines() + assert set(pre_versions) == set(post_versions) + # Remove packages + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Remove Package - Katello Script Default', + 'inputs': f'package={" ".join(packages)}', + 'search-query': f'name ~ {client.hostname}', + } + ) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) + result = client.run(f'rpm -q {" ".join(packages)}') + assert result.status == len(packages) @pytest.mark.tier3 @pytest.mark.rhel_ver_list([8]) - def test_positive_run_recurring_job_with_max_iterations_by_ip(self, rex_contenthost): + def test_positive_run_recurring_job_with_max_iterations_by_ip( + self, rex_contenthost, target_sat + ): """Run default job template multiple times with max iteration by ip :id: 0a3d1627-95d9-42ab-9478-a908f2a7c509 @@ -328,7 +308,7 @@ def test_positive_run_recurring_job_with_max_iterations_by_ip(self, rex_contenth :parametrized: yes """ client = rex_contenthost - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -337,13 +317,100 @@ def test_positive_run_recurring_job_with_max_iterations_by_ip(self, rex_contenth 'max-iteration': 2, # just two runs } ) - result = JobInvocation.info({'id': invocation_command['id']}) - assert_job_invocation_status(invocation_command['id'], client.hostname, 'queued') + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert_job_invocation_status( + target_sat, invocation_command['id'], client.hostname, 'queued' + ) sleep(150) - rec_logic = RecurringLogic.info({'id': result['recurring-logic-id']}) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) assert rec_logic['state'] == 'finished' assert rec_logic['iteration'] == '2' + @pytest.mark.tier3 + @pytest.mark.rhel_ver_list([8]) + def test_positive_time_expressions(self, rex_contenthost, target_sat): + """Test various expressions for extended cronline syntax + + :id: 584e7b27-9484-436a-b850-11acb900a7d8 + + :expectedresults: Verify the job was scheduled to the 
expected + iteration + + :BZ: 1967030 + + :customerscenario: true + + """ + client = rex_contenthost + today = datetime.today() + hour = datetime.utcnow().hour + last_day_of_month = monthrange(today.year, today.month)[1] + days_to = (2 - today.weekday()) % 7 + # cronline uses https://github.com/floraison/fugit + fugit_expressions = [ + ['@yearly', f'{today.year + 1}/01/01 00:00:00'], + [ + '@monthly', + f'{(today + relativedelta(months=+1)).strftime("%Y/%m")}/01 00:00:00', + ], + [ + '@weekly', + f'{(today + timedelta(days=-today.weekday() +6)).strftime("%Y/%m/%d")} 00:00:00', + ], + [ + '@midnight', + f'{(today + timedelta(days=1)).strftime("%Y/%m/%d")} 00:00:00', + ], + [ + '@hourly', + f'{(datetime.utcnow() + timedelta(hours=1)).strftime("%Y/%m/%d %H")}:00:00', + ], + [ + '0 0 * * wed-fri', + f'{(today + timedelta(days=(days_to if days_to > 0 else 1))).strftime("%Y/%m/%d")} ' + '00:00:00', + ], + # 23 mins after every other hour + [ + '23 0-23/2 * * *', + f'{today.strftime("%Y/%m/%d")} ' + f'{ (str(hour if hour % 2 == 0 else hour + 1)).rjust(2,"0") }:23:00', + ], + # last day of month + [ + '0 0 last * *', + f'{today.strftime("%Y/%m")}/{last_day_of_month} 00:00:00', + ], + # last 7 days of month + [ + '0 0 -7-L * *', + f'{today.strftime("%Y/%m")}/{last_day_of_month-6} 00:00:00', + ], + # last friday of month at 7 + [ + '0 7 * * fri#-1', + f'{(today+relativedelta(day=31, weekday=FR(-1))).strftime("%Y/%m/%d")} 07:00:00', + ], + ] + for exp in fugit_expressions: + invocation_command = target_sat.cli_factory.job_invocation( + { + 'job-template': 'Run Command - Script Default', + 'inputs': 'command=ls', + 'search-query': f"name ~ {client.hostname}", + 'cron-line': exp[0], + 'max-iteration': 1, + } + ) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) + assert_job_invocation_status( + target_sat, invocation_command['id'], client.hostname, 'queued' + ) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) + assert ( + rec_logic['next-occurrence'] == exp[1] + ), f'Job was not scheduled as expected using {exp[0]}' + @pytest.mark.tier3 @pytest.mark.rhel_ver_list([8]) def test_positive_run_scheduled_job_template_by_ip(self, rex_contenthost, target_sat): @@ -368,7 +435,7 @@ def test_positive_run_scheduled_job_template_by_ip(self, rex_contenthost, target 'parameter-type': 'boolean', } ) - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -379,442 +446,106 @@ def test_positive_run_scheduled_job_template_by_ip(self, rex_contenthost, target # Wait until the job runs pending_state = '1' while pending_state != '0': - invocation_info = JobInvocation.info({'id': invocation_command['id']}) + invocation_info = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) pending_state = invocation_info['pending'] sleep(30) - assert_job_invocation_result(invocation_command['id'], client.hostname) - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.skip("Receptor plugin is deprecated/removed for Satellite >= 6.11") - def test_positive_run_receptor_installer( - self, target_sat, subscribe_satellite, fixture_enable_receptor_repos - ): - """Run Receptor installer ("Configure Cloud Connector") - - :CaseComponent: RHCloud-CloudConnector - - :Team: Platform - - :id: 811c7747-bec6-1a2d-8e5c-b5045d3fbc0d - - :expectedresults: The job passes, installs Receptor that peers with c.r.c - - :BZ: 1818076 - """ - result = 
target_sat.execute('stat /etc/receptor/*/receptor.conf') - if result.status == 0: - pytest.skip( - 'Cloud Connector has already been configured on this system. ' - 'It is possible to reconfigure it but then the test would not really ' - 'check if everything is correctly configured from scratch. Skipping.' - ) - # Copy foreman-proxy user's key to root@localhost user's authorized_keys - target_sat.add_rex_key(satellite=target_sat) - - # Set Host parameter source_display_name to something random. - # To avoid 'name has already been taken' error when run multiple times - # on a machine with the same hostname. - host_id = Host.info({'name': target_sat.hostname})['id'] - Host.set_parameter( - {'host-id': host_id, 'name': 'source_display_name', 'value': gen_string('alpha')} - ) - - template_name = 'Configure Cloud Connector' - invocation = make_job_invocation( - { - 'async': True, - 'job-template': template_name, - 'inputs': f'satellite_user="{settings.server.admin_username}",\ - satellite_password="{settings.server.admin_password}"', - 'search-query': f'name ~ {target_sat.hostname}', - } - ) - invocation_id = invocation['id'] - wait_for( - lambda: entities.JobInvocation(id=invocation_id).read().status_label - in ['succeeded', 'failed'], - timeout='1500s', - ) - - result = JobInvocation.get_output({'id': invocation_id, 'host': target_sat.hostname}) - logger.debug(f'Invocation output>>\n{result}\n<<End of invocation output') - # if installation fails, it's often due to a missing rhscl repo -> print enabled repos - repolist = target_sat.execute('yum repolist') - logger.debug(f'Repolist>>\n{repolist}\n<<End of repolist') - - assert entities.JobInvocation(id=invocation_id).read().status == 0 - assert 'project-receptor.satellite_receptor_installer' in result - assert 'Exit status: 0' in result - # check that the receptor conf file is readable only by the receptor user and root - result = target_sat.execute('stat /etc/receptor/*/receptor.conf --format "%a:%U"') - assert all(filestats == '400:foreman-proxy' for filestats in result.stdout.strip().split('\n')) - result = target_sat.execute('ls -l /etc/receptor/*/receptor.conf | wc -l') - assert int(result.stdout.strip()) >= 1 - - -class TestAnsibleREX: - """Test class for remote execution via Ansible""" - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.pit_client - @pytest.mark.pit_server - @pytest.mark.rhel_ver_list([7, 8, 9]) - def test_positive_run_effective_user_job(self, rex_contenthost): - """Tests Ansible REX job having effective user runs successfully - - :id: a5fa20d8-c2bd-4bbf-a6dc-bf307b59dd8c - - :Steps: - - 0. Create a VM and register to SAT and prepare for REX (ssh key) - - 1. Run Ansible Command job for the host to create a user - - 2. Run Ansible Command job using effective user - - 3. 
Check the job result at the host is done under that user - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :CaseLevel: System - - :parametrized: yes - """ - client = rex_contenthost - # create a user on client via remote job - username = gen_string('alpha') - filename = gen_string('alpha') - make_user_job = make_job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': f"command=useradd -m {username}", - 'search-query': f"name ~ {client.hostname}", - } - ) - assert_job_invocation_result(make_user_job['id'], client.hostname) - # create a file as new user - invocation_command = make_job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': f"command=touch /home/{username}/{filename}", - 'search-query': f"name ~ {client.hostname}", - 'effective-user': f'{username}', - } - ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - # check the file owner - result = client.execute( - f'''stat -c '%U' /home/{username}/{filename}''', - ) - # assert the file is owned by the effective user - assert username == result.stdout.strip('\n'), "file ownership mismatch" + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.rhel_ver_list([8]) - def test_positive_run_reccuring_job(self, rex_contenthost): - """Tests Ansible REX reccuring job runs successfully multiple times + @pytest.mark.rhel_ver_list([8, 9]) + def test_recurring_with_unreachable_host(self, module_target_sat, rhel_contenthost): + """Run a recurring task against a host that is not reachable and verify it gets rescheduled - :id: 49b0d31d-58f9-47f1-aa5d-561a1dcb0d66 + :id: 570f6d75-6bbf-40b0-a1df-6c26b588dca8 - :Steps: + :expectedresults: The job is being rescheduled indefinitely even though it fails - 0. Create a VM and register to SAT and prepare for REX (ssh key) + :BZ: 1784254 - 1. Run recurring Ansible Command job for the host - - 2. Check the multiple job results at the host - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :CaseLevel: System + :customerscenario: true :parametrized: yes """ - client = rex_contenthost - invocation_command = make_job_invocation( + cli = module_target_sat.cli + host = module_target_sat.cli_factory.make_fake_host() + # shutdown the host and wait for it to surely be unreachable + rhel_contenthost.execute("shutdown -h +1") # shutdown after one minute + sleep(120) + invocation = module_target_sat.cli_factory.job_invocation( { - 'job-template': 'Run Command - Ansible Default', - 'inputs': 'command=ls', - 'search-query': f"name ~ {client.hostname}", + 'job-template': 'Run Command - Script Default', + 'inputs': 'command=echo this wont ever run', + 'search-query': f'name ~ {host.name}', 'cron-line': '* * * * *', # every minute - 'max-iteration': 2, # just two runs } ) - result = JobInvocation.info({'id': invocation_command['id']}) - sleep(150) - rec_logic = RecurringLogic.info({'id': result['recurring-logic-id']}) - assert rec_logic['state'] == 'finished' - assert rec_logic['iteration'] == '2' - - @pytest.mark.tier3 - @pytest.mark.no_containers - def test_positive_run_concurrent_jobs(self, registered_hosts, module_org): - """Tests Ansible REX concurent jobs without batch trigger - - :id: ad0f108c-03f2-49c7-8732-b1056570567b - - :Steps: - - 0. Create 2 hosts, disable foreman_tasks_proxy_batch_trigger - - 1. 
Run Ansible Command job with concurrency-setting - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :customerscenario: true - - :CaseLevel: System - - :BZ: 1817320 - - :parametrized: yes - """ - param_name = 'foreman_tasks_proxy_batch_trigger' - GlobalParameter().set({'name': param_name, 'value': 'false'}) - clients = registered_hosts - output_msgs = [] - invocation_command = make_job_invocation( - { - 'job-template': 'Run Command - Ansible Default', - 'inputs': 'command=ls', - 'search-query': f'name ~ {clients[0].hostname} or name ~ {clients[1].hostname}', - 'concurrency-level': 2, - } + cli.RecurringLogic.info( + {'id': cli.JobInvocation.info({'id': invocation.id})['recurring-logic-id']} ) - for vm in clients: - output_msgs.append( - 'host output from {}: {}'.format( - vm.hostname, - ' '.join( - JobInvocation.get_output( - {'id': invocation_command['id'], 'host': vm.hostname} - ) - ), - ) + # wait for the third task to be planned which verifies the BZ + wait_for( + lambda: int( + cli.RecurringLogic.info( + {'id': cli.JobInvocation.info({'id': invocation.id})['recurring-logic-id']} + )['task-count'] ) - result = JobInvocation.info({'id': invocation_command['id']}) - assert result['success'] == '2', output_msgs - GlobalParameter().delete({'name': param_name}) - assert len(GlobalParameter().list({'search': param_name})) == 0 - - @pytest.mark.tier3 - @pytest.mark.upgrade - @pytest.mark.e2e - @pytest.mark.pit_server - @pytest.mark.rhel_ver_match('[^6].*') - @pytest.mark.skipif( - (not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url' - ) - def test_positive_run_packages_and_services_job(self, rex_contenthost, module_org): - """Tests Ansible REX job can install packages and start services - - :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42 - - :Steps: - - 0. Create a VM and register to SAT and prepare for REX (ssh key) - - 1. Run Ansible Package job for the host to install a package - - 2. Check the package is present at the host - - 3. Run Ansible Service job for the host to start a service - - 4. 
Check the service is started on the host - - :expectedresults: multiple asserts along the code - - :CaseAutomation: Automated - - :CaseLevel: System - - :bz: 1872688, 1811166 - - :CaseImportance: Critical - - :customerscenario: true - - :parametrized: yes - """ - self.org = module_org - client = rex_contenthost - packages = ['tapir'] - # Create a custom repo - repo = entities.Repository( - content_type='yum', - product=entities.Product(organization=self.org).create(), - url=settings.repos.yum_3.url, - ).create() - repo.sync() - prod = repo.product.read() - subs = entities.Subscription(organization=self.org).search( - query={'search': f'name={prod.name}'} - ) - assert len(subs), 'No subscriptions matching the product returned' - ak = entities.ActivationKey( - organization=self.org, - content_view=self.org.default_content_view, - environment=self.org.library, - ).create() - ak.add_subscriptions(data={'subscriptions': [{'id': subs[0].id}]}) - client.register_contenthost(org=self.org.label, activation_key=ak.name) - - # install package - invocation_command = make_job_invocation( - { - 'job-template': 'Package Action - Ansible Default', - 'inputs': 'state=latest, name={}'.format(*packages), - 'search-query': f'name ~ {client.hostname}', - } + > 2, + timeout=180, + delay=10, ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - result = client.run(f'rpm -q {" ".join(packages)}') - assert result.status == 0 - - # stop a service - service = "rsyslog" - invocation_command = make_job_invocation( - { - 'job-template': 'Service Action - Ansible Default', - 'inputs': f'state=stopped, name={service}', - 'search-query': f"name ~ {client.hostname}", - } - ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - result = client.execute(f'systemctl status {service}') - assert result.status == 3 - - # start it again - invocation_command = make_job_invocation( - { - 'job-template': 'Service Action - Ansible Default', - 'inputs': f'state=started, name={service}', - 'search-query': f'name ~ {client.hostname}', - } - ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - result = client.execute(f'systemctl status {service}') - assert result.status == 0 - - @pytest.mark.tier3 - @pytest.mark.parametrize( - 'fixture_sca_vmsetup', [{'nick': 'rhel8'}], ids=['rhel8'], indirect=True - ) - def test_positive_install_ansible_collection( - self, fixture_sca_vmsetup, module_sca_manifest_org - ): - """Test whether Ansible collection can be installed via REX - - :Steps: - 1. Upload a manifest. - 2. Enable and sync Ansible repository. - 3. Register content host to Satellite. - 4. Enable Ansible repo on content host. - 5. Install ansible package. - 6. Run REX job to install Ansible collection on content host. 
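Returning to the new `test_positive_time_expressions` added earlier in this file's diff: its expected next occurrences are computed with `calendar.monthrange` and `dateutil.relativedelta`. A standalone sketch of the two least obvious expectations, mirroring the expressions used in the test and, like the test, ignoring month-boundary edge cases:

```python
from calendar import monthrange
from datetime import datetime

from dateutil.relativedelta import FR, relativedelta

today = datetime.today()
# '@monthly' resolves to midnight on the first day of the next month.
monthly = f'{(today + relativedelta(months=+1)).strftime("%Y/%m")}/01 00:00:00'
# '0 0 last * *' fires at midnight on the last day of the current month.
last_dom = monthrange(today.year, today.month)[1]
last_day = f'{today.strftime("%Y/%m")}/{last_dom} 00:00:00'
# '0 7 * * fri#-1' fires at 07:00 on the last Friday of the month:
# relativedelta(day=31, weekday=FR(-1)) clamps to month end, then steps
# back to the nearest Friday.
last_friday = (today + relativedelta(day=31, weekday=FR(-1))).strftime('%Y/%m/%d')
print(monthly, last_day, f'{last_friday} 07:00:00')
```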
- - :id: ad25aee5-4ea3-4743-a301-1c6271856f79 - - :CaseComponent: Ansible - - :Team: Rocket - """ - # Configure repository to prepare for installing ansible on host - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['rhae2.9_el8'], - 'organization-id': module_sca_manifest_org.id, - 'product': PRDS['rhae'], - 'releasever': '8', - } - ) - Repository.synchronize( - { - 'name': REPOS['rhae2.9_el8']['name'], - 'organization-id': module_sca_manifest_org.id, - 'product': PRDS['rhae'], - } - ) - client = fixture_sca_vmsetup - client.execute('subscription-manager refresh') - client.execute(f'subscription-manager repos --enable {REPOS["rhae2.9_el8"]["id"]}') - client.execute('dnf -y install ansible') - collection_job = make_job_invocation( - { - 'job-template': 'Ansible Collection - Install from Galaxy', - 'inputs': 'ansible_collections_list="oasis_roles.system"', - 'search-query': f'name ~ {client.hostname}', - } - ) - result = JobInvocation.info({'id': collection_job['id']}) - assert result['success'] == '1' - collection_path = client.execute('ls /etc/ansible/collections/ansible_collections').stdout - assert 'oasis_roles' in collection_path - - # Extend test with custom collections_path advanced input field - collection_job = make_job_invocation( - { - 'job-template': 'Ansible Collection - Install from Galaxy', - 'inputs': 'ansible_collections_list="oasis_roles.system", collections_path="~/"', - 'search-query': f'name ~ {client.hostname}', - } - ) - result = JobInvocation.info({'id': collection_job['id']}) - assert result['success'] == '1' - collection_path = client.execute('ls ~/ansible_collections').stdout - assert 'oasis_roles' in collection_path + # check that the first task indeed failed which verifies the test was done correctly + assert cli.JobInvocation.info({'id': invocation.id})['failed'] != '0' class TestRexUsers: """Tests related to remote execution users""" @pytest.fixture(scope='class') - def class_rexmanager_user(self, module_org): + def class_rexmanager_user(self, module_org, class_target_sat): """Creates a user with Remote Execution Manager role""" password = gen_string('alpha') rexmanager = gen_string('alpha') - make_user({'login': rexmanager, 'password': password, 'organization-ids': module_org.id}) - User.add_role({'login': rexmanager, 'role': 'Remote Execution Manager'}) - yield (rexmanager, password) + class_target_sat.cli_factory.user( + {'login': rexmanager, 'password': password, 'organization-ids': module_org.id} + ) + class_target_sat.cli.User.add_role( + {'login': rexmanager, 'role': 'Remote Execution Manager'} + ) + return (rexmanager, password) @pytest.fixture(scope='class') - def class_rexinfra_user(self, module_org): + def class_rexinfra_user(self, module_org, class_target_sat): """Creates a user with all Remote Execution related permissions""" password = gen_string('alpha') rexinfra = gen_string('alpha') - make_user({'login': rexinfra, 'password': password, 'organization-ids': module_org.id}) - role = make_role({'organization-ids': module_org.id}) + class_target_sat.cli_factory.user( + {'login': rexinfra, 'password': password, 'organization-ids': module_org.id} + ) + role = class_target_sat.cli_factory.make_role({'organization-ids': module_org.id}) invocation_permissions = [ permission['name'] - for permission in Filter.available_permissions( + for permission in class_target_sat.cli.Filter.available_permissions( {'search': 'resource_type=JobInvocation'} ) ] template_permissions = [ permission['name'] - for permission in 
Filter.available_permissions({'search': 'resource_type=JobTemplate'}) + for permission in class_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=JobTemplate'} + ) ] permissions = ','.join(invocation_permissions) - make_filter({'role-id': role['id'], 'permissions': permissions}) + class_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) permissions = ','.join(template_permissions) # needs execute_jobs_on_infrastructure_host permission - make_filter({'role-id': role['id'], 'permissions': permissions}) - User.add_role({'login': rexinfra, 'role': role['name']}) - User.add_role({'login': rexinfra, 'role': 'Remote Execution Manager'}) - yield (rexinfra, password) + class_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) + class_target_sat.cli.User.add_role({'login': rexinfra, 'role': role['name']}) + class_target_sat.cli.User.add_role({'login': rexinfra, 'role': 'Remote Execution Manager'}) + return (rexinfra, password) @pytest.mark.tier3 @pytest.mark.upgrade @@ -856,11 +587,13 @@ def test_positive_rex_against_infra_hosts( """ client = rex_contenthost infra_host.add_rex_key(satellite=target_sat) - Host.update({'name': infra_host.hostname, 'new-organization-id': module_org.id}) + target_sat.cli.Host.update( + {'name': infra_host.hostname, 'new-organization-id': module_org.id} + ) # run job as admin command = f"echo {gen_string('alpha')}" - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -871,14 +604,16 @@ def test_positive_rex_against_infra_hosts( hostnames = [client.hostname, infra_host.hostname] for hostname in hostnames: inv_output = ' '.join( - JobInvocation.get_output({'id': invocation_command['id'], 'host': hostname}) + target_sat.cli.JobInvocation.get_output( + {'id': invocation_command['id'], 'host': hostname} + ) ) output_msgs.append(f"host output from {hostname}: { inv_output }") - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '2', output_msgs # run job as regular rex user on all hosts - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.job_invocation_with_credentials( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -887,11 +622,11 @@ def test_positive_rex_against_infra_hosts( class_rexmanager_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '1' # run job as regular rex user just on infra hosts - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.job_invocation_with_credentials( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -899,11 +634,11 @@ def test_positive_rex_against_infra_hosts( }, class_rexmanager_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '0' # run job as rex user on Satellite - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.job_invocation_with_credentials( { 'job-template': 'Run Command - Script Default', 
'inputs': f'command={command}', @@ -911,7 +646,7 @@ def test_positive_rex_against_infra_hosts( }, class_rexinfra_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '1' @@ -964,14 +699,16 @@ def test_positive_run_job_on_host_registered_to_async_ssh_provider( assert result.status == 0, f'Failed to register host: {result.stderr}' # run script provider rex command, longer-running command is needed to # verify the connection is not shut down too soon - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=echo start; sleep 10; echo done', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) class TestPullProviderRex: @@ -981,6 +718,12 @@ class TestPullProviderRex: @pytest.mark.upgrade @pytest.mark.no_containers @pytest.mark.rhel_ver_match('[^6].*') + @pytest.mark.parametrize( + 'setting_update', + ['remote_execution_global_proxy=False'], + ids=["no_global_proxy"], + indirect=True, + ) def test_positive_run_job_on_host_converted_to_pull_provider( self, module_org, @@ -989,6 +732,7 @@ def test_positive_run_job_on_host_converted_to_pull_provider( module_target_sat, module_capsule_configured_mqtt, rhel_contenthost, + setting_update, ): """Run custom template on host converted to mqtt @@ -1037,48 +781,56 @@ def test_positive_run_job_on_host_converted_to_pull_provider( result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) - # check katello-agent runs along ygdrassil (SAT-1671) - result = rhel_contenthost.execute('systemctl status goferd') - assert result.status == 0, 'Failed to start goferd on client' - + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) # run Ansible rex command to prove ssh provider works, remove katello-agent - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { - 'job-template': 'Package Action - Ansible Default', - 'inputs': 'state=absent, name=katello-agent', + 'job-template': 'Remove Package - Katello Script Default', + 'inputs': 'package=katello-agent', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) # check katello-agent removal did not influence ygdrassil (SAT-1672) result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' - 
invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) - result = JobInvocation.info({'id': invocation_command['id']}) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.e2e @pytest.mark.no_containers @pytest.mark.rhel_ver_match('[^6].*') + @pytest.mark.parametrize( + 'setting_update', + ['remote_execution_global_proxy=False'], + ids=["no_global_proxy"], + indirect=True, + ) def test_positive_run_job_on_host_registered_to_pull_provider( self, module_org, @@ -1087,6 +839,7 @@ def test_positive_run_job_on_host_registered_to_pull_provider( module_ak_with_cv, module_capsule_configured_mqtt, rhel_contenthost, + setting_update, ): """Run custom template on host registered to mqtt, check effective user setting @@ -1131,27 +884,31 @@ def test_positive_run_job_on_host_registered_to_pull_provider( result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Service Action - Script Default', 'inputs': 'action=status, service=yggdrasild', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) # create user on host username = gen_string('alpha') filename = gen_string('alpha') - make_user_job = make_job_invocation( + make_user_job = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=useradd -m {username}", 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(make_user_job['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, make_user_job['id'], rhel_contenthost.hostname + ) # create a file as new user - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=touch /home/{username}/{filename}", @@ -1159,7 +916,9 @@ def test_positive_run_job_on_host_registered_to_pull_provider( 'effective-user': f'{username}', } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) # check the file owner result = rhel_contenthost.execute( f'''stat -c '%U' /home/{username}/{filename}''', @@ -1224,7 +983,7 @@ def test_positive_run_pull_job_on_offline_host( result = rhel_contenthost.execute('systemctl stop yggdrasild') assert result.status == 0, f'Failed to stop yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -1233,10 
+992,14 @@ def test_positive_run_pull_job_on_offline_host( } ) # assert the job is waiting to be picked up by client - assert_job_invocation_status(invocation_command['id'], rhel_contenthost.hostname, 'running') + assert_job_invocation_status( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname, 'running' + ) # start client on host result = rhel_contenthost.execute('systemctl start yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' # wait twice the mqtt_resend_interval (set in module_capsule_configured_mqtt) sleep(60) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) diff --git a/tests/foreman/cli/test_report.py b/tests/foreman/cli/test_report.py index b2d84d3adb8..ea4f14371bb 100644 --- a/tests/foreman/cli/test_report.py +++ b/tests/foreman/cli/test_report.py @@ -4,23 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import random import pytest +from wait_for import wait_for -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='module') @@ -84,14 +80,72 @@ def test_positive_install_configure_host( :BZ: 2126891, 2026239 """ puppet_infra_host = [session_puppet_enabled_sat, session_puppet_enabled_capsule] - for client, puppet_proxy in zip(content_hosts, puppet_infra_host): + for client, puppet_proxy in zip(content_hosts, puppet_infra_host, strict=True): client.configure_puppet(proxy_hostname=puppet_proxy.hostname) report = session_puppet_enabled_sat.cli.ConfigReport.list( - {'search': f'host~{client.hostname},origin=Puppet'} + {'search': f'host~{client.hostname}'} ) assert len(report) + assert report[0]['origin'] == 'Puppet' facts = session_puppet_enabled_sat.cli.Fact.list({'search': f'host~{client.hostname}'}) assert len(facts) + assert [f for f in facts if f['fact'] == 'puppetmaster_fqdn'][0][ 'value' ] == puppet_proxy.hostname session_puppet_enabled_sat.cli.ConfigReport.delete({'id': report[0]['id']}) with pytest.raises(CLIReturnCodeError): session_puppet_enabled_sat.cli.ConfigReport.info({'id': report[0]['id']}) + + +@pytest.mark.e2e +@pytest.mark.rhel_ver_match('[^6]') +def test_positive_run_puppet_agent_generate_report_when_no_message( + session_puppet_enabled_sat, rhel_contenthost +): + """Verify that puppet-agent can be installed from the sat-client repo + and configured to report back to the Satellite, and that the report contains the Puppet origin + + :id: 07777fbb-4f2e-4fab-ba5a-2b698f9b9f39 + + :setup: + 1. Satellite and Capsule with enabled puppet plugin. + 2. Blank RHEL content host + + :steps: + 1. Configure puppet on the content host. This creates sat-client repository, + installs puppet-agent, configures it, runs it to create the puppet cert, + signs it on the Satellite side and reruns it. + 2. Assert that a Config report is created in the Satellite for the content host + and has Puppet origin + 3. Assert that Facts were reported for the content host. + + :expectedresults: + 1. Configuration passes without errors. + 2. Config report is created and contains Puppet origin + 3. Facts are acquired.
+ + :customerscenario: true + + :BZ: 2192939, 2257327, 2257314 + :parametrized: yes + """ + sat = session_puppet_enabled_sat + client = rhel_contenthost + client.configure_puppet(proxy_hostname=sat.hostname, run_puppet_agent=False) + # Run either 'puppet agent --detailed-exitcodes' or 'systemctl restart puppet' + # to generate Puppet config report for host without any messages + assert client.execute('/opt/puppetlabs/bin/puppet agent --detailed-exitcodes').status == 0 + wait_for( + lambda: sat.cli.ConfigReport.list({'search': f'host~{client.hostname}'}) != [], + timeout=300, + delay=30, + ) + report = sat.cli.ConfigReport.list({'search': f'host~{client.hostname}'}) + assert len(report) + assert report[0]['origin'] == 'Puppet' + facts = sat.cli.Fact.list({'search': f'host~{client.hostname}'}) + assert len(facts) + assert [f for f in facts if f['fact'] == 'puppetmaster_fqdn'][0]['value'] == sat.hostname + sat.cli.ConfigReport.delete({'id': report[0]['id']}) + with pytest.raises(CLIReturnCodeError): + sat.cli.ConfigReport.info({'id': report[0]['id']}) diff --git a/tests/foreman/cli/test_reporttemplates.py b/tests/foreman/cli/test_reporttemplates.py index 151312a72a7..a27123201f1 100644 --- a/tests/foreman/cli/test_reporttemplates.py +++ b/tests/foreman/cli/test_reporttemplates.py @@ -3,100 +3,75 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Reporting :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from broker import Broker from fauxfactory import gen_alpha +import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import Base -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_activation_key -from robottelo.cli.factory import make_architecture -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_fake_host -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_medium -from robottelo.cli.factory import make_os -from robottelo.cli.factory import make_partition_table -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_report_template -from robottelo.cli.factory import make_repository -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_template_input -from robottelo.cli.factory import make_user -from robottelo.cli.factory import setup_org_for_a_custom_repo -from robottelo.cli.factory import setup_org_for_a_rh_repo -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.location import Location -from robottelo.cli.org import Org -from robottelo.cli.report_template import ReportTemplate -from robottelo.cli.repository import Repository -from robottelo.cli.settings import Settings -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import DEFAULT_LOC -from robottelo.constants import DEFAULT_ORG -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from 
robottelo.constants import PRDS -from robottelo.constants import REPORT_TEMPLATE_FILE -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + DEFAULT_LOC, + DEFAULT_ORG, + DEFAULT_SUBSCRIPTION_NAME, + FAKE_0_CUSTOM_PACKAGE_NAME, + FAKE_1_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE_NAME, + FAKE_2_CUSTOM_PACKAGE, + PRDS, + REPORT_TEMPLATE_FILE, + REPOS, + REPOSET, +) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost @pytest.fixture(scope='module') -def local_environment(module_entitlement_manifest_org): +def local_environment(module_entitlement_manifest_org, module_target_sat): """Create a lifecycle environment with CLI factory""" - return make_lifecycle_environment({'organization-id': module_entitlement_manifest_org.id}) + return module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_entitlement_manifest_org.id} + ) @pytest.fixture(scope='module') -def local_content_view(module_entitlement_manifest_org): +def local_content_view(module_entitlement_manifest_org, module_target_sat): """Create content view, repository, and product""" - new_product = make_product({'organization-id': module_entitlement_manifest_org.id}) - new_repo = make_repository({'product-id': new_product['id']}) - Repository.synchronize({'id': new_repo['id']}) - content_view = make_content_view({'organization-id': module_entitlement_manifest_org.id}) - ContentView.add_repository( + new_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_entitlement_manifest_org.id} + ) + new_repo = module_target_sat.cli_factory.make_repository({'product-id': new_product['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_entitlement_manifest_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': new_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) return content_view @pytest.fixture(scope='module') -def local_ak(module_entitlement_manifest_org, local_environment, local_content_view): +def local_ak( + module_entitlement_manifest_org, local_environment, local_content_view, module_target_sat +): """Promote a content view version and create an activation key with CLI Factory""" - cvv = ContentView.info({'id': local_content_view['id']})['versions'][0] - ContentView.version_promote( + cvv = module_target_sat.cli.ContentView.info({'id': local_content_view['id']})['versions'][0] + module_target_sat.cli.ContentView.version_promote( {'id': cvv['id'], 'to-lifecycle-environment-id': local_environment['id']} ) - return make_activation_key( + return module_target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': local_environment['id'], 'content-view': local_content_view['name'], @@ -107,19 +82,21 @@ def local_ak(module_entitlement_manifest_org, local_environment, local_content_v @pytest.fixture(scope='module') -def local_subscription(module_entitlement_manifest_org, local_ak): - for subscription in Subscription.list( +def local_subscription(module_entitlement_manifest_org, local_ak, module_target_sat): + for subscription in module_target_sat.cli.Subscription.list( {'organization-id': module_entitlement_manifest_org.id}, per_page=False ): if 
subscription['name'] == DEFAULT_SUBSCRIPTION_NAME: break - ActivationKey.add_subscription({'id': local_ak['id'], 'subscription-id': subscription['id']}) + module_target_sat.cli.ActivationKey.add_subscription( + {'id': local_ak['id'], 'subscription-id': subscription['id']} + ) return subscription @pytest.mark.tier2 -def test_positive_report_help(): +def test_positive_report_help(module_target_sat): """hammer level of help included in test: Base level hammer help includes report-templates, Command level hammer help contains usage details, @@ -138,11 +115,10 @@ def test_positive_report_help(): :expectedresults: report-templates command is included in help, report-templates command details are displayed, report-templates create command details are displayed - """ - command_output = Base().execute('--help') + command_output = module_target_sat.cli.Base().execute('--help') assert 'report-template' in command_output - command_output = Base().execute('report-template --help') + command_output = module_target_sat.cli.Base().execute('report-template --help') assert all( [ phrase in command_output @@ -155,7 +131,7 @@ def test_positive_report_help(): ] ] ) - command_output = Base().execute('report-template create --help') + command_output = module_target_sat.cli.Base().execute('report-template create --help') assert all( [ phrase in command_output @@ -166,7 +142,7 @@ def test_positive_report_help(): @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_crud_and_list(): +def test_positive_end_to_end_crud_and_list(target_sat): """CRUD test + list test for report templates :id: 2a143ddf-683f-49e2-badb-f9a387cfc53c @@ -194,34 +170,36 @@ def test_positive_end_to_end_crud_and_list(): """ # create name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = target_sat.cli_factory.report_template({'name': name}) assert report_template['name'] == name # list - create second template tmp_name = gen_alpha() - tmp_report_template = make_report_template({'name': tmp_name}) - result_list = ReportTemplate.list() + tmp_report_template = target_sat.cli_factory.report_template({'name': tmp_name}) + result_list = target_sat.cli.ReportTemplate.list() assert name in [rt['name'] for rt in result_list] # info - result = ReportTemplate.info({'id': report_template['id']}) + result = target_sat.cli.ReportTemplate.info({'id': report_template['id']}) assert name == result['name'] # update new_name = gen_alpha() - result = ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + result = target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) assert new_name == result[0]['name'] - rt_list = ReportTemplate.list() + rt_list = target_sat.cli.ReportTemplate.list() assert name not in [rt['name'] for rt in rt_list] # delete tmp - ReportTemplate.delete({'name': tmp_report_template['name']}) + target_sat.cli.ReportTemplate.delete({'name': tmp_report_template['name']}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.info({'id': tmp_report_template['id']}) + target_sat.cli.ReportTemplate.info({'id': tmp_report_template['id']}) @pytest.mark.tier1 -def test_positive_generate_report_nofilter_and_with_filter(): +def test_positive_generate_report_nofilter_and_with_filter(module_target_sat): """Generate Host Status report without filter and with filter :id: 5af03399-b918-468a-9306-1c76dda6a369 @@ -240,21 +218,23 @@ def test_positive_generate_report_nofilter_and_with_filter(): :CaseImportance: Critical """ host_name = 
gen_alpha() - host1 = make_fake_host({'name': host_name}) + host1 = module_target_sat.cli_factory.make_fake_host({'name': host_name}) host_name_2 = gen_alpha() - host2 = make_fake_host({'name': host_name_2}) + host2 = module_target_sat.cli_factory.make_fake_host({'name': host_name_2}) - result_list = ReportTemplate.list() + result_list = module_target_sat.cli.ReportTemplate.list() assert 'Host - Statuses' in [rt['name'] for rt in result_list] - rt_host_statuses = ReportTemplate.info({'name': 'Host - Statuses'}) - result_no_filter = ReportTemplate.generate({'name': rt_host_statuses['name']}) + rt_host_statuses = module_target_sat.cli.ReportTemplate.info({'name': 'Host - Statuses'}) + result_no_filter = module_target_sat.cli.ReportTemplate.generate( + {'name': rt_host_statuses['name']} + ) assert host1['name'] in [item.split(',')[0] for item in result_no_filter.splitlines()] assert host2['name'] in [item.split(',')[0] for item in result_no_filter.splitlines()] - result = ReportTemplate.generate( + result = module_target_sat.cli.ReportTemplate.generate( { 'name': rt_host_statuses['name'], 'inputs': ( @@ -267,7 +247,7 @@ def test_positive_generate_report_nofilter_and_with_filter(): @pytest.mark.tier2 -def test_positive_lock_and_unlock_report(): +def test_positive_lock_and_unlock_report(module_target_sat): """Lock and unlock report template :id: df306515-8798-4ce3-9430-6bc3bf9b9b33 @@ -284,19 +264,23 @@ def test_positive_lock_and_unlock_report(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1}) + report_template = module_target_sat.cli_factory.report_template({'name': name}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 1}) new_name = gen_alpha() with pytest.raises(CLIReturnCodeError): - ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) - ReportTemplate.update({'name': report_template['name'], 'locked': 0}) - result = ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 0}) + result = module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) assert result[0]['name'] == new_name @pytest.mark.tier2 -def test_positive_report_add_userinput(): +def test_positive_report_add_userinput(module_target_sat): """Add user input to template :id: 84b577db-144e-4761-a46e-e83887464986 @@ -308,20 +292,19 @@ def test_positive_report_add_userinput(): 1. hammer template-input create ... 
:expectedresults: User input is assigned to the report template - """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.report_template({'name': name}) ti_name = gen_alpha() - template_input = make_template_input( + template_input = module_target_sat.cli_factory.template_input( {'name': ti_name, 'input-type': 'user', 'template-id': report_template['id']} ) - result = ReportTemplate.info({'name': report_template['name']}) + result = module_target_sat.cli.ReportTemplate.info({'name': report_template['name']}) assert result['template-inputs'][0]['name'] == template_input['name'] @pytest.mark.tier2 -def test_positive_dump_report(): +def test_positive_dump_report(module_target_sat): """Export report template :id: 84b577db-144e-4761-a42e-a83887464986 @@ -338,13 +321,13 @@ """ name = gen_alpha() content = gen_alpha() - report_template = make_report_template({'name': name, 'content': content}) - result = ReportTemplate.dump({'id': report_template['id']}) + report_template = module_target_sat.cli_factory.report_template({'name': name, 'file': content}) + result = module_target_sat.cli.ReportTemplate.dump({'id': report_template['id']}) assert content in result @pytest.mark.tier2 -def test_positive_clone_locked_report(): +def test_positive_clone_locked_report(module_target_sat): """Clone locked report template :id: cc843731-b9c2-4fc9-9e15-d1ee5d967cda @@ -361,19 +344,21 @@ """ name = gen_alpha() - report_template = make_report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1, 'default': 1}) + report_template = module_target_sat.cli_factory.report_template({'name': name}) + module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'locked': 1, 'default': 1} + ) new_name = gen_alpha() - ReportTemplate.clone({'id': report_template['id'], 'new-name': new_name}) - result_list = ReportTemplate.list() + module_target_sat.cli.ReportTemplate.clone({'id': report_template['id'], 'new-name': new_name}) + result_list = module_target_sat.cli.ReportTemplate.list() assert new_name in [rt['name'] for rt in result_list] - result_info = ReportTemplate.info({'id': report_template['id']}) + result_info = module_target_sat.cli.ReportTemplate.info({'id': report_template['id']}) assert result_info['locked'] == 'yes' assert result_info['default'] == 'yes' @pytest.mark.tier2 -def test_positive_generate_report_sanitized(): +def test_positive_generate_report_sanitized(module_target_sat): """Generate report template where values containing commas are outputted, which might break CSV format :id: 84b577db-144e-4961-a42e-e93887464986 @@ -392,10 +377,10 @@ """ # create a name that has a comma in it, some randomized text, and no spaces.
os_name = gen_alpha(start='test', separator=',').replace(' ', '') - architecture = make_architecture() - partition_table = make_partition_table() - medium = make_medium() - os = make_os( + architecture = module_target_sat.cli_factory.make_architecture() + partition_table = module_target_sat.cli_factory.make_partition_table() + medium = module_target_sat.cli_factory.make_medium() + os = module_target_sat.cli_factory.make_os( { 'name': os_name, 'architecture-ids': architecture['id'], @@ -405,7 +390,7 @@ def test_positive_generate_report_sanitized(): ) host_name = gen_alpha() - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( { 'name': host_name, 'architecture-id': architecture['id'], @@ -415,9 +400,9 @@ def test_positive_generate_report_sanitized(): } ) - report_template = make_report_template({'content': REPORT_TEMPLATE_FILE}) + report_template = module_target_sat.cli_factory.report_template({'file': REPORT_TEMPLATE_FILE}) - result = ReportTemplate.generate({'name': report_template['name']}) + result = module_target_sat.cli.ReportTemplate.generate({'name': report_template['name']}) assert 'Name,Operating System' in result # verify header of custom template assert f'{host["name"]},"{host["operating-system"]["operating-system"]}"' in result @@ -445,7 +430,6 @@ def test_positive_applied_errata(): :expectedresults: 1. A report is generated with all applied errata listed 2,3. A report is generated asynchronously - """ @@ -491,7 +475,7 @@ def test_positive_generate_email_uncompressed(): @pytest.mark.tier2 -def test_negative_create_report_without_name(): +def test_negative_create_report_without_name(module_target_sat): """Try to create a report template with empty name :id: 84b577db-144e-4771-a42e-e93887464986 @@ -507,11 +491,11 @@ def test_negative_create_report_without_name(): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_report_template({'name': ''}) + module_target_sat.cli_factory.report_template({'name': ''}) @pytest.mark.tier2 -def test_negative_delete_locked_report(): +def test_negative_delete_locked_report(module_target_sat): """Try to delete a locked report template :id: 84b577db-144e-4871-a42e-e93887464986 @@ -527,16 +511,16 @@ def test_negative_delete_locked_report(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 1}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.delete({'name': report_template['name']}) + module_target_sat.cli.ReportTemplate.delete({'name': report_template['name']}) @pytest.mark.tier2 -def test_negative_bad_email(): +def test_negative_bad_email(module_target_sat): """Report can't be generated when incorrectly formed mail specified :id: a4ba77db-144e-4871-a42e-e93887464986 @@ -552,14 +536,16 @@ def test_negative_bad_email(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.report_template({'name': name}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.schedule({'name': report_template['name'], 'mail-to': gen_alpha()}) + module_target_sat.cli.ReportTemplate.schedule( + {'name': report_template['name'], 'mail-to': gen_alpha()} + ) @pytest.mark.tier3 -def 
test_negative_nonauthor_of_report_cant_download_it(): +def test_negative_nonauthor_of_report_cant_download_it(module_target_sat): """The resulting report should only be downloadable by the user that generated it or admin. Check. @@ -578,10 +564,10 @@ def test_negative_nonauthor_of_report_cant_download_it(): uname_viewer2 = gen_alpha() password = gen_alpha() - loc = Location.info({'name': DEFAULT_LOC}) - org = Org.info({'name': DEFAULT_ORG}) + loc = module_target_sat.cli.Location.info({'name': DEFAULT_LOC}) + org = module_target_sat.cli.Org.info({'name': DEFAULT_ORG}) - user1 = make_user( + user1 = module_target_sat.cli_factory.user( { 'login': uname_viewer, 'password': password, @@ -590,7 +576,7 @@ def test_negative_nonauthor_of_report_cant_download_it(): } ) - user2 = make_user( + user2 = module_target_sat.cli_factory.user( { 'login': uname_viewer2, 'password': password, @@ -599,62 +585,72 @@ def test_negative_nonauthor_of_report_cant_download_it(): } ) - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions_org = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=Organization'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=Organization'} + ) ] permissions_loc = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=Location'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=Location'} + ) ] permissions_rt = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=ReportTemplate'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=ReportTemplate'} + ) ] permissions_pt = [ permission['name'] - for permission in Filter.available_permissions( + for permission in module_target_sat.cli.Filter.available_permissions( {'search': 'resource_type=ProvisioningTemplate'} ) ] permissions_jt = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=JobTemplate'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=JobTemplate'} + ) ] # Assign filters to created role for perm in [permissions_org, permissions_loc, permissions_rt, permissions_pt, permissions_jt]: - make_filter({'role-id': role['id'], 'permissions': perm}) - User.add_role({'login': user1['login'], 'role-id': role['id']}) - User.add_role({'login': user2['login'], 'role-id': role['id']}) + module_target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': perm}) + module_target_sat.cli.User.add_role({'login': user1['login'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'login': user2['login'], 'role-id': role['id']}) name = gen_alpha() content = gen_alpha() - report_template = ReportTemplate.with_user(username=user1['login'], password=password).create( + report_template = module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).create( {'name': name, 'organization-id': org['id'], 'location-id': loc['id'], 'file': content} ) - schedule = ReportTemplate.with_user(username=user1['login'], password=password).schedule( - {'name': report_template['name']} - ) + schedule = module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).schedule({'name': report_template['name']}) job_id = schedule.split('Job ID: ', 
1)[1].strip() - report_data = ReportTemplate.with_user(username=user1['login'], password=password).report_data( - {'id': report_template['name'], 'job-id': job_id} - ) + report_data = module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).report_data({'id': report_template['name'], 'job-id': job_id}) assert content in report_data with pytest.raises(CLIReturnCodeError): - ReportTemplate.with_user(username=user2['login'], password=password).report_data( - {'id': report_template['name'], 'job-id': job_id} - ) + module_target_sat.cli.ReportTemplate.with_user( + username=user2['login'], password=password + ).report_data({'id': report_template['name'], 'job-id': job_id}) @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1750924') -def test_positive_generate_with_name_and_org(): +def test_positive_generate_with_name_and_org(module_target_sat): """Generate Host Status report, specifying template name and organization :id: 5af03399-b918-468a-1306-1c76dda6f369 @@ -677,16 +673,18 @@ def test_positive_generate_with_name_and_org(): :BZ: 1750924 """ host_name = gen_alpha() - host = make_fake_host({'name': host_name}) + host = module_target_sat.cli_factory.make_fake_host({'name': host_name}) - result = ReportTemplate.generate({'name': 'Host - Statuses', 'organization': DEFAULT_ORG}) + result = module_target_sat.cli.ReportTemplate.generate( + {'name': 'Host - Statuses', 'organization': DEFAULT_ORG} + ) assert host['name'] in [item.split(',')[0] for item in result.split('\n')] @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1782807') -def test_positive_generate_ansible_template(): +def test_positive_generate_ansible_template(module_target_sat): """Report template named 'Ansible Inventory' (default name is specified in settings) must be present in Satellite 6.7 and later in order to provide enhanced functionality for Ansible Tower inventory synchronization with Satellite. 
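# An illustrative sketch (an editorial aside, not part of the patch) of the
# schedule-then-fetch flow the report template tests above rely on:
# `ReportTemplate.schedule` prints a line containing 'Job ID: <n>', and the
# generated output is then retrieved with `report_data` using that id. The
# `sat` object is assumed to expose the same `cli.ReportTemplate` interface
# used throughout these tests.
def fetch_scheduled_report(sat, template_name):
    """Schedule a report template and return its generated data."""
    schedule = sat.cli.ReportTemplate.schedule({'name': template_name})
    # Parse the job id out of the hammer output, exactly as the tests do.
    job_id = schedule.split('Job ID: ', 1)[1].strip()
    return sat.cli.ReportTemplate.report_data({'name': template_name, 'job-id': job_id})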
@@ -707,19 +705,19 @@ def test_positive_generate_ansible_template(): :CaseImportance: Medium """ - settings = Settings.list({'search': 'name=ansible_inventory_template'}) - assert 1 == len(settings) + settings = module_target_sat.cli.Settings.list({'search': 'name=ansible_inventory_template'}) + assert len(settings) == 1 template_name = settings[0]['value'] - report_list = ReportTemplate.list() + report_list = module_target_sat.cli.ReportTemplate.list() assert template_name in [rt['name'] for rt in report_list] login = gen_alpha().lower() password = gen_alpha().lower() - loc = Location.info({'name': DEFAULT_LOC}) - org = Org.info({'name': DEFAULT_ORG}) + loc = module_target_sat.cli.Location.info({'name': DEFAULT_LOC}) + org = module_target_sat.cli.Org.info({'name': DEFAULT_ORG}) - user = make_user( + user = module_target_sat.cli_factory.user( { 'login': login, 'password': password, @@ -728,19 +726,21 @@ def test_positive_generate_ansible_template(): } ) - User.add_role({'login': user['login'], 'role': 'Ansible Tower Inventory Reader'}) + module_target_sat.cli.User.add_role( + {'login': user['login'], 'role': 'Ansible Tower Inventory Reader'} + ) host_name = gen_alpha().lower() - host = make_fake_host({'name': host_name}) + host = module_target_sat.cli_factory.make_fake_host({'name': host_name}) - schedule = ReportTemplate.with_user(username=user['login'], password=password).schedule( - {'name': template_name} - ) + schedule = module_target_sat.cli.ReportTemplate.with_user( + username=user['login'], password=password + ).schedule({'name': template_name}) job_id = schedule.split('Job ID: ', 1)[1].strip() - report_data = ReportTemplate.with_user(username=user['login'], password=password).report_data( - {'name': template_name, 'job-id': job_id} - ) + report_data = module_target_sat.cli.ReportTemplate.with_user( + username=user['login'], password=password + ).report_data({'name': template_name, 'job-id': job_id}) assert host['name'] in [item.split(',')[1] for item in report_data.split('\n') if len(item) > 0] @@ -773,7 +773,7 @@ def test_positive_generate_entitlements_report_multiple_formats( client.install_katello_ca(target_sat) client.register_contenthost(module_entitlement_manifest_org.label, local_ak['name']) assert client.subscribed - result_html = ReportTemplate.generate( + result_html = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -783,7 +783,7 @@ def test_positive_generate_entitlements_report_multiple_formats( ) assert client.hostname in result_html assert local_subscription['name'] in result_html - result_yaml = ReportTemplate.generate( + result_yaml = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -796,7 +796,7 @@ def test_positive_generate_entitlements_report_multiple_formats( assert client.hostname in entry elif 'Subscription Name:' in entry: assert local_subscription['name'] in entry - result_csv = ReportTemplate.generate( + result_csv = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -835,7 +835,7 @@ def test_positive_schedule_entitlements_report( client.install_katello_ca(target_sat) client.register_contenthost(module_entitlement_manifest_org.label, local_ak['name']) assert client.subscribed - scheduled_csv = ReportTemplate.schedule( + scheduled_csv = target_sat.cli.ReportTemplate.schedule( { 
'name': 'Subscription - Entitlement Report', 'organization': module_entitlement_manifest_org.name, @@ -843,7 +843,7 @@ def test_positive_schedule_entitlements_report( 'inputs': 'Days from Now=no limit', } ) - data_csv = ReportTemplate.report_data( + data_csv = target_sat.cli.ReportTemplate.report_data( { 'name': 'Subscription - Entitlement Report', 'job-id': scheduled_csv.split('\n', 1)[0].split('Job ID: ', 1)[1], @@ -876,7 +876,7 @@ def test_positive_generate_hostpkgcompare( :BZ: 1860430 """ # Add subscription to Satellite Tools repo to activation key - setup_org_for_a_rh_repo( + target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -887,7 +887,7 @@ def test_positive_generate_hostpkgcompare( 'activationkey-id': local_ak['id'], } ) - setup_org_for_a_custom_repo( + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_6.url, 'organization-id': module_entitlement_manifest_org.id, @@ -909,7 +909,7 @@ def test_positive_generate_hostpkgcompare( client.enable_repo(REPOS['rhst7']['id']) client.install_katello_agent() clients.sort(key=lambda client: client.hostname) - hosts_info = [Host.info({'name': client.hostname}) for client in clients] + hosts_info = [target_sat.cli.Host.info({'name': client.hostname}) for client in clients] host1, host2 = hosts_info res = clients[0].execute( @@ -919,7 +919,7 @@ def test_positive_generate_hostpkgcompare( res = clients[1].execute(f'yum -y install {FAKE_2_CUSTOM_PACKAGE}') assert not res.status - result = ReportTemplate.generate( + result = target_sat.cli.ReportTemplate.generate( { 'name': 'Host - compare content hosts packages', 'inputs': f'Host 1 = {host1["name"]}, ' f'Host 2 = {host2["name"]}', @@ -960,7 +960,7 @@ def test_positive_generate_hostpkgcompare( @pytest.mark.tier3 -def test_negative_generate_hostpkgcompare_nonexistent_host(): +def test_negative_generate_hostpkgcompare_nonexistent_host(module_target_sat): """Try to generate 'Host - compare content hosts packages' report with nonexistent hosts inputs @@ -979,13 +979,13 @@ def test_negative_generate_hostpkgcompare_nonexistent_host(): :BZ: 1860351 """ with pytest.raises(CLIReturnCodeError) as cm: - ReportTemplate.generate( + module_target_sat.cli.ReportTemplate.generate( { 'name': 'Host - compare content hosts packages', 'inputs': 'Host 1 = nonexistent1, ' 'Host 2 = nonexistent2', } ) - assert "At least one of the hosts couldn't be found" in cm.exception.stderr + assert "At least one of the hosts couldn't be found" in cm.value.stderr @pytest.mark.rhel_ver_list([7, 8, 9]) @@ -1018,7 +1018,7 @@ def test_positive_generate_installed_packages_report( :customerscenario: true """ - setup_org_for_a_custom_repo( + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_6.url, 'organization-id': module_entitlement_manifest_org.id, diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py new file mode 100644 index 00000000000..cf6e10db5c0 --- /dev/null +++ b/tests/foreman/cli/test_repositories.py @@ -0,0 +1,126 @@ +"""Test module for Repositories CLI. 
+ +:Requirement: Repository + +:CaseAutomation: Automated + +:CaseComponent: Repositories + +:team: Phoenix-content + +:CaseImportance: Critical + +""" +import pytest +from requests.exceptions import HTTPError + +from robottelo.constants import DEFAULT_ARCHITECTURE, REPOS, REPOSET + + +@pytest.mark.rhel_ver_match('[^6]') +def test_positive_custom_products_disabled_by_default( + setup_content, + default_location, + rhel_contenthost, + target_sat, +): + """Verify that custom products are disabled by default for content hosts + + :id: ba237e11-3b41-49e3-94b3-63e1f404d9e5 + + :steps: + 1. Create custom product and upload repository + 2. Attach to activation key + 3. Register Host + 4. Assert that custom products are disabled by default + + :expectedresults: Custom products should be disabled by default. "Enabled: 0" + + :customerscenario: true + + :BZ: 1265120 + """ + ak, org, _ = setup_content + rhel_contenthost.register(org, default_location, ak.name, target_sat) + assert rhel_contenthost.subscribed + product_details = rhel_contenthost.run('subscription-manager repos --list') + assert 'Enabled: 0' in product_details.stdout + + +def test_negative_invalid_repo_fails_publish( + module_repository, + module_org, + target_sat, +): + """Verify that an invalid repository fails when trying to publish in a content view + + :id: 64e03f28-8213-467a-a229-44c8cbfaaef1 + + :steps: + 1. Create custom product and upload repository + 2. Run Katello commands to make repository invalid + 3. Create content view and add repository + 4. Verify Publish fails + + :expectedresults: Publishing a content view with an invalid repository fails + + :customerscenario: true + + :BZ: 2032040 + """ + repo = module_repository + target_sat.execute( + 'echo "root = ::Katello::RootRepository.last; ::Katello::Resources::Candlepin::Product.' + 'remove_content(root.product.organization.label, root.product.cp_id, root.content_id); ' + '::Katello::Resources::Candlepin::Content.destroy(root.product.organization.label, ' + 'root.content_id)" | foreman-rake console' + ) + cv = target_sat.api.ContentView( + organization=module_org.name, + repository=[repo.id], + ).create() + with pytest.raises(HTTPError) as context: + cv.publish() + assert 'Remove the invalid repository before publishing again' in context.value.response.text + + +def test_positive_disable_rh_repo_with_basearch(module_target_sat, module_entitlement_manifest_org): + """Verify that users can disable Red Hat Repositories with basearch + + :id: dd3b63b7-1dbf-4d8a-ab66-348de0ad7cf3 + + :steps: + 1. You have the Appstream Kickstart repositories release version + "8" synced in from the release of RHEL 8 + 2.
hammer repository-set disable --basearch --name --product-id + --organization --releasever + + + :expectedresults: Users can now disable Red Hat repositories with + basearch + + :customerscenario: true + + :BZ: 1932486 + """ + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=REPOS['kickstart']['rhel8_aps']['product'], + repo=REPOS['kickstart']['rhel8_aps']['name'], + reposet=REPOS['kickstart']['rhel8_aps']['reposet'], + releasever=REPOS['kickstart']['rhel8_aps']['version'], + ) + repo = module_target_sat.api.Repository(id=rh_repo_id).read() + repo.sync(timeout=600) + disabled_repo = module_target_sat.cli.RepositorySet.disable( + { + 'basearch': DEFAULT_ARCHITECTURE, + 'name': REPOSET['kickstart']['rhel8'], + 'product-id': repo.product.id, + 'organization-id': module_entitlement_manifest_org.id, + 'releasever': REPOS['kickstart']['rhel8_aps']['version'], + 'repository-id': rh_repo_id, + } + ) + assert 'Repository disabled' in disabled_repo[0]['message'] diff --git a/tests/foreman/cli/test_repository.py b/tests/foreman/cli/test_repository.py index 3f2d0f93d5c..21788a32891 100644 --- a/tests/foreman/cli/test_repository.py +++ b/tests/foreman/cli/test_repository.py @@ -4,87 +4,56 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from random import choice from string import punctuation +from fauxfactory import gen_alphanumeric, gen_integer, gen_string, gen_url +from nailgun import entities import pytest import requests -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_integer -from fauxfactory import gen_string -from fauxfactory import gen_url -from nailgun import entities from wait_for import wait_for -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.content_export import ContentExport -from robottelo.cli.content_import import ContentImport -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_content_credential -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.file import File -from robottelo.cli.filter import Filter -from robottelo.cli.module_stream import ModuleStream -from robottelo.cli.org import Org -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.role import Role -from robottelo.cli.settings import Settings -from robottelo.cli.srpm import Srpm -from robottelo.cli.task import Task -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import CUSTOM_FILE_REPO_FILES_COUNT -from robottelo.constants import CUSTOM_LOCAL_FOLDER -from robottelo.constants import DataFile -from robottelo.constants import 
DOWNLOAD_POLICIES -from robottelo.constants import MIRRORING_POLICIES -from robottelo.constants import OS_TEMPLATE_DATA_FILE -from robottelo.constants import REPO_TYPE -from robottelo.constants import RPM_TO_UPLOAD -from robottelo.constants import SRPM_TO_UPLOAD -from robottelo.constants.repos import ANSIBLE_GALAXY -from robottelo.constants.repos import CUSTOM_3RD_PARTY_REPO -from robottelo.constants.repos import CUSTOM_FILE_REPO -from robottelo.constants.repos import CUSTOM_RPM_SHA -from robottelo.constants.repos import FAKE_5_YUM_REPO -from robottelo.constants.repos import FAKE_YUM_DRPM_REPO -from robottelo.constants.repos import FAKE_YUM_MD5_REPO -from robottelo.constants.repos import FAKE_YUM_SRPM_REPO +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + CUSTOM_FILE_REPO_FILES_COUNT, + CUSTOM_LOCAL_FOLDER, + DOWNLOAD_POLICIES, + FAKE_3_YUM_REPO_RPMS, + MIRRORING_POLICIES, + OS_TEMPLATE_DATA_FILE, + REPO_TYPE, + RPM_TO_UPLOAD, + SRPM_TO_UPLOAD, + DataFile, +) +from robottelo.constants.repos import ( + ANSIBLE_GALAXY, + CUSTOM_3RD_PARTY_REPO, + CUSTOM_FILE_REPO, + CUSTOM_RPM_SHA, + FAKE_5_YUM_REPO, + FAKE_YUM_MD5_REPO, + FAKE_YUM_SRPM_REPO, +) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.logging import logger -from robottelo.utils.datafactory import invalid_values_list -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_docker_repository_names -from robottelo.utils.datafactory import valid_http_credentials - -# from robottelo.constants.repos import FEDORA_OSTREE_REPO - +from robottelo.utils.datafactory import ( + invalid_values_list, + parametrized, + valid_data_list, + valid_docker_repository_names, + valid_http_credentials, +) +from tests.foreman.api.test_contentview import content_view YUM_REPOS = ( settings.repos.yum_0.url, @@ -102,23 +71,26 @@ ) -def _get_image_tags_count(repo): - return Repository.info({'id': repo['id']}) +def _get_image_tags_count(repo, sat): + return sat.cli.Repository.info({'id': repo['id']}) -def _validated_image_tags_count(repo): +def _validated_image_tags_count(repo, sat): """Wrapper around Repository.info(), that returns once container-image-tags in repo is greater than 0. 
Needed due to BZ#1664631 (container-image-tags is not populated immediately after synchronization), which was CLOSED WONTFIX """ wait_for( - lambda: int(_get_image_tags_count(repo=repo)['content-counts']['container-image-tags']) > 0, + lambda: int( + _get_image_tags_count(repo=repo, sat=sat)['content-counts']['container-image-tags'] + ) + > 0, timeout=30, delay=2, logger=logger, ) - return _get_image_tags_count(repo=repo) + return _get_image_tags_count(repo=repo, sat=sat) @pytest.fixture @@ -131,15 +103,15 @@ def repo_options(request, module_org, module_product): @pytest.fixture -def repo(repo_options): +def repo(repo_options, target_sat): """create a new repository.""" - return make_repository(repo_options) + return target_sat.cli_factory.make_repository(repo_options) @pytest.fixture -def gpg_key(module_org): +def gpg_key(module_org, module_target_sat): """Create a new GPG key.""" - return make_content_credential({'organization-id': module_org.id}) + return module_target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) class TestRepository: @@ -240,27 +212,6 @@ def test_positive_create_with_yum_repo(self, repo_options, repo): for key in 'url', 'content-type': assert repo.get(key) == repo_options[key] - @pytest.mark.tier1 - @pytest.mark.upgrade - @pytest.mark.parametrize( - 'repo_options', - **parametrized([{'content-type': 'file', 'url': CUSTOM_FILE_REPO}]), - indirect=True, - ) - def test_positive_create_with_file_repo(self, repo_options, repo): - """Create file repository - - :id: 46f63419-1acc-4ae2-be8c-d97816ba342f - - :parametrized: yes - - :expectedresults: file repository is created - - :CaseImportance: Critical - """ - for key in 'url', 'content-type': - assert repo.get(key) == repo_options[key] - @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', @@ -292,7 +243,7 @@ def test_positive_create_with_auth_yum_repo(self, repo_options, repo): for key in 'url', 'content-type': assert repo.get(key) == repo_options[key] repo = entities.Repository(id=repo['id']).read() - assert getattr(repo, 'upstream_username') == repo_options['upstream-username'] + assert repo.upstream_username == repo_options['upstream-username'] @pytest.mark.tier1 @pytest.mark.upgrade @@ -345,7 +296,7 @@ def test_positive_mirroring_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_positive_create_with_default_download_policy(self, repo_options, repo): + def test_positive_create_with_default_download_policy(self, repo_options, repo, target_sat): """Verify if the default download policy is assigned when creating a YUM repo without `--download-policy` @@ -357,7 +308,7 @@ def test_positive_create_with_default_download_policy(self, repo_options, repo): :CaseImportance: Critical """ - default_dl_policy = Settings.list({'search': 'name=default_download_policy'}) + default_dl_policy = target_sat.cli.Settings.list({'search': 'name=default_download_policy'}) assert default_dl_policy assert repo.get('download-policy') == default_dl_policy[0]['value'] @@ -365,7 +316,7 @@ def test_positive_create_with_default_download_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo): + def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo, target_sat): """Update `immediate` download policy to `on_demand` for a newly created YUM 
repository @@ -380,8 +331,8 @@ def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo) :BZ: 1732056 """ assert repo.get('download-policy') == 'immediate' - Repository.update({'id': repo['id'], 'download-policy': 'on_demand'}) - result = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'download-policy': 'on_demand'}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result.get('download-policy') == 'on_demand' @pytest.mark.tier1 @@ -390,7 +341,7 @@ def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo) **parametrized([{'content-type': 'yum', 'download-policy': 'on_demand'}]), indirect=True, ) - def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo): + def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo, target_sat): """Update `on_demand` download policy to `immediate` for a newly created YUM repository @@ -402,13 +353,13 @@ def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo) :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'download-policy': 'immediate'}) - result = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'download-policy': 'immediate'}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result['download-policy'] == 'immediate' @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key): + def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key, target_sat): """Check if repository can be created with gpg key ID :id: 6d22f0ea-2d27-4827-9b7a-3e1550a47285 @@ -420,7 +371,7 @@ def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key): :CaseImportance: Critical """ repo_options['gpg-key-id'] = gpg_key['id'] - repo = make_repository(repo_options) + repo = target_sat.cli_factory.make_repository(repo_options) assert repo['gpg-key']['id'] == gpg_key['id'] assert repo['gpg-key']['name'] == gpg_key['name'] @@ -585,16 +536,18 @@ def test_positive_create_repo_with_new_organization_and_location(self, target_sa :CaseImportance: High """ - new_org = make_org() - new_location = make_location() - new_product = make_product( + new_org = target_sat.cli_factory.make_org() + new_location = target_sat.cli_factory.make_location() + new_product = target_sat.cli_factory.make_product( {'organization-id': new_org['id'], 'description': 'test_product'} ) - Org.add_location({'location-id': new_location['id'], 'name': new_org['name']}) - assert new_location['name'] in Org.info({'id': new_org['id']})['locations'] - make_repository( + target_sat.cli.Org.add_location( + {'location-id': new_location['id'], 'name': new_org['name']} + ) + assert new_location['name'] in target_sat.cli.Org.info({'id': new_org['id']})['locations'] + target_sat.cli_factory.make_repository( { - 'location-id': new_location['id'], + 'content-type': 'yum', 'organization-id': new_org['id'], 'product-id': new_product['id'], } @@ -612,7 +565,7 @@ def test_positive_create_repo_with_new_organization_and_location(self, target_sa **parametrized([{'name': name} for name in invalid_values_list()]), indirect=True, ) - def test_negative_create_with_name(self, repo_options): + def test_negative_create_with_name(self, repo_options, target_sat): """Repository name cannot be 300-characters long :id: af0652d3-012d-4846-82ac-047918f74722 @@ -624,7 +577,7 @@ def test_negative_create_with_name(self, 
repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -632,7 +585,9 @@ def test_negative_create_with_name(self, repo_options): **parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}.com'}]), indirect=True, ) - def test_negative_create_with_url_with_special_characters(self, repo_options): + def test_negative_create_with_url_with_special_characters( + self, repo_options, module_target_sat + ): """Verify that repository URL cannot contain unquoted special characters :id: 2bd5ee17-0fe5-43cb-9cdc-dc2178c5374c @@ -644,7 +599,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -652,7 +607,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): **parametrized([{'content-type': 'yum', 'download-policy': gen_string('alpha', 5)}]), indirect=True, ) - def test_negative_create_with_invalid_download_policy(self, repo_options): + def test_negative_create_with_invalid_download_policy(self, repo_options, module_target_sat): """Verify that YUM repository cannot be created with invalid download policy @@ -666,13 +621,13 @@ def test_negative_create_with_invalid_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_negative_update_to_invalid_download_policy(self, repo_options, repo): + def test_negative_update_to_invalid_download_policy(self, repo_options, repo, target_sat): """Verify that YUM repository cannot be updated to invalid download policy @@ -686,7 +641,9 @@ def test_negative_update_to_invalid_download_policy(self, repo_options, repo): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - Repository.update({'id': repo['id'], 'download-policy': gen_string('alpha', 5)}) + target_sat.cli.Repository.update( + {'id': repo['id'], 'download-policy': gen_string('alpha', 5)} + ) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -694,14 +651,14 @@ def test_negative_update_to_invalid_download_policy(self, repo_options, repo): **parametrized( [ {'content-type': content_type, 'download-policy': 'on_demand'} - for content_type in REPO_TYPE.keys() + for content_type in REPO_TYPE if content_type != 'yum' if content_type != 'ostree' ] ), indirect=True, ) - def test_negative_create_non_yum_with_download_policy(self, repo_options): + def test_negative_create_non_yum_with_download_policy(self, repo_options, module_target_sat): """Verify that non-YUM repositories cannot be created with download policy TODO: Remove ostree from exceptions when ostree is added back in Satellite 7 @@ -720,43 +677,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): CLIFactoryError, match='Download policy Cannot set attribute download_policy for content type', ): - make_repository(repo_options) - - @pytest.mark.tier1 - @pytest.mark.parametrize( - 'repo_options', - **parametrized( - [ - {'content-type': 'yum', 'url': url} - for url in ( - settings.repos.yum_1.url, - settings.repos.yum_3.url, - 
settings.repos.yum_4.url, - ) - ] - ), - indirect=True, - ) - def test_positive_synchronize_yum_repo(self, repo_options, repo): - """Check if repository can be created and synced - - :id: e3a62529-edbd-4062-9246-bef5f33bdcf0 - - :parametrized: yes - - :expectedresults: Repository is created and synced - - :CaseLevel: Integration - - :CaseImportance: Critical - """ - # Repo is not yet synced - assert repo['sync']['status'] == 'Not Synced' - # Synchronize it - Repository.synchronize({'id': repo['id']}) - # Verify it has finished - repo = Repository.info({'id': repo['id']}) - assert repo['sync']['status'] == 'Success' + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -764,7 +685,7 @@ def test_positive_synchronize_yum_repo(self, repo_options, repo): **parametrized([{'content-type': 'file', 'url': CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_synchronize_file_repo(self, repo_options, repo): + def test_positive_synchronize_file_repo(self, repo_options, repo, target_sat): """Check if repository can be created and synced :id: eafc421d-153e-41e1-afbd-938e556ef827 @@ -773,16 +694,14 @@ def test_positive_synchronize_file_repo(self, repo_options, repo): :expectedresults: Repository is created and synced - :CaseLevel: Integration - :CaseImportance: Critical """ # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert int(repo['content-counts']['files']) == CUSTOM_FILE_REPO_FILES_COUNT @@ -804,7 +723,7 @@ def test_positive_synchronize_file_repo(self, repo_options, repo): ), indirect=True, ) - def test_positive_synchronize_auth_yum_repo(self, repo): + def test_positive_synchronize_auth_yum_repo(self, repo, target_sat): """Check if secured repository can be created and synced :id: b0db676b-e0f0-428c-adf3-1d7c0c3599f0 @@ -815,14 +734,13 @@ def test_positive_synchronize_auth_yum_repo(self, repo): :BZ: 1328092 - :CaseLevel: Integration """ # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - new_repo = Repository.info({'id': repo['id']}) + new_repo = target_sat.cli.Repository.info({'id': repo['id']}) assert new_repo['sync']['status'] == 'Success' @pytest.mark.skip_if_open("BZ:2035025") @@ -845,7 +763,7 @@ def test_positive_synchronize_auth_yum_repo(self, repo): ), indirect=['repo_options'], ) - def test_negative_synchronize_auth_yum_repo(self, repo): + def test_negative_synchronize_auth_yum_repo(self, repo, target_sat): """Check if secured repo fails to synchronize with invalid credentials :id: 809905ae-fb76-465d-9468-1f99c4274aeb @@ -856,11 +774,12 @@ def test_negative_synchronize_auth_yum_repo(self, repo): :BZ: 1405503, 1453118 - :CaseLevel: Integration """ # Try to synchronize it - repo_sync = Repository.synchronize({'id': repo['id'], 'async': True}) - response = Task.progress({'id': repo_sync[0]['id']}, return_raw_response=True) + repo_sync = target_sat.cli.Repository.synchronize({'id': repo['id'], 'async': True}) + response = target_sat.cli.Task.progress( + {'id': repo_sync[0]['id']}, return_raw_response=True + ) assert "Error: 
401, message='Unauthorized'" in response.stderr[1].decode('utf-8') @pytest.mark.tier2 @@ -879,7 +798,9 @@ def test_negative_synchronize_auth_yum_repo(self, repo): ), indirect=True, ) - def test_positive_synchronize_docker_repo(self, repo, module_product, module_org): + def test_positive_synchronize_docker_repo( + self, repo, module_product, module_org, module_target_sat + ): """Check if Docker repository can be created, synced, and deleted :id: cb9ae788-743c-4785-98b2-6ae0c161bc9a @@ -895,17 +816,17 @@ def test_positive_synchronize_docker_repo(self, repo, module_product, module_org # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - new_repo = Repository.info({'id': repo['id']}) + new_repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert new_repo['sync']['status'] == 'Success' # For BZ#1810165, assert repo can be deleted - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) assert ( new_repo['name'] - not in Product.info({'id': module_product.id, 'organization-id': module_org.id})[ - 'content' - ] + not in module_target_sat.cli.Product.info( + {'id': module_product.id, 'organization-id': module_org.id} + )['content'] ) @pytest.mark.tier2 @@ -917,26 +838,31 @@ def test_positive_synchronize_docker_repo(self, repo, module_product, module_org { 'content-type': 'docker', 'docker-upstream-name': CONTAINER_UPSTREAM_NAME, + 'name': valid_docker_repository_names()[0], 'url': CONTAINER_REGISTRY_HUB, - 'include-tags': 'latest', } ] ), indirect=True, ) - def test_positive_synchronize_docker_repo_with_tags_whitelist(self, repo_options, repo): - """Check if only whitelisted tags are synchronized + def test_verify_checksum_container_repo(self, repo, target_sat): + """Check if Verify Content Checksum can be run on container repos - :id: aa820c65-2de1-4b32-8890-98bd8b4320dc + :id: c8f0eb45-3cb6-41b2-aad9-52ac847d7bf8 :parametrized: yes - :expectedresults: Only whitelisted tag is synchronized + :expectedresults: Docker repository is created, and can be synced with + validate-contents set to True + + :BZ: 2161209 + + :customerscenario: true """ - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) - assert repo_options['include-tags'] in repo['container-image-tags-filter'] - assert int(repo['content-counts']['container-image-tags']) == 1 + assert repo['sync']['status'] == 'Not Synced' + target_sat.cli.Repository.synchronize({'id': repo['id'], 'validate-contents': 'true'}) + new_repo = target_sat.cli.Repository.info({'id': repo['id']}) + assert new_repo['sync']['status'] == 'Success' @pytest.mark.tier2 @pytest.mark.parametrize( @@ -953,7 +879,7 @@ def test_positive_synchronize_docker_repo_with_tags_whitelist(self, repo_options ), indirect=True, ) - def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): + def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo, target_sat): """Verify that adding tags whitelist and re-syncing after synchronizing full repository doesn't remove content that was already pulled in when mirroring policy is set to additive @@ -965,13 +891,13 @@ def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): :expectedresults: Non-whitelisted tags are not removed """ tags = 'latest' - Repository.synchronize({'id': repo['id']}) - repo = 
_validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert not repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 - Repository.update({'id': repo['id'], 'include-tags': tags}) - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.update({'id': repo['id'], 'include-tags': tags}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert tags in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 @@ -990,7 +916,7 @@ def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): ), indirect=True, ) - def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo): + def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo, target_sat): """Verify that adding tags whitelist and re-syncing after synchronizing full repository does remove content that was already pulled in when mirroring policy is set to content only @@ -1003,13 +929,13 @@ def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo :expectedresults: Non-whitelisted tags are removed """ tags = 'latest' - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert not repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 - Repository.update({'id': repo['id'], 'include-tags': tags}) - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.update({'id': repo['id'], 'include-tags': tags}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert tags in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) <= 2 @@ -1028,7 +954,9 @@ def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo ), indirect=True, ) - def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo_options, repo): + def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags( + self, repo_options, repo, target_sat + ): """Set tags whitelist to contain both valid and invalid (non-existing) tags. 
Check if only whitelisted tags are synchronized @@ -1038,8 +966,8 @@ def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo :expectedresults: Only whitelisted tag is synchronized """ - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) for tag in repo_options['include-tags'].split(','): assert tag in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) == 1 @@ -1059,7 +987,9 @@ def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo ), indirect=True, ) - def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, repo): + def test_negative_synchronize_docker_repo_with_invalid_tags( + self, repo_options, repo, target_sat + ): """Set tags whitelist to contain only invalid (non-existing) tags. Check that no data is synchronized. @@ -1069,8 +999,8 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, :expectedresults: Tags are not synchronized """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) for tag in repo_options['include-tags'].split(','): assert tag in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) == 0 @@ -1081,7 +1011,7 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_resynchronize_rpm_repo(self, repo): + def test_positive_resynchronize_rpm_repo(self, repo, target_sat): """Check that repository content is resynced after packages were removed from repository @@ -1093,22 +1023,21 @@ def test_positive_resynchronize_rpm_repo(self, repo): :BZ: 1459845, 1459874, 1318004 - :CaseLevel: Integration """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list({'repository-id': repo['id']}) - Repository.remove_content( + packages = target_sat.cli.Package.list({'repository-id': repo['id']}) + target_sat.cli.Repository.remove_content( {'id': repo['id'], 'ids': [package['id'] for package in packages]} ) - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' # Re-synchronize repository - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' @@ -1137,7 +1066,7 @@ def test_positive_resynchronize_rpm_repo(self, repo): ), ) @pytest.mark.tier2 - def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2): + def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2, module_target_sat): """ Check that a package removed upstream is removed downstream when the repo is next synced if 
mirror-on-sync is enabled (the default setting). @@ -1150,7 +1079,7 @@ def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2): 3. Delete one package from repo 1. 4. Sync the second repo (repo 2) from the first repo (repo 1). - :Steps: + :steps: 1. Check that the package deleted from repo 1 was removed from repo 2. :expectedresults: A package removed from repo 1 is removed from repo 2 when synced. @@ -1158,36 +1087,40 @@ def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2): :CaseImportance: Medium """ # Add description to repo 1 and its product - Product.update( + module_target_sat.cli.Product.update( { 'id': repo.get('product')['id'], 'organization': module_org.label, 'description': 'Fake Upstream', } ) - Repository.update({'id': repo['id'], 'description': ['Fake Upstream']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'description': ['Fake Upstream']} + ) # Sync repo 1 from the real upstream - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Make 2nd repo - prod_2 = make_product({'organization-id': module_org.id, 'description': 'Downstream'}) + prod_2 = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'description': 'Downstream'} + ) repo_options_2['organization-id'] = module_org.id repo_options_2['product-id'] = prod_2['id'] repo_options_2['url'] = repo.get('published-at') - repo_2 = make_repository(repo_options_2) - Repository.update({'id': repo_2['id'], 'description': ['Downstream']}) - repo_2 = Repository.info({'id': repo_2['id']}) - Repository.synchronize({'id': repo_2['id']}) + repo_2 = module_target_sat.cli_factory.make_repository(repo_options_2) + module_target_sat.cli.Repository.update({'id': repo_2['id'], 'description': ['Downstream']}) + repo_2 = module_target_sat.cli.Repository.info({'id': repo_2['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_2['id']}) # Get list of repo 1's packages and remove one - package = choice(Package.list({'repository-id': repo['id']})) - Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) - repo = Repository.info({'id': repo['id']}) + package = choice(module_target_sat.cli.Package.list({'repository-id': repo['id']})) + module_target_sat.cli.Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '31' # Re-synchronize repo_2, the downstream repository - Repository.synchronize({'id': repo_2['id']}) - repo_2 = Repository.info({'id': repo_2['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_2['id']}) + repo_2 = module_target_sat.cli.Repository.info({'id': repo_2['id']}) assert repo_2['sync']['status'] == 'Success' assert repo_2['content-counts']['packages'] == '31' @@ -1218,11 +1151,9 @@ def test_positive_synchronize_rpm_repo_ignore_SRPM( :BZ: 1591358 - :CaseLevel: Integration - """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['source-rpms'] == '0', 'content not ignored correctly' @@ 
-1230,7 +1161,7 @@ def test_positive_synchronize_rpm_repo_ignore_SRPM( @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_update_url(self, repo): + def test_positive_update_url(self, repo, module_target_sat): """Update the original url for a repository :id: 1a2cf29b-5c30-4d4c-b6d1-2f227b0a0a57 @@ -1242,9 +1173,9 @@ def test_positive_update_url(self, repo): :CaseImportance: Critical """ # Update the url - Repository.update({'id': repo['id'], 'url': settings.repos.yum_2.url}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'url': settings.repos.yum_2.url}) # Fetch it again - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['url'] == settings.repos.yum_2.url @pytest.mark.tier1 @@ -1252,7 +1183,9 @@ def test_positive_update_url(self, repo): 'new_repo_options', **parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}'}]), ) - def test_negative_update_url_with_special_characters(self, new_repo_options, repo): + def test_negative_update_url_with_special_characters( + self, new_repo_options, repo, module_target_sat + ): """Verify that repository URL cannot be updated to contain the forbidden characters @@ -1265,13 +1198,15 @@ def test_negative_update_url_with_special_characters(self, new_repo_options, rep :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - Repository.update({'id': repo['id'], 'url': new_repo_options['url']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'url': new_repo_options['url']} + ) # Fetch it again, ensure url hasn't changed. - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['url'] == repo['url'] @pytest.mark.tier1 - def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): + def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key, target_sat): """Update the original gpg key :id: 367ff375-4f52-4a8c-b974-8c1c54e3fdd3 @@ -1282,11 +1217,13 @@ def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key['id']}) - gpg_key_new = make_content_credential({'organization-id': module_org.id}) - Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key_new['id']}) - result = Repository.info({'id': repo['id']}) + gpg_key_new = target_sat.cli_factory.make_content_credential( + {'organization-id': module_org.id} + ) + target_sat.cli.Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key_new['id']}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result['gpg-key']['id'] == gpg_key_new['id'] @pytest.mark.tier1 @@ -1295,7 +1232,7 @@ def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): **parametrized([{'mirroring-policy': policy} for policy in MIRRORING_POLICIES]), indirect=True, ) - def test_positive_update_mirroring_policy(self, repo, repo_options): + def test_positive_update_mirroring_policy(self, repo, repo_options, module_target_sat): """Update the mirroring policy rule for repository :id: 9bab2537-3223-40d7-bc4c-a51b09d2e812 @@ -1306,15 +1243,17 @@ def test_positive_update_mirroring_policy(self, repo, repo_options): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'mirroring-policy': 
repo_options['mirroring-policy']}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'mirroring-policy': repo_options['mirroring-policy']} + ) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['mirroring-policy'] == MIRRORING_POLICIES[repo_options['mirroring-policy']] @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **parametrized([{'publish-via-http': 'no'}]), indirect=True ) - def test_positive_update_publish_method(self, repo): + def test_positive_update_publish_method(self, repo, module_target_sat): """Update the original publishing method :id: e7bd2667-4851-4a64-9c70-1b5eafbc3f71 @@ -1325,8 +1264,8 @@ def test_positive_update_publish_method(self, repo): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'publish-via-http': 'yes'}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'publish-via-http': 'yes'}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['publish-via-http'] == 'yes' @pytest.mark.tier1 @@ -1336,7 +1275,9 @@ def test_positive_update_publish_method(self, repo): indirect=True, ) @pytest.mark.parametrize('checksum_type', ['sha1', 'sha256']) - def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): + def test_positive_update_checksum_type( + self, repo_options, repo, checksum_type, module_target_sat + ): """Create a YUM repository and update the checksum type :id: 42f14257-d860-443d-b337-36fd355014bc @@ -1349,8 +1290,8 @@ def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): :CaseImportance: Critical """ assert repo['content-type'] == repo_options['content-type'] - Repository.update({'checksum-type': checksum_type, 'id': repo['id']}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'checksum-type': checksum_type, 'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['checksum-type'] == checksum_type @pytest.mark.tier1 @@ -1368,7 +1309,7 @@ def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): ), indirect=True, ) - def test_negative_create_checksum_with_on_demand_policy(self, repo_options): + def test_negative_create_checksum_with_on_demand_policy(self, repo_options, module_target_sat): """Attempt to create repository with checksum and on_demand policy. 
:id: 33d712e6-e91f-42bb-8c5d-35bdc427182c @@ -1382,28 +1323,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): :BZ: 1732056 """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) - - @pytest.mark.tier1 - @pytest.mark.parametrize( - 'repo_options', - **parametrized([{'name': name} for name in valid_data_list().values()]), - indirect=True, - ) - def test_positive_delete_by_id(self, repo): - """Check if repository can be created and deleted - - :id: bcf096db-0033-4138-90a3-cb7355d5dfaf - - :parametrized: yes - - :expectedresults: Repository is created and then deleted - - :CaseImportance: Critical - """ - Repository.delete({'id': repo['id']}) - with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.upgrade @@ -1412,7 +1332,7 @@ def test_positive_delete_by_id(self, repo): **parametrized([{'name': name} for name in valid_data_list().values()]), indirect=True, ) - def test_positive_delete_by_name(self, repo_options, repo): + def test_positive_delete_by_name(self, repo_options, repo, module_target_sat): """Check if repository can be created and deleted :id: 463980a4-dbcf-4178-83a6-1863cf59909a @@ -1423,9 +1343,11 @@ def test_positive_delete_by_name(self, repo_options, repo): :CaseImportance: Critical """ - Repository.delete({'name': repo['name'], 'product-id': repo_options['product-id']}) + module_target_sat.cli.Repository.delete( + {'name': repo['name'], 'product-id': repo_options['product-id']} + ) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1433,7 +1355,7 @@ def test_positive_delete_by_name(self, repo_options, repo): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_delete_rpm(self, repo): + def test_positive_delete_rpm(self, repo, module_target_sat): """Check if rpm repository with packages can be deleted. 
:id: 1172492f-d595-4c8e-89c1-fabb21eb04ac @@ -1444,14 +1366,14 @@ def test_positive_delete_rpm(self, repo): :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' # Check that there is at least one package assert int(repo['content-counts']['packages']) > 0 - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.upgrade @@ -1460,7 +1382,9 @@ def test_positive_delete_rpm(self, repo): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_remove_content_by_repo_name(self, module_org, module_product, repo): + def test_positive_remove_content_by_repo_name( + self, module_org, module_product, repo, module_target_sat + ): """Synchronize and remove rpm content using repo name :id: a8b6f17d-3b13-4185-920a-2558ace59458 @@ -1473,14 +1397,14 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, :CaseImportance: Critical """ - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': repo['name'], 'product': module_product.name, 'organization': module_org.name, } ) - repo = Repository.info( + repo = module_target_sat.cli.Repository.info( { 'name': repo['name'], 'product': module_product.name, @@ -1490,14 +1414,14 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list( + packages = module_target_sat.cli.Package.list( { 'repository': repo['name'], 'product': module_product.name, 'organization': module_org.name, } ) - Repository.remove_content( + module_target_sat.cli.Repository.remove_content( { 'name': repo['name'], 'product': module_product.name, @@ -1505,7 +1429,7 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, 'ids': [package['id'] for package in packages], } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' @pytest.mark.tier1 @@ -1515,7 +1439,7 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_remove_content_rpm(self, repo): + def test_positive_remove_content_rpm(self, repo, module_target_sat): """Synchronize repository and remove rpm content from it :id: c4bcda0e-c0d6-424c-840d-26684ca7c9f1 @@ -1528,16 +1452,16 @@ def test_positive_remove_content_rpm(self, repo): :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list({'repository-id': repo['id']}) - Repository.remove_content( + packages = 
module_target_sat.cli.Package.list({'repository-id': repo['id']}) + module_target_sat.cli.Repository.remove_content( {'id': repo['id'], 'ids': [package['id'] for package in packages]} ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' @pytest.mark.tier1 @@ -1560,7 +1484,7 @@ def test_positive_upload_content(self, repo, target_sat): local_path=DataFile.RPM_TO_UPLOAD, remote_path=f"/tmp/{RPM_TO_UPLOAD}", ) - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -1568,8 +1492,11 @@ def test_positive_upload_content(self, repo, target_sat): 'product-id': repo['product']['id'], } ) - assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message'] - assert int(Repository.info({'id': repo['id']})['content-counts']['packages']) == 1 + assert f"Successfully uploaded file {RPM_TO_UPLOAD}" == result[0]['message'] + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['packages']) + == 1 + ) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1592,15 +1519,15 @@ def test_positive_upload_content_to_file_repo(self, repo, target_sat): :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - new_repo = Repository.info({'id': repo['id']}) + new_repo = target_sat.cli.Repository.info({'id': repo['id']}) assert int(new_repo['content-counts']['files']) == CUSTOM_FILE_REPO_FILES_COUNT target_sat.put( local_path=DataFile.OS_TEMPLATE_DATA_FILE, remote_path=f"/tmp/{OS_TEMPLATE_DATA_FILE}", ) - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': new_repo['name'], 'organization': new_repo['organization'], @@ -1608,8 +1535,8 @@ def test_positive_upload_content_to_file_repo(self, repo, target_sat): 'product-id': new_repo['product']['id'], } ) - assert f"Successfully uploaded file '{OS_TEMPLATE_DATA_FILE}'" in result[0]['message'] - new_repo = Repository.info({'id': new_repo['id']}) + assert f"Successfully uploaded file {OS_TEMPLATE_DATA_FILE}" == result[0]['message'] + new_repo = target_sat.cli.Repository.info({'id': new_repo['id']}) assert int(new_repo['content-counts']['files']) == CUSTOM_FILE_REPO_FILES_COUNT + 1 @pytest.mark.skip_if_open("BZ:1410916") @@ -1619,7 +1546,7 @@ def test_positive_upload_content_to_file_repo(self, repo, target_sat): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_negative_restricted_user_cv_add_repository(self, module_org, repo): + def test_negative_restricted_user_cv_add_repository(self, module_org, repo, module_target_sat): """Attempt to add a product repository to content view with a restricted user, using product name not visible to restricted user. @@ -1631,7 +1558,7 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): :BZ: 1436209,1410916 - :Steps: + :steps: 1. Setup a restricted user with permissions that filter the products with names like Test_* or "rhel7*" 2. Create a content view @@ -1647,7 +1574,6 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): view, assert that the restricted user still cannot view the product repository. 
- :CaseLevel: Integration """ required_permissions = { 'Katello::Product': ( @@ -1688,7 +1614,7 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): content_view_name = f"Test_{gen_string('alpha', 20)}" # Create a non admin user, for the moment without any permissions - user = make_user( + user = module_target_sat.cli_factory.user( { 'admin': False, 'default-organization-id': module_org.id, @@ -1698,9 +1624,9 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): } ) # Create a new role - role = make_role() + role = module_target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = module_target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -1721,43 +1647,45 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - make_filter({'role-id': role['id'], 'permissions': permission_names, 'search': search}) + module_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names, 'search': search} + ) # Add the created and initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises( CLIReturnCodeError, match=r'Access denied\\nMissing one of the required permissions: view_roles', ): - Role.with_user(user_name, user_password).info({'id': role['id']}) + module_target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view - content_view = make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'name': content_view_name} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, user_password).info( - {'id': content_view['id']} - ) + user_content_view = module_target_sat.cli.ContentView.with_user( + user_name, user_password + ).info({'id': content_view['id']}) # assert that this is the same content view assert content_view['name'] == user_content_view['name'] # assert admin user is able to view the product - repos = Repository.list({'organization-id': module_org.id}) + repos = module_target_sat.cli.Repository.list({'organization-id': module_org.id}) assert len(repos) == 1 # assert that this is the same repo assert repos[0]['id'] == repo['id'] # assert that restricted user is not able to view the product - repos = Repository.with_user(user_name, user_password).list( + repos = module_target_sat.cli.Repository.with_user(user_name, user_password).list( {'organization-id': module_org.id} ) assert len(repos) == 0 # assert that the user cannot add the product repo to content view with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user_name, user_password).add_repository( + module_target_sat.cli.ContentView.with_user(user_name, 
user_password).add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -1765,7 +1693,7 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): } ) # assert that restricted user still not able to view the product - repos = Repository.with_user(user_name, user_password).list( + repos = module_target_sat.cli.Repository.with_user(user_name, user_password).list( {'organization-id': module_org.id} ) assert len(repos) == 0 @@ -1791,7 +1719,7 @@ def test_positive_upload_remove_srpm_content(self, repo, target_sat): remote_path=f"/tmp/{SRPM_TO_UPLOAD}", ) # Upload SRPM - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -1801,17 +1729,23 @@ def test_positive_upload_remove_srpm_content(self, repo, target_sat): } ) assert f"Successfully uploaded file '{SRPM_TO_UPLOAD}'" in result[0]['message'] - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 1 + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 1 + ) # Remove uploaded SRPM - Repository.remove_content( + target_sat.cli.Repository.remove_content( { 'id': repo['id'], - 'ids': [Srpm.list({'repository-id': repo['id']})[0]['id']], + 'ids': [target_sat.cli.Srpm.list({'repository-id': repo['id']})[0]['id']], 'content-type': 'srpm', } ) - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 0 + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 0 + ) @pytest.mark.upgrade @pytest.mark.tier2 @@ -1837,7 +1771,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): remote_path=f"/tmp/{SRPM_TO_UPLOAD}", ) # Upload SRPM - Repository.upload_content( + target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -1846,15 +1780,18 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): 'content-type': 'srpm', } ) - assert len(Srpm.list()) > 0 - srpm_list = Srpm.list({'repository-id': repo['id']}) + assert len(target_sat.cli.Srpm.list()) > 0 + srpm_list = target_sat.cli.Srpm.list({'repository-id': repo['id']}) assert srpm_list[0]['filename'] == SRPM_TO_UPLOAD assert len(srpm_list) == 1 - assert Srpm.info({'id': srpm_list[0]['id']})[0]['filename'] == SRPM_TO_UPLOAD - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 1 + assert target_sat.cli.Srpm.info({'id': srpm_list[0]['id']})[0]['filename'] == SRPM_TO_UPLOAD + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 1 + ) assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'organization': repo['organization'], 'product-id': repo['product']['id'], @@ -1864,10 +1801,10 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) > 0 ) - assert len(Srpm.list({'organization': repo['organization']})) > 0 + assert len(target_sat.cli.Srpm.list({'organization': repo['organization']})) > 0 assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'organization': repo['organization'], 'lifecycle-environment': 'Library', @@ -1878,7 +1815,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'content-view': 'Default Organization View', 'lifecycle-environment': 'Library', @@ -1890,16 +1827,16 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) # Remove 
uploaded SRPM - Repository.remove_content( + target_sat.cli.Repository.remove_content( { 'id': repo['id'], - 'ids': [Srpm.list({'repository-id': repo['id']})[0]['id']], + 'ids': [target_sat.cli.Srpm.list({'repository-id': repo['id']})[0]['id']], 'content-type': 'srpm', } ) - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == len( - Srpm.list({'repository-id': repo['id']}) - ) + assert int( + target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms'] + ) == len(target_sat.cli.Srpm.list({'repository-id': repo['id']})) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1908,7 +1845,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): indirect=True, ) def test_positive_create_get_update_delete_module_streams( - self, repo_options, module_org, module_product, repo + self, repo_options, module_org, module_product, repo, module_target_sat ): """Check module-stream get for each create, get, update, delete. @@ -1919,7 +1856,7 @@ def test_positive_create_get_update_delete_module_streams( :Setup: 1. valid yum repo with Module Streams. - :Steps: + :steps: 1. Create Yum Repository with url contain module-streams 2. Initialize synchronization 3. Another Repository with same Url @@ -1937,76 +1874,34 @@ def test_positive_create_get_update_delete_module_streams( :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert ( repo['content-counts']['module-streams'] == '7' ), 'Module Streams not synced correctly' # adding repo with same yum url should not change count. - duplicate_repo = make_repository(repo_options) - Repository.synchronize({'id': duplicate_repo['id']}) + duplicate_repo = module_target_sat.cli_factory.make_repository(repo_options) + module_target_sat.cli.Repository.synchronize({'id': duplicate_repo['id']}) - module_streams = ModuleStream.list({'organization-id': module_org.id}) + module_streams = module_target_sat.cli.ModuleStream.list({'organization-id': module_org.id}) assert len(module_streams) == 7, 'Module Streams get worked correctly' - Repository.update( + module_target_sat.cli.Repository.update( { 'product-id': module_product.id, 'id': repo['id'], 'url': settings.repos.module_stream_1.url, } ) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert ( repo['content-counts']['module-streams'] == '7' ), 'Module Streams not synced correctly' - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) - - @pytest.mark.tier1 - @pytest.mark.parametrize( - 'repo_options', - **parametrized([{'content-type': 'yum', 'url': settings.repos.module_stream_0.url}]), - indirect=True, - ) - @pytest.mark.parametrize( - 'repo_options_2', - **parametrized([{'content-type': 'yum', 'url': settings.repos.module_stream_1.url}]), - ) - def test_module_stream_list_validation(self, module_org, repo, repo_options_2): - """Check module-stream get with list on hammer. - - :id: 9842a0c3-8532-4b16-a00a-534fc3b0a776ff89f23e-cd00-4d20-84d3-add0ea24abf8 - - :parametrized: yes - - :Setup: - 1. valid yum repo with Module Streams. - - :Steps: - 1. 
Create Yum Repositories with url contain module-streams and Products - 2. Initialize synchronization - 3. Verify the module-stream list with various inputs options - - :expectedresults: Verify the module-stream list response. - - :CaseAutomation: Automated - """ - Repository.synchronize({'id': repo['id']}) - - prod_2 = make_product({'organization-id': module_org.id}) - repo_options_2['organization-id'] = module_org.id - repo_options_2['product-id'] = prod_2['id'] - repo_2 = make_repository(repo_options_2) - - Repository.synchronize({'id': repo_2['id']}) - module_streams = ModuleStream.list() - assert len(module_streams) > 13, 'Module Streams list failed' - module_streams = ModuleStream.list({'product-id': prod_2['id']}) - assert len(module_streams) == 7, 'Module Streams list by product failed' + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -2014,7 +1909,7 @@ def test_module_stream_list_validation(self, module_org, repo, repo_options_2): **parametrized([{'content-type': 'yum', 'url': settings.repos.module_stream_1.url}]), indirect=True, ) - def test_module_stream_info_validation(self, repo): + def test_module_stream_info_validation(self, repo, module_target_sat): """Check module-stream get with info on hammer. :id: ddbeb49e-d292-4dc4-8fb9-e9b768acc441a2c2e797-02b7-4b12-9f95-cffc93254198 @@ -2024,7 +1919,7 @@ def test_module_stream_info_validation(self, repo): :Setup: 1. valid yum repo with Module Streams. - :Steps: + :steps: 1. Create Yum Repositories with url contain module-streams 2. Initialize synchronization 3. Verify the module-stream info with various inputs options @@ -2033,11 +1928,11 @@ def test_module_stream_info_validation(self, repo): :CaseAutomation: Automated """ - Repository.synchronize({'id': repo['id']}) - module_streams = ModuleStream.list( + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_streams = module_target_sat.cli.ModuleStream.list( {'repository-id': repo['id'], 'search': 'name="walrus" and stream="5.21"'} ) - actual_result = ModuleStream.info({'id': module_streams[0]['id']}) + actual_result = module_target_sat.cli.ModuleStream.info({'id': module_streams[0]['id']}) expected_result = { 'module-stream-name': 'walrus', 'stream': '5.21', @@ -2049,7 +1944,7 @@ def test_module_stream_info_validation(self, repo): @pytest.mark.tier1 @pytest.mark.skip_if_open('BZ:2002653') - def test_negative_update_red_hat_repo(self, module_manifest_org): + def test_negative_update_red_hat_repo(self, module_manifest_org, module_target_sat): """Updates to Red Hat products fail. :id: d3ac0ea2-faab-4df4-be66-733e1b7ae6b4 @@ -2058,7 +1953,7 @@ def test_negative_update_red_hat_repo(self, module_manifest_org): :BZ: 1756951, 2002653 - :Steps: + :steps: 1. Import manifest and enable a Red Hat repository. 2. Attempt to update the Red Hat repository: # hammer repository update --id --url http://example.com/repo @@ -2066,26 +1961,34 @@ def test_negative_update_red_hat_repo(self, module_manifest_org): :expectedresults: hammer returns error code. The repository is not updated. 
""" - rh_repo_set_id = RepositorySet.list({'organization-id': module_manifest_org.id})[0]['id'] + rh_repo_set_id = module_target_sat.cli.RepositorySet.list( + {'organization-id': module_manifest_org.id} + )[0]['id'] - RepositorySet.enable( + module_target_sat.cli.RepositorySet.enable( { 'organization-id': module_manifest_org.id, 'basearch': "x86_64", 'id': rh_repo_set_id, } ) - repo_list = Repository.list({'organization-id': module_manifest_org.id}) + repo_list = module_target_sat.cli.Repository.list( + {'organization-id': module_manifest_org.id} + ) - rh_repo_id = Repository.list({'organization-id': module_manifest_org.id})[0]['id'] + rh_repo_id = module_target_sat.cli.Repository.list( + {'organization-id': module_manifest_org.id} + )[0]['id'] - Repository.update( + module_target_sat.cli.Repository.update( { 'id': rh_repo_id, 'url': f'{gen_url(scheme="https")}:{gen_integer(min_value=10, max_value=9999)}', } ) - repo_info = Repository.info({'organization-id': module_manifest_org.id, 'id': rh_repo_id}) + repo_info = module_target_sat.cli.Repository.info( + {'organization-id': module_manifest_org.id, 'id': rh_repo_id} + ) assert repo_info['url'] in [repo.get('url') for repo in repo_list] @pytest.mark.tier1 @@ -2121,8 +2024,8 @@ def test_positive_accessible_content_status( **parametrized([{'content_type': 'yum', 'url': CUSTOM_RPM_SHA}]), indirect=True, ) - def test_positive_sync_sha_repo(self, repo_options): - """Sync a 'sha' repo successfully + def test_positive_sync_sha_repo(self, repo_options, module_target_sat): + """Sync repository with 'sha' checksum, which uses 'sha1' in particular actually :id: 20579f52-a67b-4d3f-be07-41eec059a891 @@ -2132,12 +2035,12 @@ def test_positive_sync_sha_repo(self, repo_options): :BZ: 2024889 - :SubComponent: Candlepin + :SubComponent: Pulp """ - sha_repo = make_repository(repo_options) - sha_repo = Repository.info({'id': sha_repo['id']}) - Repository.synchronize({'id': sha_repo['id']}) - sha_repo = Repository.info({'id': sha_repo['id']}) + sha_repo = module_target_sat.cli_factory.make_repository(repo_options) + sha_repo = module_target_sat.cli.Repository.info({'id': sha_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': sha_repo['id']}) + sha_repo = module_target_sat.cli.Repository.info({'id': sha_repo['id']}) assert sha_repo['sync']['status'] == 'Success' @pytest.mark.tier2 @@ -2146,7 +2049,7 @@ def test_positive_sync_sha_repo(self, repo_options): **parametrized([{'content_type': 'yum', 'url': CUSTOM_3RD_PARTY_REPO}]), indirect=True, ) - def test_positive_sync_third_party_repo(self, repo_options): + def test_positive_sync_third_party_repo(self, repo_options, module_target_sat): """Sync third party repo successfully :id: 45936ab8-46b7-4f07-8b71-d7c8a4a2d984 @@ -2159,10 +2062,10 @@ def test_positive_sync_third_party_repo(self, repo_options): :SubComponent: Pulp """ - repo = make_repository(repo_options) - repo = Repository.info({'id': repo['id']}) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli_factory.make_repository(repo_options) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' @@ -2288,9 +2191,8 @@ def test_positive_sync_third_party_repo(self, repo_options): # :parametrized: yes # # :expectedresults: Ostree repository is created and synced -# -# :CaseLevel: Integration -# + + 
@@ -2288,9 +2191,8 @@ def test_positive_sync_third_party_repo(self, repo_options): # :parametrized: yes # # :expectedresults: Ostree repository is created and synced -# -# :CaseLevel: Integration -# + + # :BZ: 1625783 # """ # # Synchronize it @@ -2370,7 +2272,7 @@ def test_positive_sync(self, repo, module_org, module_product, target_sat): :expectedresults: srpms can be listed in repository """ - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) result = target_sat.execute( f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library" f"/custom/{module_product.label}/{repo['label']}/Packages/t/ | grep .src.rpm" ) @@ -2393,10 +2295,10 @@ def test_positive_sync_publish_cv(self, module_org, module_product, repo, target :expectedresults: srpms can be listed in content view """ - Repository.synchronize({'id': repo['id']}) - cv = make_content_view({'organization-id': module_org.id}) - ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': cv['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) result = target_sat.execute( f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/content_views/" f"{cv['label']}/1.0/custom/{module_product.label}/{repo['label']}/Packages/t/" @@ -2422,14 +2324,16 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product :expectedresults: srpms can be listed in content view in proper lifecycle environment """ - lce = make_lifecycle_environment({'organization-id': module_org.id}) - Repository.synchronize({'id': repo['id']}) - cv = make_content_view({'organization-id': module_org.id}) - ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': cv['id']}) - content_view = ContentView.info({'id': cv['id']}) + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view = target_sat.cli.ContentView.info({'id': cv['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}) + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']} + ) result = target_sat.execute( f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/{lce['label']}/" f"{cv['label']}/custom/{module_product.label}/{repo['label']}/Packages/t" @@ -2466,22 +2370,20 @@ class TestAnsibleCollectionRepository: ids=['ansible_galaxy', 'ansible_hub'], indirect=True, ) - def test_positive_sync_ansible_collection(self, repo, module_org, module_product): + def test_positive_sync_ansible_collection(self, repo, module_target_sat): """Sync ansible collection repository from Ansible Galaxy and Hub :id: 4b6a819b-8c3d-4a74-bd97-ee3f34cf5d92 :expectedresults: All content synced successfully - :CaseLevel: Integration - :CaseImportance: High :parametrized: yes """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success'
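The ansible_collection tests that follow pass the set of collections to pull as a YAML string under 'ansible-collection-requirements', the same shape as a Galaxy requirements.yml. A short sketch of creating and syncing such a repository with the scoped CLI, assuming the target_sat fixture used in this class; the product dict and the upstream URL are illustrative stand-ins for values the real tests take from fixtures and settings:

# Sketch only: option names match those used by the tests in this class.
requirements = '{collections: [{ name: theforeman.operations, version: "0.1.0"}]}'
repo = target_sat.cli_factory.make_repository(
    {
        'organization-id': module_org.id,  # assumes the module_org fixture
        'product-id': product['id'],  # any existing product in the org (illustrative)
        'content-type': 'ansible_collection',
        'url': 'https://galaxy.ansible.com',  # assumed upstream; tests read theirs from settings
        'ansible-collection-requirements': requirements,
    }
)
target_sat.cli.Repository.synchronize({'id': repo['id']})
assert target_sat.cli.Repository.info({'id': repo['id']})['sync']['status'] == 'Success'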
@pytest.mark.tier2 @@ -2500,39 +2402,45 @@ def test_positive_sync_ansible_collection(self, repo, module_org, module_product ids=['ansible_galaxy'], indirect=True, ) - def test_positive_export_ansible_collection(self, repo, module_org, module_product, target_sat): + def test_positive_export_ansible_collection(self, repo, module_org, target_sat): """Export ansible collection between organizations :id: 4858227e-1669-476d-8da3-4e6bfb6b7e2a :expectedresults: All content exported and imported successfully - :CaseLevel: Integration - :CaseImportance: High """ - import_org = make_org() - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + import_org = target_sat.cli_factory.make_org() + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' # export - result = ContentExport.completeLibrary({'organization-id': module_org.id}) + result = target_sat.cli.ContentExport.completeLibrary({'organization-id': module_org.id}) target_sat.execute(f'cp -r /var/lib/pulp/exports/{module_org.name} /var/lib/pulp/imports/.') target_sat.execute('chown -R pulp:pulp /var/lib/pulp/imports') export_metadata = result['message'].split()[1] # import import_path = export_metadata.replace('/metadata.json', '').replace('exports', 'imports') - ContentImport.library({'organization-id': import_org['id'], 'path': import_path}) - cv = ContentView.info({'name': 'Import-Library', 'organization-label': import_org['label']}) + target_sat.cli.ContentImport.library( + {'organization-id': import_org['id'], 'path': import_path} + ) + cv = target_sat.cli.ContentView.info( + {'name': 'Import-Library', 'organization-label': import_org['label']} + ) assert cv['description'] == 'Content View used for importing into library' - prods = Product.list({'organization-id': import_org['id']}) - prod = Product.info({'id': prods[0]['id'], 'organization-id': import_org['id']}) + prods = target_sat.cli.Product.list({'organization-id': import_org['id']}) + prod = target_sat.cli.Product.info( + {'id': prods[0]['id'], 'organization-id': import_org['id']} + ) ac_content = [ cont for cont in prod['content'] if cont['content-type'] == 'ansible_collection' ] assert len(ac_content) > 0 - repo = Repository.info({'name': ac_content[0]['repo-name'], 'product-id': prod['id']}) + repo = target_sat.cli.Repository.info( + {'name': ac_content[0]['repo-name'], 'product-id': prod['id']} + ) result = target_sat.execute(f'curl {repo["published-at"]}') assert "available_versions" in result.stdout @@ -2552,41 +2460,37 @@ def test_positive_export_ansible_collection(self, repo, module_org, module_produ ids=['ansible_galaxy'], indirect=True, ) - def test_positive_sync_ansible_collection_from_satellite( - self, repo, module_org, module_product, target_sat - ): + def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat): """Sync ansible collection from another organization :id: f7897a56-d014-4189-b4c7-df8f15aaf30a :expectedresults: All content synced successfully - :CaseLevel: Integration - :CaseImportance: High """ - import_org = make_org() - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + import_org = target_sat.cli_factory.make_org() + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' published_url = repo['published-at'] # sync from different org - prod_2 = 
make_product( + prod_2 = target_sat.cli_factory.make_product( {'organization-id': import_org['id'], 'description': 'Sync from Satellite'} ) - repo_2 = make_repository( + repo_2 = target_sat.cli_factory.make_repository( { 'organization-id': import_org['id'], 'product-id': prod_2['id'], 'url': published_url, 'content-type': 'ansible_collection', - 'ansible-collection-reqirements': '{collections: \ + 'ansible-collection-requirements': '{collections: \ [{ name: theforeman.operations, version: "0.1.0"}]}', } ) - Repository.synchronize({'id': repo_2['id']}) - repo_2_status = Repository.info({'id': repo_2['id']}) + target_sat.cli.Repository.synchronize({'id': repo_2['id']}) + repo_2_status = target_sat.cli.Repository.info({'id': repo_2['id']}) assert repo_2_status['sync']['status'] == 'Success' @@ -2598,7 +2502,7 @@ class TestMD5Repository: @pytest.mark.parametrize( 'repo_options', **parametrized([{'url': FAKE_YUM_MD5_REPO}]), indirect=True ) - def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product, target_sat): + def test_positive_sync_publish_promote_cv(self, repo, module_org, target_sat): """Synchronize MD5 signed repository with add repository to content view, publish and promote content view to lifecycle environment @@ -2609,296 +2513,24 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product :expectedresults: rpms can be listed in content view in proper lifecycle environment """ - lce = make_lifecycle_environment({'organization-id': module_org.id}) - Repository.synchronize({'id': repo['id']}) - synced_repo = Repository.info({'id': repo['id']}) + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + synced_repo = target_sat.cli.Repository.info({'id': repo['id']}) assert synced_repo['sync']['status'].lower() == 'success' assert synced_repo['content-counts']['packages'] == '35' - cv = make_content_view({'organization-id': module_org.id}) - ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': cv['id']}) - content_view = ContentView.info({'id': cv['id']}) + cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view = target_sat.cli.ContentView.info({'id': cv['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}) - cv = ContentView.info({'id': cv['id']}) + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']} + ) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert synced_repo['id'] in [repo['id'] for repo in cv['yum-repositories']] assert lce['id'] in [lc['id'] for lc in cv['lifecycle-environments']] -@pytest.mark.skip_if_open("BZ:1682951") -class TestDRPMRepository: - """Tests specific to using repositories containing delta RPMs.""" - - @pytest.mark.tier2 - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs - - :id: a645966c-750b-40ef-a264-dc3bb632b9fd - - :parametrized: yes - - :expectedresults: drpms can be listed in repository - """ - 
Repository.synchronize({'id': repo['id']}) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library" - f"/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - @pytest.mark.tier2 - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync_publish_cv(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs, add repository to content view - and publish content view - - :id: 014bfc80-4622-422e-a0ec-755b1d9f845e - - :parametrized: yes - - :expectedresults: drpms can be listed in content view - """ - Repository.synchronize({'id': repo['id']}) - cv = make_content_view({'organization-id': module_org.id}) - ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': cv['id']}) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/content_views/" - f"{cv['label']}/1.0/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - @pytest.mark.tier2 - @pytest.mark.upgrade - @pytest.mark.skip("Uses deprecated DRPM repository") - @pytest.mark.parametrize( - 'repo_options', **parametrized([{'url': FAKE_YUM_DRPM_REPO}]), indirect=True - ) - def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product, target_sat): - """Synchronize repository with DRPMs, add repository to content view, - publish and promote content view to lifecycle environment - - :id: a01cb12b-d388-4902-8532-714f4e28ec56 - - :parametrized: yes - - :expectedresults: drpms can be listed in content view in proper - lifecycle environment - """ - lce = make_lifecycle_environment({'organization-id': module_org.id}) - Repository.synchronize({'id': repo['id']}) - cv = make_content_view({'organization-id': module_org.id}) - ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': cv['id']}) - content_view = ContentView.info({'id': cv['id']}) - cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}) - result = target_sat.execute( - f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/{lce['label']}" - f"/{cv['label']}/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm" - ) - assert result.status == 0 - assert result.stdout - - -class TestGitPuppetMirror: - """Tests for creating the hosts via CLI. - - Notes for GIT puppet mirror content - - This feature does not allow us to actually sync / update the content in a - GIT repo. Instead, we essentially "snapshot" a repo's contents at any - given time. The ability to update the GIT puppet mirror is / should - be provided by Pulp itself, via a script. However, we should be able to - # create a sync schedule against the mirror to make sure it is periodically - updated to contain the latest and greatest. - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_local_create(self): - """Create repository with local git puppet mirror. 
- - :id: 89211cd5-82b8-4391-b729-a7502e57f824 - - :CaseLevel: Integration - - :Setup: Assure local GIT puppet has been created and found by pulp - - :Steps: Create link to local puppet mirror via cli - - :expectedresults: Content source containing local GIT puppet mirror - content is created - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_local_update(self): - """Update repository with local git puppet mirror. - - :id: 341f40f2-3501-4754-9acf-7cda1a61f7db - - :CaseLevel: Integration - - :Setup: Assure local GIT puppet has been created and found by pulp - - :Steps: Modify details for existing puppet repo (name, etc.) via cli - - :expectedresults: Content source containing local GIT puppet mirror - content is modified - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_git_local_delete(self): - """Delete repository with local git puppet mirror. - - :id: a243f5bb-5186-41b3-8e8a-07d5cc784ccd - - :CaseLevel: Integration - - :Setup: Assure local GIT puppet has been created and found by pulp - - :Steps: Delete link to local puppet mirror via cli - - :expectedresults: Content source containing local GIT puppet mirror - content no longer exists/is available. - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_remote_create(self): - """Create repository with remote git puppet mirror. - - :id: 8582529f-3112-4b49-8d8f-f2bbf7dceca7 - - :CaseLevel: Integration - - :Setup: Assure remote GIT puppet has been created and found by pulp - - :Steps: Create link to local puppet mirror via cli - - :expectedresults: Content source containing remote GIT puppet mirror - content is created - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_remote_update(self): - """Update repository with remote git puppet mirror. - - :id: 582c50b3-3b90-4244-b694-97642b1b13a9 - - :CaseLevel: Integration - - :Setup: Assure remote GIT puppet has been created and found by pulp - - :Steps: modify details for existing puppet repo (name, etc.) via cli - - :expectedresults: Content source containing remote GIT puppet mirror - content is modified - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - @pytest.mark.upgrade - def test_positive_git_remote_delete(self): - """Delete repository with remote git puppet mirror. - - :id: 0a23f969-b202-4c6c-b12e-f651a0b7d049 - - :CaseLevel: Integration - - :Setup: Assure remote GIT puppet has been created and found by pulp - - :Steps: Delete link to remote puppet mirror via cli - - :expectedresults: Content source containing remote GIT puppet mirror - content no longer exists/is available. - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_sync(self): - """Sync repository with git puppet mirror. - - :id: a46c16bd-0986-48db-8e62-aeb3907ba4d2 - - :CaseLevel: Integration - - :Setup: git mirror (local or remote) exists as a content source - - :Steps: Attempt to sync content from mirror via cli - - :expectedresults: Content is pulled down without error - - :expectedresults: Confirmation that various resources actually exist in - local content repo - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_sync_schedule(self): - """Scheduled sync of git puppet mirror. 
- - :id: 0d58d180-9836-4524-b608-66b67f9cab12 - - :CaseLevel: Integration - - :Setup: git mirror (local or remote) exists as a content source - - :Steps: Attempt to create a scheduled sync content from mirror, via cli - - :expectedresults: Content is pulled down without error on expected - schedule - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed - @pytest.mark.tier2 - def test_positive_git_view_content(self): - """View content in synced git puppet mirror - - :id: 02f06092-dd6c-49fa-be9f-831e52476e41 - - :CaseLevel: Integration - - :Setup: git mirror (local or remote) exists as a content source - - :Steps: Attempt to list contents of repo via cli - - :expectedresults: Spot-checked items (filenames, dates, perhaps - checksums?) are correct. - - :CaseAutomation: NotAutomated - """ - - class TestFileRepository: """Specific tests for File Repositories""" @@ -2915,7 +2547,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat) :parametrized: yes - :Steps: + :steps: 1. Create a File Repository 2. Upload an arbitrary file to it @@ -2929,7 +2561,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat) local_path=DataFile.RPM_TO_UPLOAD, remote_path=f"/tmp/{RPM_TO_UPLOAD}", ) - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -2938,32 +2570,12 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat) } ) assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['files'] == '1' filesearch = entities.File().search( query={"search": f"name={RPM_TO_UPLOAD} and repository={repo['name']}"} ) - assert RPM_TO_UPLOAD == filesearch[0].name - - @pytest.mark.stubbed - @pytest.mark.tier1 - def test_positive_file_permissions(self): - """Check file permissions after file upload to File Repository - - :id: 03da888a-69ba-492f-b204-c62d85948d8a - - :Setup: - 1. Create a File Repository - 2. Upload an arbitrary file to it - - :Steps: Retrieve file permissions from File Repository - - :expectedresults: uploaded file permissions are kept after upload - - :CaseAutomation: NotAutomated - - :CaseImportance: Critical - """ + assert filesearch[0].name == RPM_TO_UPLOAD @pytest.mark.tier1 @pytest.mark.upgrade @@ -2983,7 +2595,7 @@ def test_positive_remove_file(self, repo, target_sat): 1. Create a File Repository 2. 
Upload an arbitrary file to it - :Steps: Remove a file from File Repository + :steps: Remove a file from File Repository :expectedresults: file is not listed under File Repository after removal @@ -2994,7 +2606,7 @@ def test_positive_remove_file(self, repo, target_sat): local_path=DataFile.RPM_TO_UPLOAD, remote_path=f"/tmp/{RPM_TO_UPLOAD}", ) - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -3003,11 +2615,13 @@ def test_positive_remove_file(self, repo, target_sat): } ) assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['files']) > 0 - files = File.list({'repository-id': repo['id']}) - Repository.remove_content({'id': repo['id'], 'ids': [file_['id'] for file_ in files]}) - repo = Repository.info({'id': repo['id']}) + files = target_sat.cli.File.list({'repository-id': repo['id']}) + target_sat.cli.Repository.remove_content( + {'id': repo['id'], 'ids': [file_['id'] for file_ in files]} + ) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['files'] == '0' @pytest.mark.tier2 @@ -3025,7 +2639,7 @@ def test_positive_remove_file(self, repo, target_sat): ), indirect=True, ) - def test_positive_remote_directory_sync(self, repo): + def test_positive_remote_directory_sync(self, repo, module_target_sat): """Check an entire remote directory can be synced to File Repository through http @@ -3037,7 +2651,7 @@ def test_positive_remote_directory_sync(self, repo): 1. Create a directory to be synced with a pulp manifest on its root 2. Make the directory available through http - :Steps: + :steps: 1. Create a File Repository with url pointing to http url created on setup 2. Initialize synchronization @@ -3045,8 +2659,8 @@ def test_positive_remote_directory_sync(self, repo): :expectedresults: entire directory is synced over http """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['files'] == '2' @@ -3067,7 +2681,7 @@ def test_positive_file_repo_local_directory_sync(self, repo, target_sat): 1. Create a directory to be synced with a pulp manifest on its root locally (on the Satellite/Foreman host) - :Steps: + :steps: 1. Create a File Repository with url pointing to local url created on setup 2. Initialize synchronization @@ -3083,8 +2697,8 @@ def test_positive_file_repo_local_directory_sync(self, repo, target_sat): f'wget -P {CUSTOM_LOCAL_FOLDER} -r -np -nH --cut-dirs=5 -R "index.html*" ' f'{CUSTOM_FILE_REPO}' ) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['files']) > 1 @pytest.mark.tier2 @@ -3105,7 +2719,7 @@ def test_positive_symlinks_sync(self, repo, target_sat): locally (on the Satellite/Foreman host) 2. Make sure it contains symlinks - :Steps: + :steps: 1. Create a File Repository with url pointing to local url created on setup 2. 
Initialize synchronization @@ -3123,8 +2737,8 @@ def test_positive_symlinks_sync(self, repo, target_sat): ) target_sat.execute(f'ln -s {CUSTOM_LOCAL_FOLDER} /{gen_string("alpha")}') - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['files']) > 1 @pytest.mark.tier2 @@ -3147,7 +2761,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat 4. Add some text keyword to the file locally. 5. Upload new version of file. - :Steps: + :steps: 1. Check that the repo contains only the new version of the file :expectedresults: The file is not duplicated and only the latest version of the file @@ -3157,7 +2771,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat """ text_file_name = f'test-{gen_string("alpha", 5)}.txt'.lower() target_sat.execute(f'echo "First File" > /tmp/{text_file_name}') - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -3166,7 +2780,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat } ) assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) # Assert there is only one file assert repo['content-counts']['files'] == '1' filesearch = entities.File().search( @@ -3175,7 +2789,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat assert text_file_name == filesearch[0].name # Create new version of the file by changing the text target_sat.execute(f'echo "Second File" > /tmp/{text_file_name}') - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -3184,7 +2798,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat } ) assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) # Assert there is still only one file assert repo['content-counts']['files'] == '1' filesearch = entities.File().search( @@ -3197,81 +2811,6 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat assert 'Second File' in textfile.text -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_copy_package_group_between_repos(): - """ - Copy a group of packages from one repo to another. - - :id: 18d832fc-7e27-4067-99ea-5da9eef22253 - - :Setup: - 1. Add a product and sync a repo which has package groups (repo 1) - 2. Create another product and create a yum repo (repo 2) - 3. Select the package group from repo 1 and sync it to repo 2 - - :Steps: - Assert the list of package in repo 2 matches the group list from repo 1 - - :CaseAutomation: NotAutomated - - :CaseImportance: Medium - """ - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_include_and_exclude_content_units(): - """ - Select two packages and include and exclude some dependencies - and then copy them from one repo to another. - - :id: 073a0ade-6860-4b34-b64f-0f1a75025356 - - :Setup: - 1. Add a product and sync a repo which has packages with dependencies (repo 1) - 2. Create another product and create a yum repo (repo 2) - 3. 
Select a package and include its dependencies - 4. Select a package and exclude its dependencies - 5. Copy packages from repo 1 to repo 2 - - :Steps: - Assert the list of packages in repo 2 matches the packages selected in repo 1, - including only those dependencies expected. - - :CaseAutomation: NotAutomated - - :CaseImportance: Medium - """ - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_copy_erratum_and_RPMs_within_a_date_range(): - """ - Select some packages, filer by date range, - and then copy them from one repo to another. - - :id: da48011b-841a-4706-84b5-2dcfe371c30a - - :Setup: - 1. Add a product and sync a repo which has packages with dependencies (repo 1) - 2. Create another product and create a yum repo (repo 2) - 3. Select some packages and include dependencies - 4. Filter by date range - 5. Copy filtered list of items from repo 1 to repo 2 - 6. Repeat using errata in place of RPMs - - :Steps: - Assert the list of packages or errata in repo 2 matches those selected - and filtered in repo 1, including those dependencies expected. - - :CaseAutomation: NotAutomated - - :CaseImportance: Medium - """ - - @pytest.mark.tier2 def test_positive_syncable_yum_format_repo_import(target_sat, module_org): """Verify that you can import syncable yum format repositories @@ -3306,3 +2845,89 @@ def test_positive_syncable_yum_format_repo_import(target_sat, module_org): ) assert repodata['content-counts']['packages'] != 0 assert repodata['sync']['status'] == 'Success' + + +@pytest.mark.rhel_ver_list([9]) +def test_positive_install_uploaded_rpm_on_host( + target_sat, rhel_contenthost, function_org, function_lce +): + """Verify that uploaded rpm successfully install on content host + + :id: 8701782e-2d1e-41b7-a9dc-07325bfeaf1c + + :steps: + 1. Create product, custom repo and upload rpm into repo + 2. Create CV, add repo into it & publish CV + 3. Upload package on to the satellite, rename it then upload it into repo + 4. 
Register host with satellite and install rpm on it
+
+    :expectedresults: rpm should install successfully
+
+    :customerscenario: true
+
+    :BZ: 2161993
+    """
+    product = target_sat.cli_factory.make_product({'organization-id': function_org.id})
+    repo = target_sat.cli_factory.make_repository(
+        {
+            'content-type': 'yum',
+            'name': gen_string('alpha', 5),
+            'product-id': product['id'],
+        }
+    )
+    target_sat.cli.Repository.synchronize({'id': repo['id']})
+    sync_status = target_sat.cli.Repository.info({'id': repo['id']})['sync']['status']
+    assert sync_status == 'Success', f'Failed to sync {repo["name"]} repo'
+
+    # Upload package
+    target_sat.put(
+        local_path=DataFile.FAKE_3_YUM_REPO_RPMS_ANT,
+        remote_path=f"/tmp/{FAKE_3_YUM_REPO_RPMS[0]}",
+    )
+    # Rename the uploaded package
+    rpm_new_name = f'{gen_string(str_type="alpha", length=5)}.rpm'
+    target_sat.execute(f"mv /tmp/{FAKE_3_YUM_REPO_RPMS[0]} /tmp/{rpm_new_name}")
+
+    result = target_sat.cli.Repository.upload_content(
+        {
+            'name': repo['name'],
+            'organization': repo['organization'],
+            'path': f"/tmp/{rpm_new_name}",
+            'product-id': repo['product']['id'],
+        }
+    )
+    assert f"Successfully uploaded file '{rpm_new_name}'" in result[0]['message']
+    # Query the package file itself (-qp) to record its real NVR for the final check
+    queryinfo = target_sat.execute(f"rpm -qp /tmp/{rpm_new_name}")
+
+    content_view = target_sat.cli_factory.make_content_view(
+        {'organization-id': function_org.id, 'name': gen_string('alpha', 5)}
+    )
+    target_sat.cli.ContentView.add_repository(
+        {'id': content_view['id'], 'repository-id': repo['id']}
+    )
+    target_sat.cli.ContentView.publish({'id': content_view['id']})
+    content_view = target_sat.cli.ContentView.info({'id': content_view['id']})
+    target_sat.cli.ContentView.version_promote(
+        {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': function_lce.id}
+    )
+    activation_key = target_sat.cli_factory.make_activation_key(
+        {
+            'organization-id': function_org.id,
+            'lifecycle-environment-id': function_lce.id,
+            'content-view-id': content_view['id'],
+        }
+    )
+    target_sat.cli.ActivationKey.content_override(
+        {'id': activation_key.id, 'content-label': repo.content_label, 'value': 'true'}
+    )
+    assert (
+        rhel_contenthost.register(function_org, None, activation_key.name, target_sat).status == 0
+    )
+    assert (
+        rhel_contenthost.execute(f'yum install -y {FAKE_3_YUM_REPO_RPMS[0].split("-")[0]}').status
+        == 0
+    )
+    installed_package_name = rhel_contenthost.execute(
+        f'rpm -q {FAKE_3_YUM_REPO_RPMS[0].split("-")[0]}'
+    )
+    assert installed_package_name.stdout == queryinfo.stdout
diff --git a/tests/foreman/cli/test_repository_set.py b/tests/foreman/cli/test_repository_set.py
index cf9f991991a..092034c6dc8 100644
--- a/tests/foreman/cli/test_repository_set.py
+++ b/tests/foreman/cli/test_repository_set.py
@@ -4,30 +4,22 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: Component
-
 :CaseComponent: Repositories
 
 :team: Phoenix-content
 
-:TestType: Functional
-
 :CaseImportance: High
 
-:Upstream: No
 """
 import pytest
 
-from robottelo.cli.product import Product
-from robottelo.cli.repository_set import RepositorySet
-from robottelo.constants import PRDS
-from robottelo.constants import REPOSET
+from robottelo.constants import PRDS, REPOSET
 
 pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.tier1]
 
 
 @pytest.fixture
-def params(function_entitlement_manifest_org):
+def params(function_entitlement_manifest_org, target_sat):
     PRODUCT_NAME = PRDS['rhel']
     REPOSET_NAME = REPOSET['rhva6']
     ARCH = 'x86_64'
@@ -35,8 +27,10 @@ def params(function_entitlement_manifest_org):
     RELEASE = '6Server'
 
     manifest_org = 
function_entitlement_manifest_org - product_id = Product.info({'name': PRODUCT_NAME, 'organization-id': manifest_org.id})['id'] - reposet_id = RepositorySet.info( + product_id = target_sat.cli.Product.info( + {'name': PRODUCT_NAME, 'organization-id': manifest_org.id} + )['id'] + reposet_id = target_sat.cli.RepositorySet.info( {'name': REPOSET_NAME, 'organization-id': manifest_org.id, 'product-id': product_id} )['id'] @@ -115,7 +109,7 @@ def match_repos(repos, match_params): @pytest.mark.upgrade -def test_positive_list_available_repositories(params): +def test_positive_list_available_repositories(params, target_sat): """List available repositories for repository-set :id: 987d6b08-acb0-4264-a459-9cef0d2c6f3f @@ -126,39 +120,39 @@ def test_positive_list_available_repositories(params): :CaseImportance: Critical """ # No repos should be enabled by default - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 0 # Enable repo from Repository Set - RepositorySet.enable(params['enable']['id']) + target_sat.cli.RepositorySet.enable(params['enable']['id']) # Only 1 repo should be enabled, and it should match the arch and releasever - result = RepositorySet.available_repositories(params['avail']['name']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled'])) == 1 # Enable one more repo - RepositorySet.enable(params['enable']['arch_2']) + target_sat.cli.RepositorySet.enable(params['enable']['arch_2']) # 2 repos should be enabled - result = RepositorySet.available_repositories(params['avail']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled'])) == 2 # Disable one repo - RepositorySet.disable(params['enable']['id']) + target_sat.cli.RepositorySet.disable(params['enable']['id']) # There should remain only 1 enabled repo - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 1 # Disable the last enabled repo - RepositorySet.disable(params['enable']['arch_2']) + target_sat.cli.RepositorySet.disable(params['enable']['arch_2']) # There should be no enabled repos - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_enable_by_name(params): +def test_positive_enable_by_name(params, target_sat): """Enable repo from reposet by names of reposet, org and product :id: a78537bd-b88d-4f00-8901-e7944e5de729 @@ -167,12 +161,12 @@ def test_positive_enable_by_name(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['name']) - result = RepositorySet.available_repositories(params['avail']['name']) + target_sat.cli.RepositorySet.enable(params['enable']['name']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_enable_by_label(params): +def test_positive_enable_by_label(params, target_sat): """Enable repo from reposet by org label, reposet and product names @@ -182,12 
+176,12 @@ def test_positive_enable_by_label(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['label']) - result = RepositorySet.available_repositories(params['avail']['label']) + target_sat.cli.RepositorySet.enable(params['enable']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_enable_by_id(params): +def test_positive_enable_by_id(params, target_sat): """Enable repo from reposet by IDs of reposet, org and product :id: f7c88534-1d45-45d9-9b87-c50c4e268e8d @@ -196,12 +190,12 @@ def test_positive_enable_by_id(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['ids']) - result = RepositorySet.available_repositories(params['avail']['ids']) + target_sat.cli.RepositorySet.enable(params['enable']['ids']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['ids']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_disable_by_name(params): +def test_positive_disable_by_name(params, target_sat): """Disable repo from reposet by names of reposet, org and product @@ -211,13 +205,13 @@ def test_positive_disable_by_name(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['name']) - RepositorySet.disable(params['enable']['name']) - result = RepositorySet.available_repositories(params['avail']['name']) + target_sat.cli.RepositorySet.enable(params['enable']['name']) + target_sat.cli.RepositorySet.disable(params['enable']['name']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_disable_by_label(params): +def test_positive_disable_by_label(params, target_sat): """Disable repo from reposet by org label, reposet and product names @@ -227,13 +221,13 @@ def test_positive_disable_by_label(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['label']) - RepositorySet.disable(params['enable']['label']) - result = RepositorySet.available_repositories(params['avail']['label']) + target_sat.cli.RepositorySet.enable(params['enable']['label']) + target_sat.cli.RepositorySet.disable(params['enable']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_disable_by_id(params): +def test_positive_disable_by_id(params, target_sat): """Disable repo from reposet by IDs of reposet, org and product :id: 0d6102ba-3fb9-4eb8-972e-d537e252a8e6 @@ -242,7 +236,7 @@ def test_positive_disable_by_id(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['ids']) - RepositorySet.disable(params['enable']['ids']) - result = RepositorySet.available_repositories(params['avail']['ids']) + target_sat.cli.RepositorySet.enable(params['enable']['ids']) + target_sat.cli.RepositorySet.disable(params['enable']['ids']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['ids']) assert len(match_repos(result, params['match']['enabled'])) == 0 diff --git a/tests/foreman/cli/test_rhcloud_insights.py b/tests/foreman/cli/test_rhcloud_insights.py deleted file mode 100644 index 8f9ebb246c1..00000000000 --- a/tests/foreman/cli/test_rhcloud_insights.py +++ /dev/null @@ -1,57 +0,0 @@ -"""CLI tests for Insights part of RH Cloud - Inventory plugin. 
- -:Requirement: RH Cloud - Inventory - -:CaseAutomation: Automated - -:CaseLevel: System - -:CaseComponent: RHCloud-Inventory - -:Team: Platform - -:TestType: Functional - -:CaseImportance: High - -:Upstream: No -""" -import pytest -from broker import Broker - -from robottelo.hosts import ContentHost - - -@pytest.mark.e2e -@pytest.mark.tier4 -@pytest.mark.parametrize('distro', ['rhel7', 'rhel8']) -def test_positive_connection_option(organization_ak_setup, rhcloud_sat_host, distro): - """Verify that 'insights-client --test-connection' successfully tests the proxy connection via - the Satellite. - - :id: 61a4a39e-b484-49f4-a6fd-46ffc7736e50 - - :customerscenario: true - - :Steps: - - 1. Create RHEL7 and RHEL8 VM and register to insights within org having manifest. - - 2. Run 'insights-client --test-connection'. - - :expectedresults: 'insights-client --test-connection' should return 0. - - :BZ: 1976754 - - :CaseImportance: Critical - """ - org, activation_key = organization_ak_setup - with Broker(nick=distro, host_class=ContentHost) as vm: - vm.configure_rhai_client(rhcloud_sat_host, activation_key.name, org.label, distro) - result = vm.run('insights-client --test-connection') - assert result.status == 0, ( - 'insights-client --test-connection failed.\n' - f'status: {result.status}\n' - f'stdout: {result.stdout}\n' - f'stderr: {result.stderr}' - ) diff --git a/tests/foreman/cli/test_rhcloud_inventory.py b/tests/foreman/cli/test_rhcloud_inventory.py index 0668df0f941..ed84fd465d7 100644 --- a/tests/foreman/cli/test_rhcloud_inventory.py +++ b/tests/foreman/cli/test_rhcloud_inventory.py @@ -1,30 +1,24 @@ """CLI tests for RH Cloud - Inventory, aka Insights Inventory Upload -:Requirement: RH Cloud - Inventory +:Requirement: RHCloud :CaseAutomation: Automated -:CaseLevel: System +:CaseComponent: RHCloud -:CaseComponent: RHCloud-Inventory - -:Team: Platform - -:TestType: Functional +:Team: Phoenix-subscriptions :CaseImportance: High -:Upstream: No """ -import time from datetime import datetime +import time import pytest from wait_for import wait_for from robottelo.config import robottelo_tmp_dir -from robottelo.utils.io import get_local_file_data -from robottelo.utils.io import get_remote_report_checksum +from robottelo.utils.io import get_local_file_data, get_remote_report_checksum inventory_sync_task = 'InventorySync::Async::InventoryFullSync' generate_report_jobs = 'ForemanInventoryUpload::Async::GenerateAllReportsJob' @@ -33,7 +27,7 @@ @pytest.mark.tier3 @pytest.mark.e2e def test_positive_inventory_generate_upload_cli( - organization_ak_setup, rhcloud_registered_hosts, rhcloud_sat_host + rhcloud_manifest_org, rhcloud_registered_hosts, module_target_sat ): """Tests Insights inventory generation and upload via foreman-rake commands: https://github.com/theforeman/foreman_rh_cloud/blob/master/README.md @@ -42,7 +36,7 @@ def test_positive_inventory_generate_upload_cli( :customerscenario: true - :Steps: + :steps: 0. Create a VM and register to insights within org having manifest. 1. 
Generate and upload report for all organizations @@ -65,13 +59,11 @@ def test_positive_inventory_generate_upload_cli( :BZ: 1957129, 1895953, 1956190 :CaseAutomation: Automated - - :CaseLevel: System """ - org, _ = organization_ak_setup + org = rhcloud_manifest_org cmd = f'organization_id={org.id} foreman-rake rh_cloud_inventory:report:generate_upload' upload_success_msg = f"Generated and uploaded inventory report for organization '{org.name}'" - result = rhcloud_sat_host.execute(cmd) + result = module_target_sat.execute(cmd) assert result.status == 0 assert upload_success_msg in result.stdout @@ -80,14 +72,16 @@ def test_positive_inventory_generate_upload_cli( f'/var/lib/foreman/red_hat_inventory/uploads/done/report_for_{org.id}.tar.xz' ) wait_for( - lambda: rhcloud_sat_host.get(remote_path=remote_report_path, local_path=local_report_path), + lambda: module_target_sat.get( + remote_path=str(remote_report_path), local_path=str(local_report_path) + ), timeout=60, delay=15, silent_failure=True, handle_exception=True, ) local_file_data = get_local_file_data(local_report_path) - assert local_file_data['checksum'] == get_remote_report_checksum(rhcloud_sat_host, org.id) + assert local_file_data['checksum'] == get_remote_report_checksum(module_target_sat, org.id) assert local_file_data['size'] > 0 assert local_file_data['extractable'] assert local_file_data['json_files_parsable'] @@ -102,16 +96,16 @@ def test_positive_inventory_generate_upload_cli( @pytest.mark.e2e @pytest.mark.tier3 def test_positive_inventory_recommendation_sync( - organization_ak_setup, + rhcloud_manifest_org, rhcloud_registered_hosts, - rhcloud_sat_host, + module_target_sat, ): """Tests Insights recommendation sync via foreman-rake commands: https://github.com/theforeman/foreman_rh_cloud/blob/master/README.md :id: 361af91d-1246-4308-9cc8-66beada7d651 - :Steps: + :steps: 0. Create a VM and register to insights within org having manifest. 1. Sync insights recommendation using following foreman-rake command. @@ -122,15 +116,13 @@ def test_positive_inventory_recommendation_sync( :BZ: 1957186 :CaseAutomation: Automated - - :CaseLevel: System """ - org, ak = organization_ak_setup + org = rhcloud_manifest_org cmd = f'organization_id={org.id} foreman-rake rh_cloud_insights:sync' timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') - result = rhcloud_sat_host.execute(cmd) + result = module_target_sat.execute(cmd) wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() + lambda: module_target_sat.api.ForemanTask() .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0] .result == 'success', @@ -146,16 +138,16 @@ def test_positive_inventory_recommendation_sync( @pytest.mark.e2e @pytest.mark.tier3 def test_positive_sync_inventory_status( - organization_ak_setup, + rhcloud_manifest_org, rhcloud_registered_hosts, - rhcloud_sat_host, + module_target_sat, ): """Sync inventory status via foreman-rake commands: https://github.com/theforeman/foreman_rh_cloud/blob/master/README.md :id: 915ffbfd-c2e6-4296-9d69-f3f9a0e79b32 - :Steps: + :steps: 0. Create a VM and register to insights within org having manifest. 1. Sync inventory status for specific organization. 
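[Editorial note on the hunks above] The migrated rhcloud-inventory tests all follow the same shape: run an org-scoped foreman-rake command on the Satellite, then poll the resulting ForemanTask until it reports success. A minimal sketch of that shared pattern, built only from calls that appear in the tests themselves; `sat`, `org_id`, `rake_task`, and `task_label` are placeholder names, not fixtures or constants from this suite:

```python
from datetime import datetime

from wait_for import wait_for


def run_rake_and_wait(sat, org_id, rake_task, task_label, timeout=400):
    """Run an org-scoped foreman-rake task and wait for its ForemanTask to succeed."""
    timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
    result = sat.execute(f'organization_id={org_id} foreman-rake {rake_task}')
    assert result.status == 0
    # Poll the task queue the same way the tests above do.
    wait_for(
        lambda: sat.api.ForemanTask()
        .search(query={'search': f'{task_label} and started_at >= "{timestamp}"'})[0]
        .result
        == 'success',
        timeout=timeout,
        delay=15,
        silent_failure=True,
        handle_exception=True,
    )
    return result
```

With such a helper, the inventory status check above reduces to `run_rake_and_wait(module_target_sat, org.id, 'rh_cloud_inventory:sync', inventory_sync_task)`.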
@@ -167,19 +159,17 @@ def test_positive_sync_inventory_status( :BZ: 1957186 :CaseAutomation: Automated - - :CaseLevel: System """ - org, ak = organization_ak_setup + org = rhcloud_manifest_org cmd = f'organization_id={org.id} foreman-rake rh_cloud_inventory:sync' success_msg = f"Synchronized inventory for organization '{org.name}'" timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') - result = rhcloud_sat_host.execute(cmd) + result = module_target_sat.execute(cmd) assert result.status == 0 assert success_msg in result.stdout # Check task details wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() + lambda: module_target_sat.api.ForemanTask() .search(query={'search': f'{inventory_sync_task} and started_at >= "{timestamp}"'})[0] .result == 'success', @@ -188,7 +178,7 @@ def test_positive_sync_inventory_status( silent_failure=True, handle_exception=True, ) - task_output = rhcloud_sat_host.api.ForemanTask().search( + task_output = module_target_sat.api.ForemanTask().search( query={'search': f'{inventory_sync_task} and started_at >= "{timestamp}"'} ) assert task_output[0].output['host_statuses']['sync'] == 2 @@ -202,12 +192,12 @@ def test_max_org_size_variable(): :id: 7dd964c3-fde8-4335-ab13-02329119d7f6 - :Steps: + :steps: 1. Register few content hosts with satellite. 2. Change value of max_org_size for testing purpose(See BZ#1962694#c2). 3. Start report generation and upload using - ForemanInventoryUpload::Async::GenerateAllReportsJob.perform_now + ForemanTasks.sync_task(ForemanInventoryUpload::Async::GenerateAllReportsJob) :expectedresults: If organization had more hosts than specified by max_org_size variable then report won't be uploaded. @@ -217,8 +207,6 @@ def test_max_org_size_variable(): :BZ: 1962694 :CaseAutomation: ManualOnly - - :CaseLevel: System """ @@ -228,7 +216,7 @@ def test_satellite_inventory_slice_variable(): :id: ffbef1c7-08f3-444b-9255-2251d5594fcb - :Steps: + :steps: 1. Register few content hosts with satellite. 2. Set SATELLITE_INVENTORY_SLICE_SIZE=1 dynflow environment variable. @@ -243,8 +231,6 @@ def test_satellite_inventory_slice_variable(): :BZ: 1945661 :CaseAutomation: ManualOnly - - :CaseLevel: System """ @@ -254,7 +240,7 @@ def test_rhcloud_external_links(): :id: bc7f6354-ed3e-4ac5-939d-90bfe4177043 - :Steps: + :steps: 1. Go to Configure > Inventory upload 2. Go to Configure > Insights @@ -266,8 +252,6 @@ def test_rhcloud_external_links(): :BZ: 1975093 :CaseAutomation: ManualOnly - - :CaseLevel: System """ @@ -277,7 +261,7 @@ def test_positive_generate_all_reports_job(target_sat): :id: a9e4bfdb-6d7c-4f8c-ae57-a81442926dd8 - :Steps: + :steps: 1. Disable the Automatic Inventory upload setting. 2. Execute Foreman GenerateAllReportsJob via foreman-rake. 
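[Editorial note] The docstring updates above replace the bare `ForemanInventoryUpload::Async::GenerateAllReportsJob.perform_now` with `ForemanTasks.sync_task(...)`, which routes the job through the task system and runs it in the foreground. A hedged sketch of how that manual step could be scripted from a test; piping a one-liner into `foreman-rake console` is an assumption here, not something this diff does:

```python
def generate_all_reports_in_foreground(sat):
    """Sketch: trigger GenerateAllReportsJob synchronously on the Satellite."""
    # Assumption: the Rails console accepts a command on stdin; adjust for
    # environments that require an interactive session instead.
    result = sat.execute(
        'echo "ForemanTasks.sync_task('
        'ForemanInventoryUpload::Async::GenerateAllReportsJob)" | foreman-rake console'
    )
    assert result.status == 0
    return result
```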
@@ -288,8 +272,6 @@ def test_positive_generate_all_reports_job(target_sat): :customerscenario: true :CaseAutomation: Automated - - :CaseLevel: System """ try: target_sat.update_setting('allow_auto_inventory_upload', False) diff --git a/tests/foreman/cli/test_role.py b/tests/foreman/cli/test_role.py index b01f703d338..7ccdafdf4f1 100644 --- a/tests/foreman/cli/test_role.py +++ b/tests/foreman/cli/test_role.py @@ -4,40 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import re from math import ceil from random import choice +import re -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIDataBaseError -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.filter import Filter -from robottelo.cli.role import Role -from robottelo.cli.settings import Settings -from robottelo.cli.user import User -from robottelo.constants import PERMISSIONS -from robottelo.constants import ROLES -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import parametrized +from robottelo.constants import PERMISSIONS, ROLES +from robottelo.exceptions import CLIDataBaseError, CLIReturnCodeError +from robottelo.utils.datafactory import generate_strings_list, parametrized class TestRole: @@ -45,12 +28,14 @@ class TestRole: @pytest.mark.tier1 @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized( - list(zip(generate_strings_list(length=10), generate_strings_list(length=10))) + list( + zip(generate_strings_list(length=10), generate_strings_list(length=10), strict=True) + ) ), ) - def test_positive_crud_with_name(self, name, new_name): + def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Create new role with provided name, update name and delete role by ID :id: f77b8e84-e964-4007-b12b-142949134d8b @@ -64,19 +49,18 @@ def test_positive_crud_with_name(self, name, new_name): :CaseImportance: Critical """ - role = make_role({'name': name}) + role = module_target_sat.cli_factory.make_role({'name': name}) assert role['name'] == name - Role.update({'id': role['id'], 'new-name': new_name}) - role = Role.info({'id': role['id']}) + module_target_sat.cli.Role.update({'id': role['id'], 'new-name': new_name}) + role = module_target_sat.cli.Role.info({'id': role['id']}) assert role['name'] == new_name - Role.delete({'id': role['id']}) + module_target_sat.cli.Role.delete({'id': role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': role['id']}) + module_target_sat.cli.Role.info({'id': role['id']}) @pytest.mark.tier1 @pytest.mark.upgrade - @pytest.mark.build_sanity - def test_positive_create_with_permission(self): + def test_positive_create_with_permission(self, module_target_sat): """Create new role with a set of permission :id: 7cb2b2e2-ad4d-41e9-b6b2-c0366eb09b9a @@ -85,18 +69,24 @@ def test_positive_create_with_permission(self): :CaseImportance: Critical """ - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=Organization"}) + for permission in 
module_target_sat.cli.Filter.available_permissions(
+                {"search": "resource_type=Organization"}
+            )
         ]
         # Assign filter to created role
-        make_filter({'role-id': role['id'], 'permissions': permissions})
-        assert set(Role.filters({'id': role['id']})[0]['permissions']) == set(permissions)
+        module_target_sat.cli_factory.make_filter(
+            {'role-id': role['id'], 'permissions': permissions}
+        )
+        assert set(module_target_sat.cli.Role.filters({'id': role['id']})[0]['permissions']) == set(
+            permissions
+        )
 
     @pytest.mark.tier1
-    def test_positive_list_filters_by_id(self):
+    def test_positive_list_filters_by_id(self, module_target_sat):
         """Create new role with a filter and list it by role id
 
         :id: 6979ad8d-629b-481e-9d3a-8f3b3bca53f9
@@ -105,19 +95,23 @@
 
         :CaseImportance: Critical
         """
-        role = make_role()
+        role = module_target_sat.cli_factory.make_role()
         # Pick permissions by its resource type
         permissions = [
             permission['name']
-            for permission in Filter.available_permissions({"search": "resource_type=Organization"})
+            for permission in module_target_sat.cli.Filter.available_permissions(
+                {"search": "resource_type=Organization"}
+            )
         ]
         # Assign filter to created role
-        filter_ = make_filter({'role-id': role['id'], 'permissions': permissions})
+        filter_ = module_target_sat.cli_factory.make_filter(
+            {'role-id': role['id'], 'permissions': permissions}
+        )
         assert role['name'] == filter_['role']
-        assert Role.filters({'id': role['id']})[0]['id'] == filter_['id']
+        assert module_target_sat.cli.Role.filters({'id': role['id']})[0]['id'] == filter_['id']
 
     @pytest.mark.tier1
-    def test_positive_list_filters_by_name(self):
+    def test_positive_list_filters_by_name(self, module_target_sat):
         """Create new role with a filter and list it by role name
 
         :id: bbcb3982-f484-4dde-a3ea-7145fd28ab1f
@@ -126,19 +120,23 @@
 
         :CaseImportance: Critical
         """
-        role = make_role()
+        role = module_target_sat.cli_factory.make_role()
         # Pick permissions by its resource type
         permissions = [
             permission['name']
-            for permission in Filter.available_permissions({"search": "resource_type=Organization"})
+            for permission in module_target_sat.cli.Filter.available_permissions(
+                {"search": "resource_type=Organization"}
+            )
         ]
         # Assign filter to created role
-        filter_ = make_filter({'role': role['name'], 'permissions': permissions})
+        filter_ = module_target_sat.cli_factory.make_filter(
+            {'role': role['name'], 'permissions': permissions}
+        )
         assert role['name'] == filter_['role']
-        assert Role.filters({'name': role['name']})[0]['id'] == filter_['id']
+        assert module_target_sat.cli.Role.filters({'name': role['name']})[0]['id'] == filter_['id']
 
     @pytest.mark.tier1
-    def test_negative_list_filters_without_parameters(self):
+    def test_negative_list_filters_without_parameters(self, module_target_sat):
         """Try to list filter without specifying role id or name
 
         :id: 56cafbe0-d1cb-413e-8eac-0e01a3590fd2
 
@@ -150,29 +148,28 @@
 
         :BZ: 1296782
         """
         with pytest.raises(CLIReturnCodeError) as err:
-            try:
-                Role.filters()
-            except CLIDataBaseError as err:
-                pytest.fail(err)
+            module_target_sat.cli.Role.filters()
+        if issubclass(err.type, CLIDataBaseError):
+            pytest.fail(str(err))
         assert re.search('At least one of options .* is required', err.value.msg)
 
-    @pytest.fixture()
-    def make_role_with_permissions(self):
+    @pytest.fixture
+    def make_role_with_permissions(self, target_sat):
         """Create new role with a filter"""
-        role = 
make_role() + role = target_sat.cli_factory.make_role() res_types = iter(PERMISSIONS.keys()) permissions = [] # Collect more than 20 different permissions while len(permissions) <= 20: permissions += [ permission['name'] - for permission in Filter.available_permissions( + for permission in target_sat.cli.Filter.available_permissions( {"search": f"resource_type={next(res_types)}"} ) ] # Create a filter for each permission for perm in permissions: - make_filter({'role': role['name'], 'permissions': perm}) + target_sat.cli_factory.make_filter({'role': role['name'], 'permissions': perm}) return { 'role': role, 'permissions': permissions, @@ -181,7 +178,9 @@ def make_role_with_permissions(self): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('per_page', [1, 5, 20]) - def test_positive_list_filters_with_pagination(self, make_role_with_permissions, per_page): + def test_positive_list_filters_with_pagination( + self, make_role_with_permissions, per_page, module_target_sat + ): """Make sure filters list can be displayed with different items per page value @@ -199,14 +198,14 @@ def test_positive_list_filters_with_pagination(self, make_role_with_permissions, """ # Verify the first page contains exactly the same items count # as `per-page` value - filters = Role.filters( + filters = module_target_sat.cli.Role.filters( {'name': make_role_with_permissions['role']['name'], 'per-page': per_page} ) assert len(filters) == per_page # Verify pagination and total amount of pages by checking the # items count on the last page last_page = ceil(len(make_role_with_permissions['permissions']) / per_page) - filters = Role.filters( + filters = module_target_sat.cli.Role.filters( { 'name': make_role_with_permissions['role']['name'], 'page': last_page, @@ -219,7 +218,7 @@ def test_positive_list_filters_with_pagination(self, make_role_with_permissions, @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_delete_cloned_builtin(self): + def test_positive_delete_cloned_builtin(self, module_target_sat): """Clone a builtin role and attempt to delete it :id: 1fd9c636-596a-4cb2-b100-de19238042cc @@ -231,27 +230,29 @@ def test_positive_delete_cloned_builtin(self): :CaseImportance: Critical """ - role_list = Role.list({'search': f'name=\\"{choice(ROLES)}\\"'}) + role_list = module_target_sat.cli.Role.list({'search': f'name=\\"{choice(ROLES)}\\"'}) assert len(role_list) == 1 - cloned_role = Role.clone({'id': role_list[0]['id'], 'new-name': gen_string('alphanumeric')}) - Role.delete({'id': cloned_role['id']}) + cloned_role = module_target_sat.cli.Role.clone( + {'id': role_list[0]['id'], 'new-name': gen_string('alphanumeric')} + ) + module_target_sat.cli.Role.delete({'id': cloned_role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': cloned_role['id']}) + module_target_sat.cli.Role.info({'id': cloned_role['id']}) class TestSystemAdmin: """Test class for System Admin role end to end CLI""" @pytest.fixture(scope='class', autouse=True) - def tearDown(self): + def tearDown(self, class_target_sat): """Will reset the changed value of settings""" yield - Settings.set({'name': "outofsync_interval", 'value': "30"}) + class_target_sat.cli.Settings.set({'name': "outofsync_interval", 'value': "30"}) @pytest.mark.upgrade @pytest.mark.tier3 @pytest.mark.e2e - def test_system_admin_role_end_to_end(self): + def test_system_admin_role_end_to_end(self, target_sat): """Test System admin role with a end to end workflow :id: da6b3549-d1cf-44fc-869f-08d15d407fa2 @@ -277,29 +278,28 @@ def 
test_system_admin_role_end_to_end(self): 4. System Admin role should be able to create Organization admins 5. User with sys admin role should be able to edit filters on roles - :CaseLevel: System """ - org = make_org() - location = make_location() + org = target_sat.cli_factory.make_org() + location = target_sat.cli_factory.make_location() common_pass = gen_string('alpha') - role = Role.info({'name': 'System admin'}) - system_admin_1 = make_user( + role = target_sat.cli.Role.info({'name': 'System admin'}) + system_admin_1 = target_sat.cli_factory.user( { 'password': common_pass, 'organization-ids': org['id'], 'location-ids': location['id'], } ) - User.add_role({'id': system_admin_1['id'], 'role-id': role['id']}) - Settings.with_user(username=system_admin_1['login'], password=common_pass).set( - {'name': "outofsync_interval", 'value': "32"} - ) - sync_time = Settings.list({'search': 'name=outofsync_interval'})[0] + target_sat.cli.User.add_role({'id': system_admin_1['id'], 'role-id': role['id']}) + target_sat.cli.Settings.with_user( + username=system_admin_1['login'], password=common_pass + ).set({'name': "outofsync_interval", 'value': "32"}) + sync_time = target_sat.cli.Settings.list({'search': 'name=outofsync_interval'})[0] # Asserts if the setting was updated successfully - assert '32' == sync_time['value'] + assert sync_time['value'] == '32' # Create another System Admin user using the first one - system_admin = User.with_user( + system_admin = target_sat.cli.User.with_user( username=system_admin_1['login'], password=common_pass ).create( { @@ -315,7 +315,9 @@ def test_system_admin_role_end_to_end(self): } ) # Create the Org Admin user - org_role = Role.with_user(username=system_admin['login'], password=common_pass).clone( + org_role = target_sat.cli.Role.with_user( + username=system_admin['login'], password=common_pass + ).clone( { 'name': 'Organization admin', 'new-name': gen_string('alpha'), @@ -323,7 +325,9 @@ def test_system_admin_role_end_to_end(self): 'location-ids': location['id'], } ) - org_admin = User.with_user(username=system_admin['login'], password=common_pass).create( + org_admin = target_sat.cli.User.with_user( + username=system_admin['login'], password=common_pass + ).create( { 'auth-source-id': 1, 'firstname': gen_string('alpha'), @@ -338,20 +342,20 @@ def test_system_admin_role_end_to_end(self): ) # Assert if the cloning was successful assert org_role['id'] is not None - org_role_filters = Role.filters({'id': org_role['id']}) + org_role_filters = target_sat.cli.Role.filters({'id': org_role['id']}) search_filter = None for arch_filter in org_role_filters: if arch_filter['resource-type'] == 'Architecture': search_filter = arch_filter break - Filter.with_user(username=system_admin['login'], password=common_pass).update( - {'role-id': org_role['id'], 'id': arch_filter['id'], 'search': 'name=x86_64'} - ) + target_sat.cli.Filter.with_user( + username=system_admin['login'], password=common_pass + ).update({'role-id': org_role['id'], 'id': arch_filter['id'], 'search': 'name=x86_64'}) # Asserts if the filter is updated - assert 'name=x86_64' in Filter.info({'id': search_filter['id']}).values() - org_admin = User.with_user(username=system_admin['login'], password=common_pass).info( - {'id': org_admin['id']} - ) + assert 'name=x86_64' in target_sat.cli.Filter.info({'id': search_filter['id']}).values() + org_admin = target_sat.cli.User.with_user( + username=system_admin['login'], password=common_pass + ).info({'id': org_admin['id']}) # Asserts Created Org Admin assert 
org_role['name'] in org_admin['roles'] assert org['name'] in org_admin['organizations'] diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index 17f131c1656..d4b2b883c44 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -4,98 +4,208 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: InterSatelliteSync :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from random import randint +import os +from time import sleep -import pytest from fauxfactory import gen_string +from manifester import Manifester +import pytest +from wait_for import wait_for -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.content_export import ContentExport -from robottelo.cli.content_import import ContentImport -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import make_content_view -from robottelo.cli.factory import make_lifecycle_environment -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.file import File -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.settings import Settings from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import DEFAULT_CV -from robottelo.constants import PRDS -from robottelo.constants import PULP_EXPORT_DIR -from robottelo.constants import PULP_IMPORT_DIR -from robottelo.constants import REPO_TYPE -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + DEFAULT_ARCHITECTURE, + DEFAULT_CV, + ENVIRONMENT, + EXPORT_LIBRARY_NAME, + PULP_EXPORT_DIR, + PULP_IMPORT_DIR, + REPO_TYPE, + REPOS, + DataFile, +) from robottelo.constants.repos import ANSIBLE_GALAXY +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='class') -def config_export_import_settings(): +def config_export_import_settings(module_target_sat): """Check settings and set download policy for export. 
Reset to original state after import""" - download_policy_value = Settings.info({'name': 'default_download_policy'})['value'] - rh_download_policy_value = Settings.info({'name': 'default_redhat_download_policy'})['value'] - subs_conn_enabled_value = Settings.info({'name': 'subscription_connection_enabled'})['value'] - Settings.set({'name': 'default_redhat_download_policy', 'value': 'immediate'}) + download_policy_value = module_target_sat.cli.Settings.info( + {'name': 'default_download_policy'} + )['value'] + rh_download_policy_value = module_target_sat.cli.Settings.info( + {'name': 'default_redhat_download_policy'} + )['value'] + subs_conn_enabled_value = module_target_sat.cli.Settings.info( + {'name': 'subscription_connection_enabled'} + )['value'] + module_target_sat.cli.Settings.set( + {'name': 'default_redhat_download_policy', 'value': 'immediate'} + ) yield - Settings.set({'name': 'default_download_policy', 'value': download_policy_value}) - Settings.set({'name': 'default_redhat_download_policy', 'value': rh_download_policy_value}) - Settings.set({'name': 'subscription_connection_enabled', 'value': subs_conn_enabled_value}) + module_target_sat.cli.Settings.set( + {'name': 'default_download_policy', 'value': download_policy_value} + ) + module_target_sat.cli.Settings.set( + {'name': 'default_redhat_download_policy', 'value': rh_download_policy_value} + ) + module_target_sat.cli.Settings.set( + {'name': 'subscription_connection_enabled', 'value': subs_conn_enabled_value} + ) -@pytest.fixture(scope='function') +@pytest.fixture def export_import_cleanup_function(target_sat, function_org): """Deletes export/import dirs of function org""" yield - # Deletes directories created for export/import test target_sat.execute( - f'rm -rf {PULP_EXPORT_DIR}/{function_org.name} {PULP_IMPORT_DIR}/{function_org.name}', + f'rm -rf {PULP_EXPORT_DIR}/{function_org.name} {PULP_IMPORT_DIR}/{function_org.name}' ) -@pytest.fixture(scope='function') # perform the cleanup after each testcase of a module +@pytest.fixture # perform the cleanup after each testcase of a module def export_import_cleanup_module(target_sat, module_org): """Deletes export/import dirs of module_org""" yield - # Deletes directories created for export/import test target_sat.execute( f'rm -rf {PULP_EXPORT_DIR}/{module_org.name} {PULP_IMPORT_DIR}/{module_org.name}' ) -@pytest.fixture(scope='class') -def docker_repo(module_target_sat, module_org): - product = make_product({'organization-id': module_org.id}) - repo = make_repository( +@pytest.fixture +def function_import_org(target_sat): + """Creates an Organization for content import.""" + return target_sat.api.Organization().create() + + +@pytest.fixture +def function_import_org_with_manifest(target_sat, function_import_org): + """Creates and sets an Organization with a brand-new manifest for content import.""" + with Manifester(manifest_category=settings.manifest.golden_ticket) as manifest: + target_sat.upload_manifest(function_import_org.id, manifest) + return function_import_org + + +@pytest.fixture(scope='module') +def module_synced_custom_repo(module_target_sat, module_org, module_product): + repo = module_target_sat.cli_factory.make_repository( { + 'content-type': 'yum', + 'download-policy': 'immediate', 'organization-id': module_org.id, + 'product-id': module_product.id, + } + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + return repo + + +@pytest.fixture +def function_synced_custom_repo(target_sat, function_org, function_product): + repo = 
target_sat.cli_factory.make_repository( + { + 'content-type': 'yum', + 'download-policy': 'immediate', + 'organization-id': function_org.id, + 'product-id': function_product.id, + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + return repo + + +@pytest.fixture +def function_synced_rh_repo(request, target_sat, function_sca_manifest_org): + """Enable and synchronize RH repo with immediate policy""" + repo_dict = ( + REPOS['kickstart'][request.param.replace('kickstart', '')[1:]] + if 'kickstart' in request.param + else REPOS[request.param] + ) + target_sat.cli.RepositorySet.enable( + { + 'organization-id': function_sca_manifest_org.id, + 'name': repo_dict['reposet'], + 'product': repo_dict['product'], + 'releasever': repo_dict.get('releasever', None) or repo_dict.get('version', None), + 'basearch': DEFAULT_ARCHITECTURE, + } + ) + repo = target_sat.cli.Repository.info( + { + 'organization-id': function_sca_manifest_org.id, + 'name': repo_dict['name'], + 'product': repo_dict['product'], + } + ) + # Update the download policy to 'immediate' and sync + target_sat.cli.Repository.update({'download-policy': 'immediate', 'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}, timeout=7200000) + return target_sat.cli.Repository.info( + { + 'organization-id': function_sca_manifest_org.id, + 'name': repo_dict['name'], + 'product': repo_dict['product'], + } + ) + + +@pytest.fixture +def function_synced_file_repo(target_sat, function_org, function_product): + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, + 'product-id': function_product.id, + 'content-type': 'file', + 'url': settings.repos.file_type_repo.url, + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + return repo + + +@pytest.fixture +def function_synced_docker_repo(target_sat, function_org): + product = target_sat.cli_factory.make_product({'organization-id': function_org.id}) + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, 'product-id': product['id'], 'content-type': REPO_TYPE['docker'], 'download-policy': 'immediate', - 'url': 'https://quay.io', - 'docker-upstream-name': 'quay/busybox', + 'url': CONTAINER_REGISTRY_HUB, + 'docker-upstream-name': CONTAINER_UPSTREAM_NAME, + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + return repo + + +@pytest.fixture +def function_synced_AC_repo(target_sat, function_org, function_product): + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, + 'product-id': function_product.id, + 'content-type': 'ansible_collection', + 'url': ANSIBLE_GALAXY, + 'ansible-collection-requirements': '{collections: [ \ + { name: theforeman.foreman, version: "2.1.0" }, \ + { name: theforeman.operations, version: "0.1.0"} ]}', } ) - Repository.synchronize({'id': repo['id']}) - yield repo + target_sat.cli.Repository.synchronize({'id': repo['id']}) + return repo @pytest.mark.run_in_one_thread @@ -103,307 +213,163 @@ class TestRepositoryExport: """Tests for exporting a repository via CLI""" @pytest.mark.tier3 - def test_positive_export_complete_version_custom_repo( - self, target_sat, export_import_cleanup_module, module_org - ): - """Export a custom repository via complete version - - :id: 9c855866-b9b1-4e32-b3eb-7342fdaa7116 - - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine - - :CaseLevel: System - """ - # setup custom repo - cv_name = gen_string('alpha') - product = 
make_product({'organization-id': module_org.id}) - repo = make_repository( - { - 'download-policy': 'immediate', - 'organization-id': module_org.id, - 'product-id': product['id'], - } - ) - Repository.synchronize({'id': repo['id']}) - # create cv and publish - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': repo['id'], - } - ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) - assert len(cv['versions']) == 1 - cvv = cv['versions'][0] - # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - # Export content view - ContentExport.completeVersion({'id': cvv['id'], 'organization-id': module_org.id}) - # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) != '' - - @pytest.mark.tier3 - def test_positive_export_incremental_version_custom_repo( - self, target_sat, export_import_cleanup_module, module_org + def test_positive_export_version_custom_repo( + self, target_sat, export_import_cleanup_module, module_org, module_synced_custom_repo ): - """Export custom repo via incremental export + """Export custom repo via complete and incremental CV version export. :id: 1b58dca7-c8bb-4893-a306-5882826da559 - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine + :setup: + 1. Product with synced custom repository. + + :steps: + 1. Create a CV, add the product and publish it. + 2. Export complete CV version. + 3. Publish new CV version. + 4. Export incremental CV version. - :CaseLevel: System + :expectedresults: + 1. Complete export succeeds, exported files are present on satellite machine. + 2. Incremental export succeeds, exported files are present on satellite machine. 
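For reference, the complete-then-incremental flow this rewritten test exercises condenses to the sketch below. This is a hypothetical helper, not code from this patch; target_sat, module_org and a synced repo are assumed to come from fixtures like the ones defined earlier in this file.

def export_complete_then_incremental(target_sat, module_org, repo):
    # Publish a CV that contains the synced repo
    cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
    target_sat.cli.ContentView.add_repository(
        {'id': cv['id'], 'organization-id': module_org.id, 'repository-id': repo['id']}
    )
    target_sat.cli.ContentView.publish({'id': cv['id']})
    cvv = target_sat.cli.ContentView.info({'id': cv['id']})['versions'][0]
    # Complete export of version 1.0
    target_sat.cli.ContentExport.completeVersion(
        {'id': cvv['id'], 'organization-id': module_org.id}
    )
    # Publish 2.0, then export only the delta on top of the complete export
    target_sat.cli.ContentView.publish({'id': cv['id']})
    latest = max(
        target_sat.cli.ContentView.info({'id': cv['id']})['versions'],
        key=lambda v: int(v['id']),
    )
    target_sat.cli.ContentExport.incrementalVersion(
        {'id': latest['id'], 'organization-id': module_org.id}
    )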
:BZ: 1944733 :customerscenario: true """ - # Create custom product and repository + # Create cv and publish cv_name = gen_string('alpha') - product = make_product({'organization-id': module_org.id}) - repo = make_repository( - { - 'download-policy': 'immediate', - 'organization-id': module_org.id, - 'product-id': product['id'], - } + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': module_org.id} ) - Repository.synchronize({'id': repo['id']}) - # Create cv and publish - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], 'organization-id': module_org.id, - 'repository-id': repo['id'], + 'repository-id': module_synced_custom_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 1 cvv = cv['versions'][0] # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - # Export complete first, then incremental - ContentExport.completeVersion({'id': cvv['id'], 'organization-id': module_org.id}) - ContentExport.incrementalVersion({'id': cvv['id'], 'organization-id': module_org.id}) - # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) != '' - - @pytest.mark.tier3 - def test_positive_export_complete_library_custom_repo( - self, function_org, export_import_cleanup_function, target_sat - ): - """Export custom repo via complete library export - - :id: 5f35654b-fc46-48f0-b064-595e04e2bd7e - - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine - - :CaseLevel: System - """ - # Create custom product and repository - cv_name = gen_string('alpha') - product = make_product({'organization-id': function_org.id}) - repo = make_repository( - { - 'download-policy': 'immediate', - 'organization-id': function_org.id, - 'product-id': product['id'], - } + # Export complete and check the export directory + target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': module_org.id} ) - cv = make_content_view({'name': cv_name, 'organization-id': function_org.id}) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': function_org.id, - 'repository-id': repo['id'], - } + assert '1.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + # Publish new CV version, export incremental and check the export directory + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 2 + cvv = max(cv['versions'], key=lambda x: int(x['id'])) + target_sat.cli.ContentExport.incrementalVersion( + {'id': cvv['id'], 'organization-id': module_org.id} ) - ContentView.publish({'id': cv['id']}) - # Verify export directory is empty - assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' - # Export content view - ContentExport.completeLibrary({'organization-id': function_org.id}) - # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != '' + assert '2.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) @pytest.mark.tier3 - def test_positive_export_incremental_library_custom_repo( - self, 
export_import_cleanup_function, function_org, target_sat + def test_positive_export_library_custom_repo( + self, target_sat, export_import_cleanup_function, function_org, function_synced_custom_repo ): - """Export custom repo via incremental library export + """Export custom repo via complete and incremental library export. :id: ba8dc7f3-55c2-4120-ac76-cc825ef0abb8 - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine - - :CaseLevel: System - """ - # Create custom product and repository - cv_name = gen_string('alpha') - product = make_product({'organization-id': function_org.id}) - repo = make_repository( - { - 'download-policy': 'immediate', - 'organization-id': function_org.id, - 'product-id': product['id'], - } - ) - # Create cv and publish - cv = make_content_view({'name': cv_name, 'organization-id': function_org.id}) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': function_org.id, - 'repository-id': repo['id'], - } - ) - ContentView.publish({'id': cv['id']}) - # Verify export directory is empty - assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' - # Export complete library, then export incremental - ContentExport.completeLibrary({'organization-id': function_org.id}) - ContentExport.incrementalLibrary({'organization-id': function_org.id}) - # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != '' - - @pytest.mark.tier3 - @pytest.mark.upgrade - def test_positive_export_complete_version_rh_repo( - self, target_sat, export_import_cleanup_module, module_entitlement_manifest_org - ): - """Export RedHat repo via complete version + :setup: + 1. Product with synced custom repository. - :id: e17898db-ca92-4121-a723-0d4b3cf120eb + :steps: + 1. Create a CV, add the product and publish it. + 2. Export complete library. + 3. Export incremental library. - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine + :expectedresults: + 1. Complete export succeeds, exported files are present on satellite machine. + 2. Incremental export succeeds, exported files are present on satellite machine. 
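The indirect parametrization used by the RH-repo tests below goes through the function_synced_rh_repo fixture defined above; its key handling is worth spelling out. A pure-Python sketch of just the string manipulation (values are illustrative):

param = 'kickstart-rhel7'
# 'kickstart-rhel7'.replace('kickstart', '') -> '-rhel7'; [1:] drops the leading dash
key = param.replace('kickstart', '')[1:]
assert key == 'rhel7'  # so the fixture looks up REPOS['kickstart']['rhel7']
# a non-kickstart param such as 'rhae2' is looked up directly as REPOS['rhae2']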
- :CaseLevel: System """ - # Enable RH repository - cv_name = gen_string('alpha') - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['rhva6'], - 'organization-id': module_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - 'releasever': '6Server', - } - ) - repo = Repository.info( - { - 'name': REPOS['rhva6']['name'], - 'organization-id': module_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - } - ) - # Update the download policy to 'immediate' and sync - Repository.update({'download-policy': 'immediate', 'id': repo['id']}) - Repository.synchronize({'id': repo['id']}) # Create cv and publish - cv = make_content_view( - {'name': cv_name, 'organization-id': module_entitlement_manifest_org.id} + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_org.id} ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': module_entitlement_manifest_org.id, - 'repository-id': repo['id'], + 'organization-id': function_org.id, + 'repository-id': function_synced_custom_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) - assert len(cv['versions']) == 1 - cvv = cv['versions'][0] + target_sat.cli.ContentView.publish({'id': cv['id']}) # Verify export directory is empty - assert ( - target_sat.validate_pulp_filepath(module_entitlement_manifest_org, PULP_EXPORT_DIR) - == '' - ) - # Export content view - ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': module_entitlement_manifest_org.id} - ) - # Verify export directory is not empty - assert ( - target_sat.validate_pulp_filepath(module_entitlement_manifest_org, PULP_EXPORT_DIR) - != '' - ) + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + # Export complete and check the export directory + target_sat.cli.ContentExport.completeLibrary({'organization-id': function_org.id}) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) + # Export incremental and check the export directory + target_sat.cli.ContentExport.incrementalLibrary({'organization-id': function_org.id}) + assert '2.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) @pytest.mark.tier3 @pytest.mark.upgrade + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) def test_positive_export_complete_library_rh_repo( - self, export_import_cleanup_function, function_entitlement_manifest_org, target_sat + self, + target_sat, + export_import_cleanup_function, + function_sca_manifest_org, + function_synced_rh_repo, ): """Export RedHat repo via complete library :id: ffae18bf-6536-4f11-8002-7bf1568bf7f1 - :expectedresults: Repository was successfully exported, exported files are - present on satellite machine + :parametrized: yes + + :setup: + 1. Enabled and synced RH repository. + + :steps: + 1. Create CV with the RH repo and publish. + 2. Export CV version contents to a directory. + + :expectedresults: + 1. 
Repository was successfully exported, exported files are present on satellite machine - :CaseLevel: System """ - # Enable RH repository - cv_name = gen_string('alpha') - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['rhva6'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - 'releasever': '6Server', - } - ) - repo = Repository.info( - { - 'name': REPOS['rhva6']['name'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - } - ) - # Update the download policy to 'immediate' and sync - Repository.update({'download-policy': 'immediate', 'id': repo['id']}) - Repository.synchronize({'id': repo['id']}, timeout=7200000) # Create cv and publish - cv = make_content_view( - {'name': cv_name, 'organization-id': function_entitlement_manifest_org.id} + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': function_entitlement_manifest_org.id, - 'repository-id': repo['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], } ) - ContentView.publish({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) # Verify export directory is empty - assert ( - target_sat.validate_pulp_filepath(function_entitlement_manifest_org, PULP_EXPORT_DIR) - == '' - ) + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' # Export content view - ContentExport.completeLibrary({'organization-id': function_entitlement_manifest_org.id}) - # Verify export directory is not empty - assert ( - target_sat.validate_pulp_filepath(function_entitlement_manifest_org, PULP_EXPORT_DIR) - != '' + target_sat.cli.ContentExport.completeLibrary( + {'organization-id': function_sca_manifest_org.id} ) + # Verify export directory is not empty + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) != '' @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_export_repository_docker( - self, target_sat, export_import_cleanup_module, module_org, docker_repo + self, target_sat, export_import_cleanup_function, function_org, function_synced_docker_repo ): """Export docker repo via complete and incremental repository. 
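A note on the cvv = max(cv['versions'], key=lambda x: int(x['id'])) changes in the surrounding hunks: hammer does not guarantee the order of the versions list, and the ids are strings, so neither positional indexing nor a plain max() reliably yields the newest version. A minimal illustration with made-up ids:

versions = [{'id': '9', 'version': '1.0'}, {'id': '10', 'version': '2.0'}]
assert max(versions, key=lambda v: v['id'])['version'] == '1.0'       # lexical: '9' > '10'
assert max(versions, key=lambda v: int(v['id']))['version'] == '2.0'  # numeric, as intended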
@@ -425,18 +391,20 @@ def test_positive_export_repository_docker( :customerscenario: true """ # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export complete and check the export directory - target_sat.cli.ContentExport.completeRepository({'id': docker_repo['id']}) - assert '1.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + target_sat.cli.ContentExport.completeRepository({'id': function_synced_docker_repo['id']}) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) # Export incremental and check the export directory - target_sat.cli.ContentExport.incrementalRepository({'id': docker_repo['id']}) - assert '2.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + target_sat.cli.ContentExport.incrementalRepository( + {'id': function_synced_docker_repo['id']} + ) + assert '2.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_export_version_docker( - self, target_sat, export_import_cleanup_module, module_org, docker_repo + self, target_sat, export_import_cleanup_function, function_org, function_synced_docker_repo ): """Export CV with docker repo via complete and incremental version. @@ -461,12 +429,14 @@ def test_positive_export_version_docker( """ # Create CV and publish cv_name = gen_string('alpha') - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_org.id} + ) target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': docker_repo['id'], + 'organization-id': function_org.id, + 'repository-id': function_synced_docker_repo['id'], } ) target_sat.cli.ContentView.publish({'id': cv['id']}) @@ -474,40 +444,44 @@ def test_positive_export_version_docker( assert len(cv['versions']) == 1 cvv = cv['versions'][0] # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export complete and check the export directory target_sat.cli.ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': module_org.id} + {'id': cvv['id'], 'organization-id': function_org.id} ) - assert '1.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) # Publish new CVV, export incremental and check the export directory target_sat.cli.ContentView.publish({'id': cv['id']}) cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 2 - cvv = cv['versions'][1] + cvv = max(cv['versions'], key=lambda x: int(x['id'])) target_sat.cli.ContentExport.incrementalVersion( - {'id': cvv['id'], 'organization-id': module_org.id} + {'id': cvv['id'], 'organization-id': function_org.id} ) - assert '2.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + assert '2.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) @pytest.fixture(scope='class') -def class_export_entities(module_org): +def class_export_entities(module_org, module_target_sat): """Setup custom repos for export""" exporting_prod_name = gen_string('alpha') - product = make_product({'organization-id': module_org.id, 'name': 
exporting_prod_name}) + product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'name': exporting_prod_name} + ) exporting_repo_name = gen_string('alpha') - exporting_repo = make_repository( + exporting_repo = module_target_sat.cli_factory.make_repository( { 'name': exporting_repo_name, - 'mirror-on-sync': 'no', + 'mirroring-policy': 'mirror_content_only', 'download-policy': 'immediate', 'product-id': product['id'], } ) - Repository.synchronize({'id': exporting_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': exporting_repo['id']}) exporting_cv_name = gen_string('alpha') - exporting_cv, exporting_cvv_id = _create_cv(exporting_cv_name, exporting_repo, module_org) + exporting_cv, exporting_cvv_id = _create_cv( + exporting_cv_name, exporting_repo, module_org, module_target_sat + ) return { 'exporting_org': module_org, 'exporting_prod_name': exporting_prod_name, @@ -519,7 +493,7 @@ def class_export_entities(module_org): } -def _create_cv(cv_name, repo, module_org, publish=True): +def _create_cv(cv_name, repo, module_org, sat, publish=True): """Creates CV and/or publishes in organization with given name and repository :param cv_name: The name of CV to create @@ -529,101 +503,25 @@ def _create_cv(cv_name, repo, module_org, publish=True): :return: The directory of CV and Content View ID """ description = gen_string('alpha') - content_view = make_content_view( + content_view = sat.cli_factory.make_content_view( {'name': cv_name, 'description': description, 'organization-id': module_org.id} ) - ContentView.add_repository( + sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - content_view = ContentView.info({'name': cv_name, 'organization-id': module_org.id}) + content_view = sat.cli.ContentView.info({'name': cv_name, 'organization-id': module_org.id}) cvv_id = None if publish: - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = sat.cli.ContentView.info({'id': content_view['id']}) cvv_id = content_view['versions'][0]['id'] return content_view, cvv_id -def _enable_rhel_content( - sat_obj, module_entitlement_manifest_org, repo_name, releasever=None, product=None, sync=True -): - """Enable and/or Synchronize rhel content - - :param organization: The organization directory into which the rhel - contents will be enabled - :param bool sync: Syncs contents to repository if true else doesnt - :return: Repository cli object - """ - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET[repo_name], - 'organization-id': module_entitlement_manifest_org.id, - 'product': PRDS[product], - 'releasever': releasever, - } - ) - repo = Repository.info( - { - 'name': REPOS[repo_name]['name'], - 'organization-id': module_entitlement_manifest_org.id, - 'product': PRDS[product], - } - ) - # Update the download policy to 'immediate' - Repository.update({'download-policy': 'immediate', 'mirror-on-sync': 'no', 'id': repo['id']}) - if sync: - # Synchronize the repository - Repository.synchronize({'id': repo['id']}, timeout=7200000) - repo = Repository.info( - { - 'name': REPOS[repo_name]['name'], - 'organization-id': module_entitlement_manifest_org.id, - 'product': PRDS[product], - } - ) - return repo - - -def _import_entities(product, repo, cv, mos='no'): - """Sets same CV, product and repository in importing organization as - exporting organization 
- - :param str product: The product name same as exporting product - :param str repo: The repo name same as exporting repo - :param str cv: The cv name same as exporting cv - :param str mos: Mirror on Sync repo, by default 'no' can override to 'yes' - :returns dictionary with CLI entities created in this function - """ - importing_org = make_org() - importing_prod = make_product({'organization-id': importing_org['id'], 'name': product}) - importing_repo = make_repository( - { - 'name': repo, - 'mirror-on-sync': mos, - 'download-policy': 'immediate', - 'product-id': importing_prod['id'], - } - ) - importing_cv = make_content_view({'name': cv, 'organization-id': importing_org['id']}) - ContentView.add_repository( - { - 'id': importing_cv['id'], - 'organization-id': importing_org['id'], - 'repository-id': importing_repo['id'], - } - ) - return { - 'importing_org': importing_org, - 'importing_repo': importing_repo, - 'importing_cv': importing_cv, - } - - class TestContentViewSync: """Implements Content View Export Import tests in CLI""" @@ -631,34 +529,28 @@ class TestContentViewSync: @pytest.mark.e2e def test_positive_export_import_cv_end_to_end( self, + target_sat, class_export_entities, config_export_import_settings, export_import_cleanup_module, - target_sat, module_org, + function_import_org, ): - """Export the CV and import it. Ensure that all content is same from - export to import + """Export the CV and import it. Ensure that all content is the same from export to import. :id: b4fb9386-9b6a-4fc5-a8bf-96d7c80af93e - :steps: + :setup: + 1. Product with synced custom repository, published in a CV. - 1. Create product and repository with custom contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. Export CV version via complete version - 5. Import the exported files to satellite - 6. Check that content of export and import matches + :steps: + 1. Export CV version via complete version. + 2. Import the exported files to satellite. + 3. Check that the content of export and import matches. :expectedresults: - - 1. CV version custom contents has been exported to directory - 2. All The exported custom contents has been imported in org/satellite - - :CaseImportance: High - - :CaseLevel: System + 1. CV version custom content has been exported to the directory. + 2. All the exported custom content has been imported in org/satellite.
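The round trip asserted by this test reduces to the following sketch, assuming the fixtures above (function_import_org is a fresh organization on the same Satellite, and move_pulp_archive is assumed to move the export out of PULP_EXPORT_DIR and return the path to import from):

export = target_sat.cli.ContentExport.completeVersion(
    {'id': export_cvv_id, 'organization-id': module_org.id}
)
# relocate the export archive and get the path the importing org can read
import_path = target_sat.move_pulp_archive(module_org, export['message'])
target_sat.cli.ContentImport.version(
    {'organization-id': function_import_org.id, 'path': import_path}
)
# the imported CV keeps its exported name, so content counts compare 1:1
imported_cv = target_sat.cli.ContentView.info(
    {'name': import_cv_name, 'organization-id': function_import_org.id}
)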
:BZ: 1832858 @@ -669,47 +561,46 @@ def test_positive_export_import_cv_end_to_end( export_cvv_id = class_export_entities['exporting_cvv_id'] export_cv_description = class_export_entities['exporting_cv']['description'] import_cv_name = class_export_entities['exporting_cv_name'] - # check packages - exported_packages = Package.list({'content-view-version-id': export_cvv_id}) + # Check packages + exported_packages = target_sat.cli.Package.list({'content-view-version-id': export_cvv_id}) assert len(exported_packages) # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': export_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) - - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # check that files are present in import_path + # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import files and verify content - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cv = ContentView.info( - {'name': import_cv_name, 'organization-id': importing_org['id']} + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + importing_cv = target_sat.cli.ContentView.info( + {'name': import_cv_name, 'organization-id': function_import_org.id} ) importing_cvv = importing_cv['versions'] assert importing_cv['description'] == export_cv_description assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_packages) assert len(exported_packages) == len(imported_packages) - exported_repo = Repository.info( + exported_repo = target_sat.cli.Repository.info( { 'name': export_repo_name, 'product': export_prod_name, 'organization-id': module_org.id, } ) - imported_repo = Repository.info( + imported_repo = target_sat.cli.Repository.info( { 'name': import_repo_name, 'product': import_prod_name, - 'organization-id': importing_org['id'], + 'organization-id': function_import_org.id, } ) for item in ['packages', 'source-rpms', 'package-groups', 'errata', 'module-streams']: @@ -717,102 +608,106 @@ def test_positive_export_import_cv_end_to_end( @pytest.mark.upgrade @pytest.mark.tier3 + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) def test_positive_export_import_default_org_view( self, + target_sat, export_import_cleanup_function, - function_org, config_export_import_settings, - target_sat, + function_sca_manifest_org, + function_import_org_with_manifest, + function_synced_custom_repo, + function_synced_rh_repo, ): """Export Default Organization View version contents in directory and Import them. :id: b8a2c878-cfc2-491c-a71f-74108d6bc247 - :bz: 1671319 + :parametrized: yes - :customerscenario: true + :setup: + 1. Product with synced custom repository. + 2. Enabled and synced RH repository. :steps: - - 1. Create product and repository with custom contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. 
Export `Default Organization View version` contents to a directory - using complete library - 5. Import those contents from some other org/satellite. + 1. Create CV with the custom and RH repository. + 2. Export `Default Organization View version` contents using complete library. + 3. Import those contents from some other org/satellite. :expectedresults: + 1. Default Organization View version custom contents has been exported. + 2. All the exported custom contents has been imported in org/satellite. - 1. Default Organization View version custom contents has been exported to directory - 2. All The exported custom contents has been imported in org/satellite + :BZ: 1671319 - :CaseImportance: High - - :CaseLevel: System + :customerscenario: true """ - importing_cv_name = DEFAULT_CV + # Create cv and publish cv_name = gen_string('alpha') - export_library = 'Export-Library' - # Create custom repo - product = make_product({'organization-id': function_org.id}) - repo = make_repository( + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} + ) + target_sat.cli.ContentView.add_repository( { - 'download-policy': 'immediate', - 'organization-id': function_org.id, - 'product-id': product['id'], + 'id': cv['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_custom_repo['id'], } ) - Repository.synchronize({'id': repo['id']}) - # Create cv and publish - cv = make_content_view({'name': cv_name, 'organization-id': function_org.id}) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': function_org.id, - 'repository-id': repo['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - content_view = ContentView.info( + target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view = target_sat.cli.ContentView.info( { 'name': cv_name, - 'organization-id': function_org.id, + 'organization-id': function_sca_manifest_org.id, } ) # Verify packages default_cvv_id = content_view['versions'][0]['id'] - cv_packages = Package.list({'content-view-version-id': default_cvv_id}) + cv_packages = target_sat.cli.Package.list({'content-view-version-id': default_cvv_id}) assert len(cv_packages) # Verify export directory is empty - assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' # Export complete library - export = ContentExport.completeLibrary({'organization-id': function_org.id}) + export = target_sat.cli.ContentExport.completeLibrary( + {'organization-id': function_sca_manifest_org.id} + ) # Verify 'export-library' is created and packages are there - import_path = target_sat.move_pulp_archive(function_org, export['message']) - export_lib_cv = ContentView.info( + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) + export_lib_cv = target_sat.cli.ContentView.info( { - 'name': export_library, - 'organization-id': function_org.id, + 'name': EXPORT_LIBRARY_NAME, + 'organization-id': function_sca_manifest_org.id, } ) export_lib_cvv_id = export_lib_cv['versions'][0]['id'] - exported_lib_packages = Package.list({'content-view-version-id': export_lib_cvv_id}) - assert len(cv_packages) + exported_lib_packages = target_sat.cli.Package.list( + {'content-view-version-id': export_lib_cvv_id} + ) + assert 
len(exported_lib_packages) assert exported_lib_packages == cv_packages - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # check that files are present in import_path - result = target_sat.execute(f'ls {import_path}') - assert result.stdout != '' # Import and verify content of library - ContentImport.library({'organization-id': importing_org['id'], 'path': import_path}) - importing_cvv = ContentView.info( - {'name': importing_cv_name, 'organization-id': importing_org['id']} + target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) + target_sat.cli.ContentImport.library( + {'organization-id': function_import_org_with_manifest.id, 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': DEFAULT_CV, 'organization-id': function_import_org_with_manifest.id} )['versions'] assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_packages) assert len(cv_packages) == len(imported_packages) @@ -844,19 +739,16 @@ def test_positive_export_import_filtered_cvv( 1. Filtered CV version custom contents has been exported to directory 2. Filtered exported custom contents has been imported in org/satellite - :CaseImportance: High - - :CaseLevel: System """ exporting_cv_name = importing_cvv = gen_string('alpha') exporting_cv, exporting_cvv = _create_cv( exporting_cv_name, class_export_entities['exporting_repo'], class_export_entities['exporting_org'], - False, + target_sat, ) filter_name = gen_string('alphanumeric') - ContentView.filter.create( + target_sat.cli.ContentView.filter.create( { 'name': filter_name, 'content-view-id': exporting_cv['id'], @@ -864,46 +756,50 @@ def test_positive_export_import_filtered_cvv( 'type': 'rpm', } ) - ContentView.filter.rule.create( + target_sat.cli.ContentView.filter.rule.create( { 'name': 'cat', 'content-view-filter': filter_name, 'content-view-id': exporting_cv['id'], } ) - ContentView.publish( + target_sat.cli.ContentView.publish( { 'id': exporting_cv['id'], 'organization-id': class_export_entities['exporting_org'].id, } ) - exporting_cv = ContentView.info({'id': exporting_cv['id']}) - exporting_cvv_id = exporting_cv['versions'][0]['id'] + exporting_cv = target_sat.cli.ContentView.info({'id': exporting_cv['id']}) + exporting_cvv_id = max(exporting_cv['versions'], key=lambda x: int(x['id']))['id'] # Check presence of 1 rpm due to filter - export_packages = Package.list({'content-view-version-id': exporting_cvv_id}) + export_packages = target_sat.cli.Package.list({'content-view-version-id': exporting_cvv_id}) assert len(export_packages) == 1 # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': exporting_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) # Import section - importing_org = make_org() + importing_org = target_sat.cli_factory.make_org() # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) + target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) # check that files are present in import_path 
result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import file and verify content - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cvv = ContentView.info( + target_sat.cli.ContentImport.version( + {'organization-id': importing_org['id'], 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( {'name': importing_cvv, 'organization-id': importing_org['id']} )['versions'] assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_packages) == 1 assert len(export_packages) == len(imported_packages) @@ -911,71 +807,70 @@ def test_positive_export_import_filtered_cvv( @pytest.mark.upgrade def test_positive_export_import_promoted_cv( self, + target_sat, class_export_entities, export_import_cleanup_module, config_export_import_settings, - target_sat, module_org, + function_import_org, ): """Export promoted CV version contents in directory and Import them. :id: 315ef1f0-e2ad-43ec-adff-453fb71654a7 - :steps: + :setup: + 1. Product with synced custom repository, published in a CV. - 1. Create product and repository with contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. Promote the CV. - 5. Export CV version contents to a directory - 6. Import those contents from some other org/satellite. + :steps: + 1. Promote the CV. + 2. Export CV version contents to a directory. + 3. Import those contents from some other org/satellite. :expectedresults: + 1. Promoted CV version contents has been exported to directory. + 2. Promoted CV version contents has been imported successfully. + 3. The imported CV should only be published and not promoted. - 1. Promoted CV version contents has been exported to directory - 2. Promoted CV version contents has been imported successfully - 3. 
The imported CV should only be published and not promoted - - :CaseLevel: System """ import_cv_name = class_export_entities['exporting_cv_name'] export_cv_id = class_export_entities['exporting_cv']['id'] export_cvv_id = class_export_entities['exporting_cvv_id'] - env = make_lifecycle_environment({'organization-id': module_org.id}) - ContentView.version_promote( + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + target_sat.cli.ContentView.version_promote( { 'id': export_cvv_id, 'to-lifecycle-environment-id': env['id'], } ) - promoted_cvv_id = ContentView.info({'id': export_cv_id})['versions'][-1]['id'] - # check packages - exported_packages = Package.list({'content-view-version-id': promoted_cvv_id}) + promoted_cvv_id = target_sat.cli.ContentView.info({'id': export_cv_id})['versions'][-1][ + 'id' + ] + # Check packages + exported_packages = target_sat.cli.Package.list( + {'content-view-version-id': promoted_cvv_id} + ) assert len(exported_packages) # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': export_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) - - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # Move export files to import location and set permission - # Import and verify content - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cv_id = ContentView.info( - {'name': import_cv_name, 'organization-id': importing_org['id']} + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + importing_cv_id = target_sat.cli.ContentView.info( + {'name': import_cv_name, 'organization-id': function_import_org.id} ) - importing_cvv_id = ContentView.info( - {'name': import_cv_name, 'organization-id': importing_org['id']} + importing_cvv_id = target_sat.cli.ContentView.info( + {'name': import_cv_name, 'organization-id': function_import_org.id} )['versions'] assert len(importing_cvv_id) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv_id[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv_id[0]['id']} + ) assert len(imported_packages) assert len(exported_packages) == len(imported_packages) # Verify the LCE is in Library @@ -985,248 +880,99 @@ def test_positive_export_import_promoted_cv( @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.e2e + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['kickstart-rhel7', 'kickstart-rhel8_bos', 'rhscl7'], + indirect=True, + ) def test_positive_export_import_redhat_cv( self, + target_sat, export_import_cleanup_function, config_export_import_settings, - function_entitlement_manifest_org, - function_secondary_entitlement_manifest, - target_sat, + function_sca_manifest_org, + function_import_org_with_manifest, + function_synced_rh_repo, ): - """Export CV version redhat contents in directory and Import them + """Export CV version with RedHat contents in directory and import them. :id: f6bd7fa9-396e-44ac-92a3-ab87ce1a7ef5 - :steps: + :parametrized: yes - 1. Enable product and repository with redhat contents. - 2. Sync the repository. - 3. Create CV with above product and publish. 
- 4. Export CV version contents to a directory - 5. Import those contents from some other org/satellite. + :setup: + 1. Enabled and synced RH repository. - :expectedresults: + :steps: + 1. Create CV with the RH repo and publish. + 2. Export CV version contents to a directory. + 3. Import those contents from some other org/satellite. - 1. CV version redhat contents has been exported to directory - 2. All The exported redhat contents has been imported in org/satellite + :expectedresults: + 1. CV version redhat contents has been exported to directory. + 2. All the exported redhat contents has been imported in org/satellite. :BZ: 1655239, 2040870 :customerscenario: true - :CaseImportance: High - - :CaseLevel: System """ - # Setup rhel repo - cv_name = gen_string('alpha') - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['kickstart']['rhel7'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - 'releasever': '7.9', - } - ) - repo = Repository.info( - { - 'name': REPOS['kickstart']['rhel7']['name'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - } - ) - # Update the download policy to 'immediate' and sync - Repository.update({'download-policy': 'immediate', 'id': repo['id']}) - Repository.synchronize({'id': repo['id']}, timeout=7200000) # Create cv and publish - cv = make_content_view( - {'name': cv_name, 'organization-id': function_entitlement_manifest_org.id} + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': function_entitlement_manifest_org.id, - 'repository-id': repo['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 1 cvv = cv['versions'][0] # Verify export directory is empty - assert ( - target_sat.validate_pulp_filepath(function_entitlement_manifest_org, PULP_EXPORT_DIR) - == '' - ) + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': function_entitlement_manifest_org.id}, - timeout=7200000, - ) - import_path = target_sat.move_pulp_archive( - function_entitlement_manifest_org, export['message'] - ) - exported_packages = Package.list({'content-view-version-id': cvv['id']}) - assert len(exported_packages) - - # importing portion - importing_org = target_sat.api.Organization().create() - # check that files are present in import_path - result = target_sat.execute(f'ls {import_path}') - assert result.stdout != '' - target_sat.upload_manifest( - importing_org.id, - function_secondary_entitlement_manifest, - interface='CLI', + export = target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id}, timeout=7200000, ) - importing_org.sca_disable() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - ContentImport.version( - {'organization-id': importing_org.id, 'path': import_path}, timeout=7200000 - ) - # Import file and verify content - importing_cvv = ContentView.info({'name': 
cv_name, 'organization-id': importing_org.id})[ - 'versions' - ] - assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) - assert len(imported_packages) - assert len(exported_packages) == len(imported_packages) - exported_repo = Repository.info( - { - 'name': repo['name'], - 'product': repo['product']['name'], - 'organization-id': function_entitlement_manifest_org.id, - } - ) - imported_repo = Repository.info( - { - 'name': repo['name'], - 'product': repo['product']['name'], - 'organization-id': importing_org.id, - } - ) - for item in ['packages', 'source-rpms', 'package-groups', 'errata', 'module-streams']: - assert exported_repo['content-counts'][item] == imported_repo['content-counts'][item] - - @pytest.mark.tier4 - def test_positive_export_import_redhat_cv_with_huge_contents( - self, - export_import_cleanup_function, - config_export_import_settings, - target_sat, - function_entitlement_manifest_org, - function_secondary_entitlement_manifest, - ): - """Export CV version redhat contents in directory and Import them - - :id: 05eb185f-e526-466c-9c14-702dde1d49de - - :steps: - - 1. Enable product and repository with redhat repository having huge contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. Export CV version contents to a directory - 5. Import those contents from some other org/satellite. - - :expectedresults: - - 1. CV version redhat contents has been exported to directory - 2. All The exported redhat contents has been imported in org/satellite - - :BZ: 1655239 - - :CaseImportance: Critical - - :CaseLevel: Acceptance - """ - cv_name = gen_string('alpha') + # Verify export directory is not empty + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) != '' - RepositorySet.enable( - { - 'basearch': 'x86_64', - 'name': REPOSET['rhscl7'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhscl'], - 'releasever': '7Server', - } - ) - repo = Repository.info( - { - 'name': REPOS['rhscl7']['name'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhscl'], - } - ) - # Update the download policy to 'immediate' and sync - Repository.update({'download-policy': 'immediate', 'id': repo['id']}) - Repository.synchronize({'id': repo['id']}, timeout=7200000) - # Create cv and publish - cv = make_content_view( - {'name': cv_name, 'organization-id': function_entitlement_manifest_org.id} - ) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': function_entitlement_manifest_org.id, - 'repository-id': repo['id'], - } - ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) - assert len(cv['versions']) == 1 - cvv = cv['versions'][0] - # Export cv - export = ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': function_entitlement_manifest_org.id}, - timeout=7200000, - ) - import_path = target_sat.move_pulp_archive( - function_entitlement_manifest_org, export['message'] - ) - exported_packages = Package.list({'content-view-version-id': cvv['id']}) + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) + exported_packages = target_sat.cli.Package.list({'content-view-version-id': cvv['id']}) assert len(exported_packages) - # importing portion - importing_org = target_sat.api.Organization().create() - # check that files are present in import_path - result = target_sat.execute(f'ls {import_path}') - assert result.stdout != '' 
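A note on the subscription_connection_enabled toggling visible in the import sections: imports are meant to exercise a disconnected Satellite, so the setting is switched off before ContentImport runs and later restored by the config_export_import_settings fixture shown at the top of this hunk series. A minimal sketch, assuming an importing_org and an import_path prepared as above:

target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': 'No'})
target_sat.cli.ContentImport.version(
    {'organization-id': importing_org.id, 'path': import_path}
)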
# Import and verify content - target_sat.upload_manifest( - importing_org.id, - function_secondary_entitlement_manifest, - interface='CLI', + target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org_with_manifest.id, 'path': import_path}, timeout=7200000, ) - importing_org.sca_disable() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - ContentImport.version( - {'organization-id': importing_org.id, 'path': import_path}, timeout=7200000 - ) - importing_cvv = ContentView.info({'name': cv_name, 'organization-id': importing_org.id})[ - 'versions' - ] + importing_cvv = target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + )['versions'] assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_packages) assert len(exported_packages) == len(imported_packages) - exported_repo = Repository.info( + exported_repo = target_sat.cli.Repository.info( { - 'name': repo['name'], - 'product': repo['product']['name'], - 'organization-id': function_entitlement_manifest_org.id, + 'name': function_synced_rh_repo['name'], + 'product': function_synced_rh_repo['product']['name'], + 'organization-id': function_sca_manifest_org.id, } ) - imported_repo = Repository.info( + imported_repo = target_sat.cli.Repository.info( { - 'name': repo['name'], - 'product': repo['product']['name'], - 'organization-id': importing_org.id, + 'name': function_synced_rh_repo['name'], + 'product': function_synced_rh_repo['product']['name'], + 'organization-id': function_import_org_with_manifest.id, } ) for item in ['packages', 'source-rpms', 'package-groups', 'errata', 'module-streams']: @@ -1241,66 +987,87 @@ def test_positive_export_cv_with_on_demand_repo( :id: c366ace5-1fde-4ae7-9e84-afe58c06c0ca :steps: - 1. Create product 2. Create repos with immediate and on_demand download policy 3. Sync the repositories 4. Create CV with above product and publish - 5. Attempt to export CV version contents + 5. Attempt to export CV version with 'fail-on-missing' option + 6. Attempt to export CV version without 'fail-on-missing' option :expectedresults: - - 1. Export passes with warning and skips the on_demand repo - - :BZ: 1998626 + 1. Export fails when 'fail-on-missing' option is used + 2. 
Export passes otherwise with warning and skips the on_demand repo """ # Create custom product - product = make_product({'organization-id': module_org.id, 'name': gen_string('alpha')}) + product = target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'name': gen_string('alpha')} + ) - # Create repositories - repo_ondemand = make_repository( + # Create repositories and sync them + repo_ondemand = target_sat.cli_factory.make_repository( { + 'content-type': 'yum', 'download-policy': 'on_demand', 'organization-id': module_org.id, 'product-id': product['id'], } ) - repo_immediate = make_repository( + repo_immediate = target_sat.cli_factory.make_repository( { + 'content-type': 'yum', 'download-policy': 'immediate', 'organization-id': module_org.id, 'product-id': product['id'], } ) - - # Sync repositories - Repository.synchronize({'id': repo_ondemand['id']}) - Repository.synchronize({'id': repo_immediate['id']}) + target_sat.cli.Repository.synchronize({'id': repo_ondemand['id']}) + target_sat.cli.Repository.synchronize({'id': repo_immediate['id']}) # Create cv and publish - cv = make_content_view({'name': gen_string('alpha'), 'organization-id': module_org.id}) - ContentView.add_repository( + cv = target_sat.cli_factory.make_content_view( + {'name': gen_string('alpha'), 'organization-id': module_org.id} + ) + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], 'organization-id': module_org.id, 'repository-id': repo_ondemand['id'], } ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], 'organization-id': module_org.id, 'repository-id': repo_immediate['id'], } ) - ContentView.publish({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - # Export Content View version - result = ContentExport.completeVersion( - {'id': cv['id'], 'organization-id': module_org.id}, + # Attempt to export CV version with 'fail-on-missing' option + with pytest.raises(CLIReturnCodeError): + target_sat.cli.ContentExport.completeVersion( + { + 'organization-id': module_org.id, + 'content-view-id': cv['id'], + 'version': '1.0', + 'fail-on-missing-content': True, + }, + output_format='base', # json output can't be parsed - BZ#1998626 + ) + + # Export is not generated + assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + + # Attempt to export CV version without 'fail-on-missing' option + result = target_sat.cli.ContentExport.completeVersion( + { + 'organization-id': module_org.id, + 'content-view-id': cv['id'], + 'version': '1.0', + }, output_format='base', # json output can't be parsed - BZ#1998626 ) @@ -1321,51 +1088,49 @@ def test_positive_export_cv_with_on_demand_repo( @pytest.mark.tier2 def test_negative_import_same_cv_twice( self, + target_sat, class_export_entities, export_import_cleanup_module, config_export_import_settings, - target_sat, module_org, + function_import_org, ): - """Import the same cv twice + """Import the same CV twice. :id: 15a7ddd3-c1a5-4b22-8460-6cb2b8ea4ef9 - :steps: + :setup: + 1. Product with synced custom repository, published in a CV. - 1. Create product and repository with custom contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. Export CV version contents to a directory - 5. Import those contents from some other org/satellite. - 6. Attempt to reimport the same contents + :steps: + 1. Export CV version contents to a directory. + 2. 
Import those contents from some other org/satellite. + 3. Attempt to reimport the same contents. :expectedresults: - - 1. Reimporting the contents with same version fails - 2. Satellite displays an error message + 1. Reimporting the contents with same version fails. + 2. Satellite displays an error message. """ export_cvv_id = class_export_entities['exporting_cvv_id'] export_cv_name = class_export_entities['exporting_cv_name'] # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': export_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) - - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # check that files are present in import_path + # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import section - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) with pytest.raises(CLIReturnCodeError) as error: - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) assert ( f"Content View Version specified in the metadata - '{export_cv_name} 1.0' " 'already exists. If you wish to replace the existing version, ' @@ -1373,324 +1138,476 @@ def test_negative_import_same_cv_twice( ) in error.value.message @pytest.mark.tier2 - def test_negative_import_invalid_path(self, module_org): + def test_negative_import_invalid_path(self, module_org, module_target_sat): """Import cv that doesn't exist in path :id: 4cc69666-407f-4d66-b3d2-8fe2ed135a5f :steps: - - 1. Import a cv with a path that doesn't exist + 1. Import a CV with a path that doesn't exist. :expectedresults: - - 1. Error 'Unable to sync repositories, no library repository found' should be - displayed + 1. Error 'Unable to sync repositories, no library repository found' should be displayed. """ export_folder = gen_string('alpha') import_path = f'{PULP_IMPORT_DIR}{export_folder}' # Import section with pytest.raises(CLIReturnCodeError) as error: - ContentImport.version({'organization-id': module_org.id, 'path': import_path}) + module_target_sat.cli.ContentImport.version( + {'organization-id': module_org.id, 'path': import_path} + ) assert ( f'''Error: Unable to find '{import_path}/metadata.json'. 
''' 'If the metadata.json file is at a different location provide it to the ' '--metadata-file option' ) in error.value.message - @pytest.mark.tier2 - def test_negative_export_cv_with_on_demand_repo( - self, export_import_cleanup_module, target_sat, module_org + @pytest.mark.tier3 + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) + def test_negative_import_incomplete_archive( + self, + target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_synced_rh_repo, + function_sca_manifest_org, + function_import_org_with_manifest, ): - """Exporting CV version having on_demand repo throws error - with "fail-on-missing-content" option set + """Try to import an incomplete export archive (mock interrupted transfer). - :id: f8b86d0e-e1a7-4e19-bb82-6de7d16c6676 - - :steps: + :id: c3b898bb-c6c8-402d-82f9-b15774d9f0fc - 1. Create product - 2. Create repos with immediate and on_demand download policy - 3. Sync the repositories - 4. Create CV with above product and publish - 5. Attempt to export CV version contents + :parametrized: yes - :expectedresults: + :setup: + 1. Enabled and synced RH repository. - 1. Export fails with error and no export is created + :steps: + 1. Create CV with the setup repo, publish it and export. + 2. Corrupt the export archive so that it's incomplete. + 3. Try to import the incomplete archive. + 4. Verify no content is imported and the import CV can be deleted. - :BZ: 1998626, 2067275 + :expectedresults: + 1. The import should fail. + 2. No content should be added, the empty import CV can be deleted. """ - - # Create custom product - product = make_product({'organization-id': module_org.id, 'name': gen_string('alpha')}) - - # Create repositories - repo_ondemand = make_repository( + # Create CV with the setup repo, publish it and export + cv = target_sat.cli_factory.make_content_view( { - 'download-policy': 'on_demand', - 'organization-id': module_org.id, - 'product-id': product['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-ids': [function_synced_rh_repo['id']], } ) - repo_immediate = make_repository( - { - 'download-policy': 'immediate', - 'organization-id': module_org.id, - 'product-id': product['id'], - } + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 1 + cvv = cv['versions'][0] + export = target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id} + ) + assert '1.0' in target_sat.validate_pulp_filepath( + function_sca_manifest_org, PULP_EXPORT_DIR ) - # Sync repositories - Repository.synchronize({'id': repo_ondemand['id']}) - Repository.synchronize({'id': repo_immediate['id']}) + # Corrupt the export archive so that it's incomplete + tar_files = target_sat.execute( + f'find {PULP_EXPORT_DIR}{function_sca_manifest_org.name}/{cv["name"]}/ -name *.tar.gz' + ).stdout.splitlines() + assert len(tar_files) == 1, 'Expected just one tar file in the export' - # Create cv and publish - cv = make_content_view({'name': gen_string('alpha'), 'organization-id': module_org.id}) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': repo_ondemand['id'], - } - ) - ContentView.add_repository( - { - 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': repo_immediate['id'], - } - ) - ContentView.publish({'id': cv['id']}) + size = int(target_sat.execute(f'du -b 
{tar_files[0]}').stdout.split()[0]) + assert size > 0, 'Export tar should not be empty' - # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + res = target_sat.execute(f'truncate -s {size // 2} {tar_files[0]}') + assert res.status == 0, 'Truncation of the tar file failed' - # Export Content View version + # Try to import the incomplete archive + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) with pytest.raises(CLIReturnCodeError) as error: - ContentExport.completeVersion( - {'id': cv['id'], 'organization-id': module_org.id, 'fail-on-missing-content': True}, - output_format='base', # json output can't be parsed - BZ#1998626 + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org_with_manifest.id, 'path': import_path} ) - # Error is raised - assert error.status != 0 - - # Export is not generated - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - - @pytest.mark.tier2 - def test_positive_create_custom_major_minor_cv_version(self): - """CV can published with custom major and minor versions - - :id: 6697cd22-253a-4bdc-a108-7e0af22caaf4 - - :steps: - - 1. Create product and repository with custom contents - 2. Sync the repository - 3. Create CV with above repository - 4. Publish the CV with custom major and minor versions - - :expectedresults: + assert '1 subtask(s) failed' in error.value.message + target_sat.wait_for_tasks( + search_query=( + 'Actions::Katello::ContentView::Remove and ' + f'organization_id = {function_import_org_with_manifest.id}' + ), + max_tries=5, + poll_rate=10, + ) - 1. CV version with custom major and minor versions is created + # Verify no content is imported and the import CV can be deleted + imported_cv = target_sat.cli.ContentView.info( + {'name': cv['name'], 'organization-id': function_import_org_with_manifest.id} + ) + assert len(imported_cv['versions']) == 0, 'There should be no CV version imported' - :CaseLevel: System - """ - org = make_org() - major = randint(1, 1000) - minor = randint(1, 1000) - content_view = make_content_view( - {'name': gen_string('alpha'), 'organization-id': org['id']} - ) - ContentView.publish({'id': content_view['id'], 'major': major, 'minor': minor}) - content_view = ContentView.info({'id': content_view['id']}) - cvv = content_view['versions'][0]['version'] - assert cvv.split('.')[0] == str(major) - assert cvv.split('.')[1] == str(minor) + target_sat.cli.ContentView.delete({'id': imported_cv['id']}) + with pytest.raises(CLIReturnCodeError) as error: + target_sat.cli.ContentView.info( + {'name': cv['name'], 'organization-id': function_import_org_with_manifest.id} + ) + assert 'content_view not found' in error.value.message, 'The imported CV should be gone' @pytest.mark.tier3 def test_postive_export_cv_with_mixed_content_repos( - self, class_export_entities, export_import_cleanup_module, target_sat, module_org + self, + export_import_cleanup_function, + target_sat, + function_org, + function_synced_custom_repo, + function_synced_file_repo, + function_synced_docker_repo, + function_synced_AC_repo, ): """Exporting CV version having yum and non-yum(docker) is successful :id: ffcdbbc6-f787-4978-80a7-4b44c389bf49 - :steps: + :setup: + 1. Synced repositories of each content type: yum, file, docker, AC - 1. Create product with yum and non-yum(docker) repos - 2. Sync the repositories - 3. Create CV with above product and publish - 4. Export CV version contents to a directory + :steps: + 1. 
Create CV, add all setup repos and publish. + 2. Export CV version contents to a directory. :expectedresults: - 1. Export will succeed, however the export wont contain non-yum repo. - No warning is printed (see BZ 1775383) + 1. Export succeeds and content is exported. :BZ: 1726457 :customerscenario: true """ - product = make_product( - { - 'organization-id': module_org.id, - 'name': gen_string('alpha'), - } - ) - nonyum_repo = make_repository( - { - 'content-type': 'docker', - 'docker-upstream-name': 'quay/busybox', - 'organization-id': module_org.id, - 'product-id': product['id'], - 'url': CONTAINER_REGISTRY_HUB, - }, - ) - Repository.synchronize({'id': nonyum_repo['id']}) - yum_repo = make_repository( - { - 'name': gen_string('alpha'), - 'download-policy': 'immediate', - 'mirror-on-sync': 'no', - 'product-id': product['id'], - } - ) - Repository.synchronize({'id': yum_repo['id']}) - content_view = make_content_view({'organization-id': module_org.id}) - # Add docker and yum repo - ContentView.add_repository( - { - 'id': content_view['id'], - 'organization-id': module_org.id, - 'repository-id': nonyum_repo['id'], - } - ) - ContentView.add_repository( - { - 'id': content_view['id'], - 'organization-id': module_org.id, - 'repository-id': yum_repo['id'], - } + content_view = target_sat.cli_factory.make_content_view( + {'organization-id': function_org.id} + ) + repos = [ + function_synced_custom_repo, + function_synced_file_repo, + function_synced_docker_repo, + function_synced_AC_repo, + ] + for repo in repos: + target_sat.cli.ContentView.add_repository( + { + 'id': content_view['id'], + 'organization-id': function_org.id, + 'repository-id': repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + exporting_cv = target_sat.cli.ContentView.info({'id': content_view['id']}) + assert len(exporting_cv['versions']) == 1 + exporting_cvv = target_sat.cli.ContentView.version_info( + {'id': exporting_cv['versions'][0]['id']} ) - ContentView.publish({'id': content_view['id']}) - exporting_cv_id = ContentView.info({'id': content_view['id']}) - assert len(exporting_cv_id['versions']) == 1 - exporting_cvv_id = exporting_cv_id['versions'][0] + assert len(exporting_cvv['repositories']) == len(repos) # check packages - exported_packages = Package.list({'content-view-version-id': exporting_cvv_id['id']}) + exported_packages = target_sat.cli.Package.list( + {'content-view-version-id': exporting_cvv['id']} + ) assert len(exported_packages) # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export cv - ContentExport.completeVersion( - {'id': exporting_cvv_id['id'], 'organization-id': module_org.id} + target_sat.cli.ContentExport.completeVersion( + {'id': exporting_cvv['id'], 'organization-id': function_org.id} ) # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) != '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != '' + + @pytest.mark.tier3 + def test_postive_export_import_cv_with_mixed_content_syncable( + self, + export_import_cleanup_function, + target_sat, + function_org, + function_synced_custom_repo, + function_synced_file_repo, + function_import_org, + ): + """Export and import CV with mixed content in the syncable format. + + :id: cb1aecac-d48a-4154-9ca7-71788674148f + + :setup: + 1. 
Synced repositories of syncable-supported content types: yum, file + + :steps: + 1. Create CV, add all setup repos and publish. + 2. Export CV version contents in syncable format. + 3. Import the syncable export, check the content. + + :expectedresults: + 1. Export succeeds and content is exported. + 2. Import succeeds, content is imported and matches the export. + """ + # Create CV, add all setup repos and publish + cv = target_sat.cli_factory.make_content_view({'organization-id': function_org.id}) + repos = [ + function_synced_custom_repo, + function_synced_file_repo, + ] + for repo in repos: + target_sat.cli.ContentView.add_repository( + { + 'id': cv['id'], + 'organization-id': function_org.id, + 'repository-id': repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': cv['id']}) + exporting_cv = target_sat.cli.ContentView.info({'id': cv['id']}) + exporting_cvv = target_sat.cli.ContentView.version_info( + {'id': exporting_cv['versions'][0]['id']} + ) + exported_packages = target_sat.cli.Package.list( + {'content-view-version-id': exporting_cvv['id']} + ) + exported_files = target_sat.cli.File.list({'content-view-version-id': exporting_cvv['id']}) + + # Export CV version contents in syncable format + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + export = target_sat.cli.ContentExport.completeVersion( + {'id': exporting_cvv['id'], 'organization-id': function_org.id, 'format': 'syncable'} + ) + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != '' + + # Import the syncable export + import_path = target_sat.move_pulp_archive(function_org, export['message']) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + importing_cv = target_sat.cli.ContentView.info( + {'name': exporting_cv['name'], 'organization-id': function_import_org.id} + ) + assert all( + [exporting_cv[key] == importing_cv[key] for key in ['label', 'name']] + ), 'Imported CV name/label does not match the export' + assert ( + len(exporting_cv['versions']) == len(importing_cv['versions']) == 1 + ), 'CV versions count does not match' + + importing_cvv = target_sat.cli.ContentView.version_info( + {'id': importing_cv['versions'][0]['id']} + ) + assert ( + len(exporting_cvv['repositories']) == len(importing_cvv['repositories']) == len(repos) + ), 'Repositories count does not match' + + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv['id']} + ) + imported_files = target_sat.cli.File.list({'content-view-version-id': importing_cvv['id']}) + assert exported_packages == imported_packages, 'Imported RPMs do not match the export' + assert exported_files == imported_files, 'Imported Files do not match the export' @pytest.mark.tier3 - def test_postive_import_export_cv_with_file_content( - self, target_sat, config_export_import_settings, export_import_cleanup_module, module_org + def test_postive_export_import_cv_with_file_content( + self, + target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_org, + function_synced_file_repo, + function_import_org, ): """Exporting and Importing cv with file content :id: d00739f0-dedf-4303-8929-889dc23260a4 - :steps: + :setup: + 1. Product with synced file-type repository. - 1. Create custom product and custom repo with file type - 2. Sync repo - 3. Create cv and add file repo created in step 1 and publish - 4. Export cv and import cv into another satellite - 5. 
Check imported cv has files in it + :steps: + 1. Create CV, add the file repo and publish. + 2. Export the CV and import it into another organization. + 3. Check the imported CV has files in it. - :expectedresults: Imported cv should have the files present in the cv of - the imported system + :expectedresults: + 1. Imported CV should have the files present. :BZ: 1995827 :customerscenario: true """ - # setup custom repo + # Create CV, add the file repo and publish. cv_name = import_cv_name = gen_string('alpha') - product = make_product({'organization-id': module_org.id}) - file_repo = make_repository( - { - 'organization-id': module_org.id, - 'product-id': product['id'], - 'content-type': 'file', - 'url': settings.repos.file_type_repo.url, - } + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_org.id} ) - Repository.synchronize({'id': file_repo['id']}) - # create cv and publish - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': file_repo['id'], + 'organization-id': function_org.id, + 'repository-id': function_synced_file_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - exporting_cv_id = ContentView.info({'id': cv['id']}) - assert len(exporting_cv_id['versions']) == 1 - exporting_cvv_id = exporting_cv_id['versions'][0]['id'] + target_sat.cli.ContentView.publish({'id': cv['id']}) + exporting_cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(exporting_cv['versions']) == 1 + exporting_cvv_id = exporting_cv['versions'][0]['id'] # check files - exported_files = File.list({'content-view-version-id': exporting_cvv_id}) + exported_files = target_sat.cli.File.list({'content-view-version-id': exporting_cvv_id}) assert len(exported_files) # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - # Export cv - export = ContentExport.completeVersion( - {'id': exporting_cvv_id, 'organization-id': module_org.id} + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + # Export the CV + export = target_sat.cli.ContentExport.completeVersion( + {'id': exporting_cvv_id, 'organization-id': function_org.id} ) - import_path = target_sat.move_pulp_archive(module_org, export['message']) - - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # check that files are present in import_path + import_path = target_sat.move_pulp_archive(function_org, export['message']) + # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import files and verify content - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cvv = ContentView.info( - {'name': import_cv_name, 'organization-id': importing_org['id']} + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': import_cv_name, 'organization-id': function_import_org.id} )['versions'] assert len(importing_cvv) >= 1 - imported_files = File.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_files = target_sat.cli.File.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_files) assert 
len(exported_files) == len(imported_files) + @pytest.mark.tier2 + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) + def test_positive_export_rerun_failed_import( + self, + target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_synced_rh_repo, + function_sca_manifest_org, + function_import_org_with_manifest, + ): + """Verify that import can be rerun successfully after failed import. + + :id: 73e7cece-9a93-4203-9c2c-813d5a8d7700 + + :parametrized: yes + + :setup: + 1. Enabled and synced RH repository. + + :steps: + 1. Create CV, add repo from the setup, publish it and run export. + 2. Start import of the CV into another organization and kill it before it's done. + 3. Rerun the import again, let it finish and check the CVV was imported. + + :expectedresults: + 1. First import should fail, no CVV should be added. + 2. Second import should succeed without errors and should contain the CVV. + + :CaseImportance: Medium + + :BZ: 2058905 + + :customerscenario: true + """ + # Create CV and publish + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} + ) + target_sat.cli.ContentView.add_repository( + { + 'id': cv['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 1 + cvv = cv['versions'][0] + # Verify export directory is empty + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' + # Export the CV + export = target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id} + ) + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) + assert target_sat.execute(f'ls {import_path}').stdout != '' + # Run the import asynchronously + task_id = target_sat.cli.ContentImport.version( + { + 'organization-id': function_import_org_with_manifest.id, + 'path': import_path, + 'async': True, + } + )['id'] + # Wait for the CV creation on import and make the import fail + wait_for( + lambda: target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + ) + ) + target_sat.cli.Service.restart() + sleep(30) + # Assert that the initial import task did not succeed and CVV was removed + assert ( + target_sat.api.ForemanTask() + .search( + query={'search': f'Actions::Katello::ContentViewVersion::Import and id = {task_id}'} + )[0] + .result + != 'success' + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + )['versions'] + assert len(importing_cvv) == 0 + # Rerun the import and let it finish + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org_with_manifest.id, 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + )['versions'] + assert len(importing_cvv) == 1 + @pytest.mark.tier3 - def test_postive_import_export_ansible_collection_repo( + @pytest.mark.skip_if_open("BZ:2262379") + def test_postive_export_import_ansible_collection_repo( self, target_sat, config_export_import_settings, export_import_cleanup_function, function_org, + function_import_org, ): """Exporting and 
Importing library with ansible collection :id: 71dd1e1a-caad-48be-a180-206c8aa78639 :steps: + 1. Create custom product and custom repo with ansible collection. + 2. Sync the repo. + 3. Export library and import into another satellite. + 4. Check imported library has ansible collection in it. - 1. Create custom product and custom repo with ansible collection - 2. Sync repo - 3. Export library and import into another satellite - 4. Check imported library has ansible collection in it - - :expectedresults: Imported library should have the ansible collection present in the - imported product + :expectedresults: + 1. Imported library should have the ansible collection present in the imported product. """ # setup ansible_collection product and repo - export_product = make_product({'organization-id': function_org.id}) - ansible_repo = make_repository( + export_product = target_sat.cli_factory.make_product({'organization-id': function_org.id}) + ansible_repo = target_sat.cli_factory.make_repository( { 'organization-id': function_org.id, 'product-id': export_product['id'], @@ -1701,26 +1618,24 @@ def test_postive_import_export_ansible_collection_repo( { name: theforeman.operations, version: "0.1.0"} ]}', } ) - Repository.synchronize({'id': ansible_repo['id']}) + target_sat.cli.Repository.synchronize({'id': ansible_repo['id']}) # Export library - export = ContentExport.completeLibrary({'organization-id': function_org.id}) + export = target_sat.cli.ContentExport.completeLibrary({'organization-id': function_org.id}) import_path = target_sat.move_pulp_archive(function_org, export['message']) - - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - - # check that files are present in import_path + # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import files and verify content - ContentImport.library({'organization-id': importing_org['id'], 'path': import_path}) - assert Product.list({'organization-id': importing_org['id']}) - import_product = Product.info( + target_sat.cli.ContentImport.library( + {'organization-id': function_import_org.id, 'path': import_path} + ) + assert target_sat.cli.Product.list({'organization-id': function_import_org.id}) + import_product = target_sat.cli.Product.info( { - 'organization-id': importing_org['id'], - 'id': Product.list({'organization-id': importing_org['id']})[0]['id'], + 'organization-id': function_import_org.id, + 'id': target_sat.cli.Product.list({'organization-id': function_import_org.id})[0][ + 'id' + ], } ) assert import_product['name'] == export_product['name'] @@ -1728,90 +1643,140 @@ def test_postive_import_export_ansible_collection_repo( assert import_product['content'][0]['content-type'] == "ansible_collection" @pytest.mark.tier3 - def test_negative_import_redhat_cv_without_manifest( + def test_postive_export_import_repo_with_GPG( self, - export_import_cleanup_function, - config_export_import_settings, - function_entitlement_manifest_org, target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_org, + function_synced_custom_repo, + function_import_org, ): - """Redhat content can't be imported into satellite/organization without manifest + """Test export and import of a repository with GPG key - :id: b0f5f95b-3f9f-4827-84f1-b66517dc34f1 + :id: a5b455aa-e87e-4ae5-a1c7-4c8e6c7f7af5 - :steps: + :setup: + 1. Product with synced custom repository. - 1. 
Enable product and repository with redhat contents. - 2. Sync the repository. - 3. Create CV with above product and publish. - 4. Export CV version contents to a directory - 5. Import those contents to other org without manifest. + :steps: + 1. Create a GPG key and add it to the setup repository. + 2. Export the repository and import it into another organization. :expectedresults: + 1. Export and import succeeds without any errors. + 2. GPG key is imported to the importing org too. - 1. Import fails with message "Could not import the archive.: - No manifest found. Import a manifest with the appropriate subscriptions before - importing content." + :CaseImportance: Medium + + :BZ: 2178645, 2090390 + :customerscenario: true """ - # Setup rhel repo - cv_name = gen_string('alpha') - RepositorySet.enable( + # Create a GPG key and add it to the setup repository. + gpg_key = target_sat.api.GPGKey( + organization=function_org, + content=DataFile.VALID_GPG_KEY_FILE.read_text(), + ).create() + target_sat.cli.Repository.update( + {'id': function_synced_custom_repo.id, 'gpg-key-id': gpg_key.id} + ) + # Export the repository and import it into another organization. + export = target_sat.cli.ContentExport.completeRepository( + {'id': function_synced_custom_repo.id} + ) + import_path = target_sat.move_pulp_archive(function_org, export['message']) + target_sat.cli.ContentImport.repository( { - 'basearch': 'x86_64', - 'name': REPOSET['rhva6'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], - 'releasever': '6Server', + 'organization-id': function_import_org.id, + 'path': import_path, } ) - repo = Repository.info( + # Check the imported repo has the GPG key assigned. + imported_repo = target_sat.cli.Repository.info( { - 'name': REPOS['rhva6']['name'], - 'organization-id': function_entitlement_manifest_org.id, - 'product': PRDS['rhel'], + 'name': function_synced_custom_repo.name, + 'product': function_synced_custom_repo.product.name, + 'organization-id': function_import_org.id, } ) - # Update the download policy to 'immediate' and sync - Repository.update({'download-policy': 'immediate', 'id': repo['id']}) - Repository.synchronize({'id': repo['id']}, timeout=7200000) + assert int(imported_repo['content-counts']['packages']) + assert imported_repo['gpg-key']['name'] == gpg_key.name + # Check the GPG key is imported to the importing org too. + imported_gpg = target_sat.cli.ContentCredential.info( + {'organization-id': function_import_org.id, 'name': gpg_key.name} + ) + assert imported_gpg + assert imported_gpg['content'] == gpg_key.content + + @pytest.mark.tier3 + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) + def test_negative_import_redhat_cv_without_manifest( + self, + target_sat, + export_import_cleanup_function, + config_export_import_settings, + function_sca_manifest_org, + function_synced_rh_repo, + ): + """Redhat content can't be imported into satellite/organization without manifest + + :id: b0f5f95b-3f9f-4827-84f1-b66517dc34f1 + + :parametrized: yes + + :setup: + 1. Enabled and synced RH repository. + + :steps: + 1. Create CV with the RH repo and publish. + 2. Export CV version contents to a directory. + 3. Import those contents to other org without manifest. + + :expectedresults: + 1. Import fails with message "Could not import the archive.: + No manifest found. Import a manifest with the appropriate subscriptions before + importing content." 
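Illustrative recovery sketch (hedged; not exercised by this test): uploading a manifest into the importing organization first is expected to make the same import pass. The calls mirror helpers used elsewhere in this module, and the fixture and org names here are assumptions, not values from this change:

    importing_org = target_sat.cli_factory.make_org()
    target_sat.upload_manifest(importing_org['id'], function_sca_manifest.content)
    target_sat.cli.ContentImport.version(
        {'organization-id': importing_org['id'], 'path': import_path}
    )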
+ """ # Create cv and publish - cv = make_content_view( - {'name': cv_name, 'organization-id': function_entitlement_manifest_org.id} + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': function_entitlement_manifest_org.id, - 'repository-id': repo['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], } ) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 1 cvv = cv['versions'][0] # Verify export directory is empty - assert ( - target_sat.validate_pulp_filepath(function_entitlement_manifest_org, PULP_EXPORT_DIR) - == '' - ) + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': function_entitlement_manifest_org.id} - ) - import_path = target_sat.move_pulp_archive( - function_entitlement_manifest_org, export['message'] + export = target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id} ) + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) # check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # importing portion - importing_org = make_org() + importing_org = target_sat.cli_factory.make_org() # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) + target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) with pytest.raises(CLIReturnCodeError) as error: - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) + target_sat.cli.ContentImport.version( + {'organization-id': importing_org['id'], 'path': import_path} + ) assert ( 'Could not import the archive.:\n No manifest found. Import a manifest with the ' 'appropriate subscriptions before importing content.' @@ -1820,27 +1785,31 @@ def test_negative_import_redhat_cv_without_manifest( @pytest.mark.tier2 def test_positive_import_content_for_disconnected_sat_with_existing_content( self, + target_sat, class_export_entities, config_export_import_settings, - target_sat, module_org, + function_import_org, ): """Import a content view into a disconnected satellite for an existing content view :id: 22c077dc-0041-4c6c-9da5-fd58e5497ae8 - :steps: + :setup: + 1. Product with synced custom repository, published in a CV. - 1. Sync a few repos - 2. Create a cv with the repo from 1 - 3. Run complete export - 4. On Disconnected satellite, create a cv with same name as cv on 2 and with - 'import-only' selected - 5. run import command + :steps: + 1. Run complete export of the CV from setup. + 2. On Disconnected satellite, create a CV with the same name as setup CV and with + 'import-only' set to False and run the import command. + 3. On Disconnected satellite, create a CV with the same name as setup CV and with + 'import-only' set to True and run the import command. - :expectedresults: Import should run successfully + :expectedresults: + 1. Import should fail with correct message when existing CV has 'import-only' set False. + 2. 
Import should succeed when existing CV has 'import-only' set True. - :bz: 2030101 + :BZ: 2030101 :customerscenario: true """ @@ -1849,389 +1818,848 @@ def test_positive_import_content_for_disconnected_sat_with_existing_content( # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': export_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) - # importing portion - importing_org = make_org() - # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) - # check that files are present in import_path + # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import section - # Create cv with 'import-only' set to true - make_content_view( - {'name': export_cv_name, 'import-only': True, 'organization-id': importing_org['id']} + # Create cv with 'import-only' set to False + cv = target_sat.cli_factory.make_content_view( + { + 'name': export_cv_name, + 'import-only': False, + 'organization-id': function_import_org.id, + } + ) + with pytest.raises(CLIReturnCodeError) as error: + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + assert ( + f"Unable to import in to Content View specified in the metadata - '{export_cv_name}'. " + "The 'import_only' attribute for the content view is set to false. To mark this " + "Content View as importable, have your system administrator run the following command " + f"on the server. \n foreman-rake katello:set_content_view_import_only ID={cv.id}" + ) in error.value.message + target_sat.cli.ContentView.remove({'id': cv.id, 'destroy-content-view': 'yes'}) + + # Create cv with 'import-only' set to True + target_sat.cli_factory.make_content_view( + {'name': export_cv_name, 'import-only': True, 'organization-id': function_import_org.id} + ) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} ) - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cvv = ContentView.info( - {'name': export_cv_name, 'organization-id': importing_org['id']} + importing_cvv = target_sat.cli.ContentView.info( + {'name': export_cv_name, 'organization-id': function_import_org.id} )['versions'] assert len(importing_cvv) >= 1 - -class TestInterSatelliteSync: - """Implements InterSatellite Sync tests in CLI""" - - @pytest.mark.stubbed @pytest.mark.tier3 - def test_negative_export_cv(self): - """Export whole CV version contents is aborted due to insufficient - memory. - - :id: 4fa58c0c-95d2-45f5-a7fc-c5f3312a989c - - :steps: Attempt to Export whole CV version contents to a directory - which has less memory available than contents size. - - :expectedresults: The export CV version contents has been aborted due - to insufficient memory. 
- - :CaseAutomation: NotAutomated + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) + def test_positive_export_incremental_syncable_check_content( + self, + target_sat, + export_import_cleanup_function, + config_export_import_settings, + function_sca_manifest_org, + function_synced_rh_repo, + ): + """Export complete and incremental CV version in syncable format and assert that all + files referenced in the repomd.xml (including productid) are present in the exports. - :CaseLevel: System - """ + :id: 6ff771cd-39ef-4865-8ae8-629f4baf5f98 - @pytest.mark.stubbed - @pytest.mark.tier3 - @pytest.mark.upgrade - def test_positive_export_import_cv_iso(self): - """Export CV version contents in directory as iso and Import it. + :parametrized: yes - :id: 5c39afd4-09d6-43c5-8d50-edc98105b7db + :setup: + 1. Enabled and synced RH repository. :steps: - - 1. Export whole CV version contents as ISO - 2. Import these copied ISO to some other org/satellite. + 1. Create a CV, add the product and publish it. + 2. Export complete syncable CV version. + 3. Publish new CV version. + 4. Export incremental syncable CV version. + 5. Verify the exports contain all files listed in the repomd.xml. :expectedresults: + 1. Complete and incremental export succeed. + 2. All files referenced in the repomd.xml files are present in the exports. - 1. CV version has been exported to directory as ISO in specified in - settings. - 2. The exported ISO has been imported in org/satellite. + :BZ: 2212523 - :CaseAutomation: NotAutomated - - :CaseLevel: System + :customerscenario: true """ + # Create cv and publish + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} + ) + target_sat.cli.ContentView.add_repository( + { + 'id': cv['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rh_repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 1 + cvv = cv['versions'][0] + # Verify export directory is empty + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' + # Export complete and check the export directory + target_sat.cli.ContentExport.completeVersion({'id': cvv['id'], 'format': 'syncable'}) + assert '1.0' in target_sat.validate_pulp_filepath( + function_sca_manifest_org, PULP_EXPORT_DIR + ) + # Publish new CV version, export incremental and check the export directory + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 2 + cvv = max(cv['versions'], key=lambda x: int(x['id'])) + target_sat.cli.ContentExport.incrementalVersion({'id': cvv['id'], 'format': 'syncable'}) + assert '2.0' in target_sat.validate_pulp_filepath( + function_sca_manifest_org, PULP_EXPORT_DIR + ) + # Verify that the content referenced in repomd.xml files is present in both exports + repomd_files = target_sat.execute( + f'find {PULP_EXPORT_DIR}{function_sca_manifest_org.name}/{cv_name}/ -name repomd.xml' + ).stdout.splitlines() + assert len(repomd_files) == 2, 'Unexpected count of exports identified.' 
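# Hedged sketch: the repomd.xml cross-check performed next can equivalently be
# expressed with stdlib XML parsing instead of grep. The namespace URI is the
# standard yum repo namespace (an assumption, not taken from this change);
# target_sat.execute is the same helper used throughout this module.
def _repomd_hrefs(target_sat, repomd_path):
    """Return the set of location hrefs referenced by one repomd.xml."""
    from xml.etree import ElementTree

    ns = {'repo': 'http://linux.duke.edu/metadata/repo'}
    root = ElementTree.fromstring(target_sat.execute(f'cat {repomd_path}').stdout)
    # Each <data> entry (primary, filelists, productid, ...) holds one href,
    # relative to the repository root (the parent of the repodata directory).
    return {loc.get('href') for loc in root.findall('repo:data/repo:location', ns)}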
+ for repomd in repomd_files: + repodata_dir = os.path.split(repomd)[0] + repomd_refs = set( + target_sat.execute( + f'''grep -oP '(?<= pkg_cnt_1 + export_2 = target_sat.cli.ContentExport.incrementalVersion({'id': cvv_2['id']}) + assert '2.0' in target_sat.validate_pulp_filepath( + function_sca_manifest_org, PULP_EXPORT_DIR + ) - :CaseLevel: System - """ + # Import version 2, check the package count. + import_path2 = target_sat.move_pulp_archive(function_sca_manifest_org, export_2['message']) + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org_with_manifest.id, 'path': import_path2} + ) + imp_cv = target_sat.cli.ContentView.info( + {'name': exp_cv['name'], 'organization-id': function_import_org_with_manifest.id} + ) + assert len(imp_cv['versions']) == 2 + imp_cvv = max(imp_cv['versions'], key=lambda x: int(x['id'])) + assert ( + target_sat.api.ContentViewVersion(id=imp_cvv['id']).read().package_count + == pkg_cnt_2 + == int(function_synced_rh_repo['content-counts']['packages']) + ), 'Unexpected package count after second import' + + # Check the package count available to install on the content host. + res = rhel_contenthost.execute('dnf clean all && dnf repolist -v') + assert res.status == 0 + assert ( + f'Repo-available-pkgs: {pkg_cnt_2}' in res.stdout + ), 'Package count available on the host did not meet the expectation' - @pytest.mark.stubbed - @pytest.mark.tier3 - @pytest.mark.upgrade - def test_positive_export_import_redhat_mix_cv(self): - """Export CV version having Red Hat and custom repo in directory - and Import them. + # Install the package. + res = rhel_contenthost.execute(f'dnf -y install {filtered_pkg}') + assert res.status == 0, f'Installation from the import failed:\n{res.stdout}' - :id: a38cf67d-563c-46f0-a263-4825b26faf2b - :steps: +@pytest.fixture(scope='module') +def module_downstream_sat(module_satellite_host): + """Provides Downstream Satellite.""" + module_satellite_host.cli.Settings.set( + {'name': 'subscription_connection_enabled', 'value': 'No'} + ) + return module_satellite_host + + +@pytest.fixture +def function_downstream_org(module_downstream_sat, function_sca_manifest): + """Provides organization with manifest on Downstream Satellite.""" + org = module_downstream_sat.api.Organization().create() + module_downstream_sat.upload_manifest(org.id, function_sca_manifest.content) + return org + + +def _set_downstream_org( + downstream_sat, + upstream_sat, + downstream_org, + upstream_org='Default_Organization', + username=settings.server.admin_username, + password=settings.server.admin_password, + lce_label=None, + cv_label=None, +): + """Configures Downstream organization to sync from particular Upstream organization. + + :param downstream_sat: Downstream Satellite instance. + :param upstream_sat: Upstream Satellite instance. + :param downstream_org: Downstream organization to be configured. + :param upstream_org: Upstream organization to sync CDN content from, + default: Default_Organization + :param username: Username for authentication, default: admin username from settings. + :param password: Password for authentication, default: admin password from settings. + :param lce_label: Upstream Lifecycle Environment, default: Library + :param cv_label: Upstream Content View Label, default: Default_Organization_View. + :return: True if succeeded. + """ + # Create Content Credentials with Upstream Satellite's katello-server-ca.crt. 
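# Hedged usage sketch for this helper, mirroring the call made in
# TestNetworkSync below (leaving lce_label/cv_label as None falls back to the
# upstream defaults documented in the docstring above):
#
#     assert _set_downstream_org(
#         downstream_sat=module_downstream_sat,
#         upstream_sat=target_sat,
#         downstream_org=function_downstream_org,
#         upstream_org=function_sca_manifest_org,
#     ), 'Downstream org configuration failed'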
+ crt_file = f'{upstream_sat.hostname}.crt' + downstream_sat.execute( + f'curl -o {crt_file} http://{upstream_sat.hostname}/pub/katello-server-ca.crt' + ) + cc = downstream_sat.cli.ContentCredential.create( + { + 'name': upstream_sat.hostname, + 'organization-id': downstream_org.id, + 'path': crt_file, + 'content-type': 'cert', + } + ) + # Set the CDN configuration to Network Sync. + res = downstream_sat.cli.Org.configure_cdn( + { + 'id': downstream_org.id, + 'type': 'network_sync', + 'url': f'https://{upstream_sat.hostname}/', + 'username': username, + 'password': password, + 'upstream-organization-label': upstream_org.label, + 'upstream-lifecycle-environment-label': lce_label, + 'upstream-content-view-label': cv_label, + 'ssl-ca-credential-id': cc['id'], + } + ) + return 'Updated CDN configuration' in res - 1. Export whole CV version having mixed repos to a path accessible - over HTTP. - 2. Import the Red Hat repository by defining the CDN URL from the - exported HTTP URL. - 3. Import custom repo by creating new repo and setting yum repo url - to exported HTTP url. - :expectedresults: Both custom and Red Hat repos are imported - successfully. +class TestNetworkSync: + """Implements Network Sync scenarios.""" - :CaseAutomation: NotAutomated + @pytest.mark.tier2 + @pytest.mark.parametrize( + 'function_synced_rh_repo', + ['rhae2'], + indirect=True, + ) + def test_positive_network_sync_rh_repo( + self, + target_sat, + function_sca_manifest_org, + function_synced_rh_repo, + module_downstream_sat, + function_downstream_org, + ): + """Sync a RH repo from Upstream to Downstream Satellite - :CaseLevel: System - """ + :id: fdb58c18-0a64-418b-990d-2233381fee8f - @pytest.mark.stubbed - @pytest.mark.tier3 - @pytest.mark.upgrade - def test_positive_install_package_from_imported_repos(self): - """Install packages in client from imported repo of Downstream satellite. + :parametrized: yes - :id: a81ffb55-398d-4ad0-bcae-5ed48f504ded + :setup: + 1. Enabled and synced RH yum repository at Upstream Sat. + 2. Organization with manifest at Downstream Sat. :steps: + 1. Set the Downstream org to sync from Upstream org. + 2. Enable and sync the repository from Upstream to Downstream. - 1. Export whole Red Hat YUM repo to a path accessible over HTTP. - 2. Import the Red Hat repository by defining the CDN URL from the - exported HTTP URL. - 3. In downstream satellite create CV, AK with this imported repo. - 4. Register/Subscribe a client with a downstream satellite. - 5. Attempt to install a package on a client from imported repo of - downstream. + :expectedresults: + 1. Repository can be enabled and synced. - :expectedresults: The package is installed on client from imported repo - of downstream satellite. + :BZ: 2213128 - :CaseAutomation: NotAutomated + :customerscenario: true - :CaseLevel: System """ + assert _set_downstream_org( + downstream_sat=module_downstream_sat, + upstream_sat=target_sat, + downstream_org=function_downstream_org, + upstream_org=function_sca_manifest_org, + ), 'Downstream org configuration failed' + + # Enable and sync the repository. 
+ reposet = module_downstream_sat.cli.RepositorySet.list( + { + 'organization-id': function_downstream_org.id, + 'search': f'content_label={function_synced_rh_repo["content-label"]}', + } + ) + assert ( + len(reposet) == 1 + ), f'Expected just one reposet for "{function_synced_rh_repo["content-label"]}"' + res = module_downstream_sat.cli.RepositorySet.enable( + { + 'organization-id': function_downstream_org.id, + 'id': reposet[0]['id'], + 'basearch': DEFAULT_ARCHITECTURE, + } + ) + assert 'Repository enabled' in str(res), 'Repository enable failed' + + repos = module_downstream_sat.cli.Repository.list( + {'organization-id': function_downstream_org.id} + ) + assert len(repos) == 1, 'Expected 1 repo enabled' + repo = repos[0] + module_downstream_sat.cli.Repository.synchronize({'id': repo['id']}) + + repo = module_downstream_sat.cli.Repository.info({'id': repo['id']}) + assert 'Success' in repo['sync']['status'], 'Sync did not succeed' + assert ( + repo['content-counts'] == function_synced_rh_repo['content-counts'] + ), 'Content counts do not match' diff --git a/tests/foreman/cli/test_settings.py b/tests/foreman/cli/test_settings.py index 8e0c6144d35..fdd005e02e8 100644 --- a/tests/foreman/cli/test_settings.py +++ b/tests/foreman/cli/test_settings.py @@ -4,34 +4,30 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Settings :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random from time import sleep import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.settings import Settings from robottelo.config import settings -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import generate_strings_list -from robottelo.utils.datafactory import invalid_boolean_strings -from robottelo.utils.datafactory import invalid_emails_list -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_emails_list -from robottelo.utils.datafactory import valid_url_list -from robottelo.utils.datafactory import xdist_adapter +from robottelo.exceptions import CLIReturnCodeError +from robottelo.utils.datafactory import ( + gen_string, + generate_strings_list, + invalid_boolean_strings, + invalid_emails_list, + valid_data_list, + valid_emails_list, + valid_url_list, + xdist_adapter, +) @pytest.mark.stubbed @@ -49,7 +45,7 @@ def test_negative_update_hostname_with_empty_fact(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['discovery_prefix'], indirect=True) -def test_positive_update_hostname_prefix_without_value(setting_update): +def test_positive_update_hostname_prefix_without_value(setting_update, module_target_sat): """Update the Hostname_prefix settings without any string(empty values) :id: a84c28ea-6821-4c31-b4ab-8662c22c9135 @@ -59,15 +55,14 @@ def test_positive_update_hostname_prefix_without_value(setting_update): :BZ: 1470083 :expectedresults: Error should be raised on setting empty value for discovery_prefix setting - """ with pytest.raises(CLIReturnCodeError): - Settings.set({'name': "discovery_prefix", 'value': ""}) + module_target_sat.cli.Settings.set({'name': "discovery_prefix", 'value': ""}) @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['discovery_prefix'], indirect=True) -def test_positive_update_hostname_default_prefix(setting_update): +def test_positive_update_hostname_default_prefix(setting_update, module_target_sat): """Update the default set prefix of hostname_prefix setting :id: 
a6e46e53-6273-406a-8009-f184d9551d66 @@ -75,11 +70,10 @@ def test_positive_update_hostname_default_prefix(setting_update): :parametrized: yes :expectedresults: Default set prefix should be updated with new value - """ hostname_prefix_value = gen_string('alpha') - Settings.set({'name': "discovery_prefix", 'value': hostname_prefix_value}) - discovery_prefix = Settings.list({'search': 'name=discovery_prefix'})[0] + module_target_sat.cli.Settings.set({'name': "discovery_prefix", 'value': hostname_prefix_value}) + discovery_prefix = module_target_sat.cli.Settings.list({'search': 'name=discovery_prefix'})[0] assert hostname_prefix_value == discovery_prefix['value'] @@ -114,7 +108,7 @@ def test_negative_discover_host_with_invalid_prefix(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def test_positive_update_login_page_footer_text(setting_update): +def test_positive_update_login_page_footer_text(setting_update, module_target_sat): """Updates parameter "login_text" in settings :id: 4d4e1151-5bd6-4fa2-8dbb-e182b43ad7ec @@ -127,17 +121,16 @@ def test_positive_update_login_page_footer_text(setting_update): :parametrized: yes :expectedresults: Parameter is updated successfully - """ login_text_value = random.choice(list(valid_data_list().values())) - Settings.set({'name': "login_text", 'value': login_text_value}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': login_text_value}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text["value"] == login_text_value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def test_positive_update_login_page_footer_text_without_value(setting_update): +def test_positive_update_login_page_footer_text_without_value(setting_update, module_target_sat): """Updates parameter "login_text" without any string (empty value) :id: 01ce95de-2994-42b6-b9f8-f7882981fb69 @@ -150,16 +143,15 @@ def test_positive_update_login_page_footer_text_without_value(setting_update): :parametrized: yes :expectedresults: Message on login screen should be removed - """ - Settings.set({'name': "login_text", 'value': ""}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': ""}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text['value'] == '' @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def test_positive_update_login_page_footer_text_with_long_string(setting_update): +def test_positive_update_login_page_footer_text_with_long_string(setting_update, module_target_sat): """Attempt to update parameter "Login_page_footer_text" with long length string under General tab @@ -178,8 +170,8 @@ def test_positive_update_login_page_footer_text_with_long_string(setting_update) login_text_value = random.choice( list(generate_strings_list(length=1000, exclude_types=['latin1', 'utf8', 'cjk', 'html'])) ) - Settings.set({'name': "login_text", 'value': login_text_value}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': login_text_value}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text['value'] == login_text_value @@ -212,7 +204,7 @@ def 
test_positive_update_email_delivery_method_smtp(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['delivery_method'], indirect=True) -def test_positive_update_email_delivery_method_sendmail(setting_update): +def test_positive_update_email_delivery_method_sendmail(setting_update, module_target_sat): """Check Updating Sendmail params through settings subcommand :id: 578de898-fde2-4957-b39a-9dd059f490bf @@ -236,13 +228,13 @@ def test_positive_update_email_delivery_method_sendmail(setting_update): 'sendmail_location': '/usr/sbin/sendmail', } for key, value in sendmail_config_params.items(): - Settings.set({'name': f'{key}', 'value': f'{value}'}) - assert Settings.list({'search': f'name={key}'})[0]['value'] == value + module_target_sat.cli.Settings.set({'name': f'{key}', 'value': f'{value}'}) + assert module_target_sat.cli.Settings.list({'search': f'name={key}'})[0]['value'] == value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_reply_address'], indirect=True) -def test_positive_update_email_reply_address(setting_update): +def test_positive_update_email_reply_address(setting_update, module_target_sat): """Check email reply address is updated :id: cb0907d1-9cb6-45c4-b2bb-e2790ea55f16 @@ -257,8 +249,8 @@ def test_positive_update_email_reply_address(setting_update): """ email_address = random.choice(list(valid_emails_list())) email_address = email_address.replace('"', r'\"').replace('`', r'\`') - Settings.set({'name': "email_reply_address", 'value': email_address}) - email_reply_address = Settings.list( + module_target_sat.cli.Settings.set({'name': "email_reply_address", 'value': email_address}) + email_reply_address = module_target_sat.cli.Settings.list( {'search': 'name=email_reply_address'}, output_format='json' )[0] updated_email_address = ( @@ -269,7 +261,7 @@ def test_positive_update_email_reply_address(setting_update): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_reply_address'], indirect=True) -def test_negative_update_email_reply_address(setting_update): +def test_negative_update_email_reply_address(setting_update, module_target_sat): """Check email reply address is not updated :id: 2a2220c2-badf-47d5-ba3f-e6329930ab39 @@ -286,12 +278,14 @@ def test_negative_update_email_reply_address(setting_update): """ invalid_email_address = random.choice(list(invalid_emails_list())) with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'email_reply_address', 'value': invalid_email_address}) + module_target_sat.cli.Settings.set( + {'name': 'email_reply_address', 'value': invalid_email_address} + ) @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_subject_prefix'], indirect=True) -def test_positive_update_email_subject_prefix(setting_update): +def test_positive_update_email_subject_prefix(setting_update, module_target_sat): """Check email subject prefix is updated :id: c8e6b323-7b39-43d6-a9f1-5474f920bba2 @@ -305,14 +299,18 @@ def test_positive_update_email_subject_prefix(setting_update): :CaseImportance: Low """ email_subject_prefix_value = gen_string('alpha') - Settings.set({'name': "email_subject_prefix", 'value': email_subject_prefix_value}) - email_subject_prefix = Settings.list({'search': 'name=email_subject_prefix'})[0] + module_target_sat.cli.Settings.set( + {'name': "email_subject_prefix", 'value': email_subject_prefix_value} + ) + email_subject_prefix = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] assert email_subject_prefix_value == 
email_subject_prefix['value'] @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_subject_prefix'], indirect=True) -def test_negative_update_email_subject_prefix(setting_update): +def test_negative_update_email_subject_prefix(setting_update, module_target_sat): """Check email subject prefix is not updated :id: 8a638596-248f-4196-af36-ad2982196382 @@ -327,18 +325,26 @@ def test_negative_update_email_subject_prefix(setting_update): :CaseImportance: Low """ - email_subject_prefix_original = Settings.list({'search': 'name=email_subject_prefix'})[0] + email_subject_prefix_original = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] email_subject_prefix_value = gen_string('alpha', 256) with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'email_subject_prefix', 'value': email_subject_prefix_value}) - email_subject_prefix = Settings.list({'search': 'name=email_subject_prefix'})[0] + module_target_sat.cli.Settings.set( + {'name': 'email_subject_prefix', 'value': email_subject_prefix_value} + ) + email_subject_prefix = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] assert email_subject_prefix == email_subject_prefix_original @pytest.mark.tier2 @pytest.mark.parametrize('send_welcome_email_value', ["true", "false"]) @pytest.mark.parametrize('setting_update', ['send_welcome_email'], indirect=True) -def test_positive_update_send_welcome_email(setting_update, send_welcome_email_value): +def test_positive_update_send_welcome_email( + setting_update, send_welcome_email_value, module_target_sat +): """Check email send welcome email is updated :id: cdaf6cd0-5eea-4252-87c5-f9ec3ba79ac1 @@ -353,15 +359,19 @@ def test_positive_update_send_welcome_email(setting_update, send_welcome_email_v :CaseImportance: Low """ - Settings.set({'name': 'send_welcome_email', 'value': send_welcome_email_value}) - host_value = Settings.list({'search': 'name=send_welcome_email'})[0]['value'] + module_target_sat.cli.Settings.set( + {'name': 'send_welcome_email', 'value': send_welcome_email_value} + ) + host_value = module_target_sat.cli.Settings.list({'search': 'name=send_welcome_email'})[0][ + 'value' + ] assert send_welcome_email_value == host_value @pytest.mark.tier2 @pytest.mark.parametrize('rss_enable_value', ["true", "false"]) @pytest.mark.parametrize('setting_update', ['rss_enable'], indirect=True) -def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value): +def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value, module_target_sat): """Check if the RSS feed can be enabled or disabled :id: 021cefab-2629-44e2-a30d-49c944d0a234 @@ -374,14 +384,14 @@ def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value): :CaseAutomation: Automated """ - Settings.set({'name': 'rss_enable', 'value': rss_enable_value}) - rss_setting = Settings.list({'search': 'name=rss_enable'})[0] + module_target_sat.cli.Settings.set({'name': 'rss_enable', 'value': rss_enable_value}) + rss_setting = module_target_sat.cli.Settings.list({'search': 'name=rss_enable'})[0] assert rss_setting["value"] == rss_enable_value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['rss_url'], indirect=True) -def test_positive_update_rssfeed_url(setting_update): +def test_positive_update_rssfeed_url(setting_update, module_target_sat): """Check if the RSS feed URL is updated :id: 166ff6f2-e36e-4934-951f-b947139d0d73 @@ -399,14 +409,14 @@ def test_positive_update_rssfeed_url(setting_update): :CaseAutomation: 
Automated """ test_url = random.choice(list(valid_url_list())) - Settings.set({'name': 'rss_url', 'value': test_url}) - updated_url = Settings.list({'search': 'name=rss_url'})[0] + module_target_sat.cli.Settings.set({'name': 'rss_url', 'value': test_url}) + updated_url = module_target_sat.cli.Settings.list({'search': 'name=rss_url'})[0] assert updated_url['value'] == test_url @pytest.mark.parametrize('value', **xdist_adapter(invalid_boolean_strings())) @pytest.mark.tier2 -def test_negative_update_send_welcome_email(value): +def test_negative_update_send_welcome_email(value, module_target_sat): """Check email send welcome email is updated :id: 2f75775d-72a1-4b2f-86c2-98c36e446099 @@ -424,7 +434,7 @@ def test_negative_update_send_welcome_email(value): :CaseImportance: Low """ with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'send_welcome_email', 'value': value}) + module_target_sat.cli.Settings.set({'name': 'send_welcome_email', 'value': value}) @pytest.mark.tier3 @@ -446,8 +456,6 @@ def test_positive_failed_login_attempts_limit(setting_update, target_sat): :CaseImportance: Critical - :CaseLevel: System - :parametrized: yes :expectedresults: failed_login_attempts_limit works as expected @@ -459,11 +467,11 @@ def test_positive_failed_login_attempts_limit(setting_update, target_sat): username = settings.server.admin_username password = settings.server.admin_password assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 0 - Settings.set({'name': 'failed_login_attempts_limit', 'value': '5'}) + target_sat.cli.Settings.set({'name': 'failed_login_attempts_limit', 'value': '5'}) for _ in range(5): assert target_sat.execute(f'hammer -u {username} -p BAD_PASS user list').status == 129 assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 129 sleep(301) assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 0 - Settings.set({'name': 'failed_login_attempts_limit', 'value': '0'}) - assert Settings.info({'name': 'failed_login_attempts_limit'})['value'] == '0' + target_sat.cli.Settings.set({'name': 'failed_login_attempts_limit', 'value': '0'}) + assert target_sat.cli.Settings.info({'name': 'failed_login_attempts_limit'})['value'] == '0' diff --git a/tests/foreman/cli/test_sso.py b/tests/foreman/cli/test_sso.py index d70a56a139f..949e021ff48 100644 --- a/tests/foreman/cli/test_sso.py +++ b/tests/foreman/cli/test_sso.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/foreman/cli/test_subnet.py b/tests/foreman/cli/test_subnet.py index 90ec40d3d05..74a6e1727fc 100644 --- a/tests/foreman/cli/test_subnet.py +++ b/tests/foreman/cli/test_subnet.py @@ -4,35 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Networking :Team: Rocket -:TestType: Functional - :CaseImportance: Medium -:Upstream: No """ import random import re +from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_netmask import pytest -from fauxfactory import gen_choice -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr - -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_domain -from robottelo.cli.factory import make_subnet -from robottelo.cli.subnet import Subnet + from robottelo.constants import 
SUBNET_IPAM_TYPES -from robottelo.utils.datafactory import filtered_datapoint -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError +from robottelo.utils.datafactory import ( + filtered_datapoint, + parametrized, + valid_data_list, +) @filtered_datapoint @@ -74,7 +65,7 @@ def invalid_missing_attributes(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_CRUD(): +def test_positive_CRUD(module_target_sat): """Create, update and delete subnet :id: d74a52a7-df56-44ef-89a3-081c14e81e43 @@ -85,16 +76,16 @@ def test_positive_CRUD(): """ name = gen_choice(list(valid_data_list().values())) pool = sorted(valid_addr_pools()[0]) - mask = '255.255.255.0' + mask = gen_netmask() # generate pool range from network address network = gen_ipaddr() from_ip = re.sub(r'\d+$', str(pool[0]), network) to_ip = re.sub(r'\d+$', str(pool[1]), network) domains_amount = random.randint(2, 3) - domains = [make_domain() for _ in range(domains_amount)] + domains = [module_target_sat.cli_factory.make_domain() for _ in range(domains_amount)] gateway = gen_ipaddr(ip3=True) ipam_type = SUBNET_IPAM_TYPES['dhcp'] - subnet = make_subnet( + subnet = module_target_sat.cli_factory.make_subnet( { 'name': name, 'from': from_ip, @@ -107,7 +98,7 @@ def test_positive_CRUD(): } ) # Check if Subnet can be listed - subnets_ids = [subnet_['id'] for subnet_ in Subnet.list()] + subnets_ids = [subnet_['id'] for subnet_ in module_target_sat.cli.Subnet.list()] assert subnet['id'] in subnets_ids assert subnet['name'] == name assert subnet['start-of-ip-range'] == from_ip @@ -123,11 +114,11 @@ def test_positive_CRUD(): pool = sorted(valid_addr_pools()[0]) # generate pool range from network address new_network = gen_ipaddr() - new_mask = '255.255.192.0' + new_mask = gen_netmask() ip_from = re.sub(r'\d+$', str(pool[0]), new_network) ip_to = re.sub(r'\d+$', str(pool[1]), new_network) ipam_type = SUBNET_IPAM_TYPES['internal'] - Subnet.update( + module_target_sat.cli.Subnet.update( { 'new-name': new_name, 'from': ip_from, @@ -139,7 +130,7 @@ def test_positive_CRUD(): 'domain-ids': "", # delete domains needed for subnet delete } ) - subnet = Subnet.info({'id': subnet['id']}) + subnet = module_target_sat.cli.Subnet.info({'id': subnet['id']}) assert subnet['name'] == new_name assert subnet['start-of-ip-range'] == ip_from assert subnet['end-of-ip-range'] == ip_to @@ -148,14 +139,14 @@ def test_positive_CRUD(): assert ipam_type in subnet['ipam'] # delete subnet - Subnet.delete({'id': subnet['id']}) + module_target_sat.cli.Subnet.delete({'id': subnet['id']}) with pytest.raises(CLIReturnCodeError): - Subnet.info({'id': subnet['id']}) + module_target_sat.cli.Subnet.info({'id': subnet['id']}) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_missing_attributes())) -def test_negative_create_with_attributes(options): +def test_negative_create_with_attributes(options, module_target_sat): """Create subnet with invalid or missing required attributes :id: de468dd3-7ba8-463e-881a-fd1cb3cfc7b6 @@ -167,13 +158,13 @@ def test_negative_create_with_attributes(options): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError, match='Could not create the subnet:'): - make_subnet(options) + module_target_sat.cli_factory.make_subnet(options) @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.parametrize('pool', **parametrized(invalid_addr_pools())) -def test_negative_create_with_address_pool(pool): +def 
test_negative_create_with_address_pool(pool, module_target_sat): """Create subnet with invalid address pool range :parametrized: yes @@ -184,20 +175,19 @@ def test_negative_create_with_address_pool(pool): :CaseImportance: Medium """ - mask = '255.255.255.0' + mask = gen_netmask() network = gen_ipaddr() - opts = {'mask': mask, 'network': network} + options = {'mask': mask, 'network': network} # generate pool range from network address for key, val in pool.items(): - opts[key] = re.sub(r'\d+$', str(val), network) - with pytest.raises(CLIFactoryError) as raise_ctx: - make_subnet(opts) - assert 'Could not create the subnet:' in str(raise_ctx.value) + options[key] = re.sub(r'\d+$', str(val), network) + with pytest.raises(CLIFactoryError, match='Could not create the subnet:'): + module_target_sat.cli_factory.make_subnet(options) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_missing_attributes())) -def test_negative_update_attributes(options): +def test_negative_update_attributes(request, options, module_target_sat): """Update subnet with invalid or missing required attributes :parametrized: yes @@ -208,19 +198,20 @@ def test_negative_update_attributes(options): :CaseImportance: Medium """ - subnet = make_subnet() + subnet = module_target_sat.cli_factory.make_subnet() + request.addfinalizer(lambda: module_target_sat.cli.Subnet.delete({'id': subnet['id']})) options['id'] = subnet['id'] with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): - Subnet.update(options) - # check - subnet is not updated - result = Subnet.info({'id': subnet['id']}) - for key in options.keys(): - assert subnet[key] == result[key] + module_target_sat.cli.Subnet.update(options) + # check - subnet is not updated + result = module_target_sat.cli.Subnet.info({'id': subnet['id']}) + for key in ['name', 'network-addr', 'network-mask']: + assert subnet[key] == result[key] @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_addr_pools())) -def test_negative_update_address_pool(options): +def test_negative_update_address_pool(request, options, module_target_sat): """Update subnet with invalid address pool :parametrized: yes @@ -231,15 +222,16 @@ def test_negative_update_address_pool(options): :CaseImportance: Medium """ - subnet = make_subnet() + subnet = module_target_sat.cli_factory.make_subnet() + request.addfinalizer(lambda: module_target_sat.cli.Subnet.delete({'id': subnet['id']})) opts = {'id': subnet['id']} # generate pool range from network address for key, val in options.items(): opts[key] = re.sub(r'\d+$', str(val), subnet['network-addr']) with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): - Subnet.update(opts) + module_target_sat.cli.Subnet.update(opts) # check - subnet is not updated - result = Subnet.info({'id': subnet['id']}) + result = module_target_sat.cli.Subnet.info({'id': subnet['id']}) for key in ['start-of-ip-range', 'end-of-ip-range']: assert result[key] == subnet[key] diff --git a/tests/foreman/cli/test_subscription.py b/tests/foreman/cli/test_subscription.py index 4628cb02ca3..c28ba08e61a 100644 --- a/tests/foreman/cli/test_subscription.py +++ b/tests/foreman/cli/test_subscription.py @@ -4,43 +4,33 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: SubscriptionManagement :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +from manifester import Manifester from nailgun import entities 
+import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_activation_key -from robottelo.cli.factory import make_product -from robottelo.cli.factory import make_repository -from robottelo.cli.host import Host -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.subscription import Subscription -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.config import settings +from robottelo.constants import EXPIRED_MANIFEST, PRDS, REPOS, REPOSET, DataFile +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.run_in_one_thread] @pytest.fixture(scope='module') -def golden_ticket_host_setup(request, module_sca_manifest_org): - new_product = make_product({'organization-id': module_sca_manifest_org.id}) - new_repo = make_repository({'product-id': new_product['id']}) - Repository.synchronize({'id': new_repo['id']}) - new_ak = make_activation_key( +def golden_ticket_host_setup(request, module_sca_manifest_org, module_target_sat): + new_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_sca_manifest_org.id} + ) + new_repo = module_target_sat.cli_factory.make_repository({'product-id': new_product['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + return module_target_sat.cli_factory.make_activation_key( { 'lifecycle-environment': 'Library', 'content-view': 'Default Organization View', @@ -48,11 +38,10 @@ def golden_ticket_host_setup(request, module_sca_manifest_org): 'auto-attach': False, } ) - return new_ak @pytest.mark.tier1 -def test_positive_manifest_upload(function_entitlement_manifest_org): +def test_positive_manifest_upload(function_entitlement_manifest_org, module_target_sat): """upload manifest :id: e5a0e4f8-fed9-4896-87a0-ac33f6baa227 @@ -62,12 +51,14 @@ def test_positive_manifest_upload(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_manifest_delete(function_entitlement_manifest_org): +def test_positive_manifest_delete(function_entitlement_manifest_org, module_target_sat): """Delete uploaded manifest :id: 01539c07-00d5-47e2-95eb-c0fd4f39090f @@ -76,14 +67,20 @@ def test_positive_manifest_delete(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - Subscription.delete_manifest({'organization-id': function_entitlement_manifest_org.id}) - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.Subscription.delete_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): +def test_positive_enable_manifest_reposet(function_entitlement_manifest_org, module_target_sat): """enable 
repository set :id: cc0f8f40-5ea6-4fa7-8154-acdc2cb56b45 @@ -91,12 +88,12 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): :expectedresults: you are able to enable and synchronize repository contained in a manifest - :CaseLevel: Integration - :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - RepositorySet.enable( + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.RepositorySet.enable( { 'basearch': 'x86_64', 'name': REPOSET['rhva6'], @@ -105,7 +102,7 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): 'releasever': '6Server', } ) - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': REPOS['rhva6']['name'], 'organization-id': function_entitlement_manifest_org.id, @@ -115,7 +112,7 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): @pytest.mark.tier3 -def test_positive_manifest_history(function_entitlement_manifest_org): +def test_positive_manifest_history(function_entitlement_manifest_org, module_target_sat): """upload manifest and check history :id: 000ab0a0-ec1b-497a-84ff-3969a965b52c @@ -125,14 +122,14 @@ def test_positive_manifest_history(function_entitlement_manifest_org): :CaseImportance: Medium """ org = function_entitlement_manifest_org - Subscription.list({'organization-id': org.id}, per_page=None) - history = Subscription.manifest_history({'organization-id': org.id}) + module_target_sat.cli.Subscription.list({'organization-id': org.id}, per_page=None) + history = module_target_sat.cli.Subscription.manifest_history({'organization-id': org.id}) assert f'{org.name} file imported successfully.' 
in ''.join(history) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_manifest_refresh(function_entitlement_manifest_org): +def test_positive_manifest_refresh(function_entitlement_manifest_org, module_target_sat): """upload manifest and refresh :id: 579bbbf7-11cf-4d78-a3b1-16d73bd4ca57 @@ -141,13 +138,19 @@ def test_positive_manifest_refresh(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - Subscription.refresh_manifest({'organization-id': function_entitlement_manifest_org.id}) - Subscription.delete_manifest({'organization-id': function_entitlement_manifest_org.id}) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.Subscription.refresh_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) + module_target_sat.cli.Subscription.delete_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) @pytest.mark.tier2 -def test_positive_subscription_list(function_entitlement_manifest_org): +def test_positive_subscription_list(function_entitlement_manifest_org, module_target_sat): """Verify that subscription list contains start and end date :id: 4861bcbc-785a-436d-98ce-14cfef7d6907 @@ -160,11 +163,11 @@ def test_positive_subscription_list(function_entitlement_manifest_org): :CaseImportance: Medium """ - subscription_list = Subscription.list( + subscription_list = module_target_sat.cli.Subscription.list( {'organization-id': function_entitlement_manifest_org.id}, per_page=False ) for column in ['start-date', 'end-date']: - assert column in subscription_list[0].keys() + assert column in subscription_list[0] @pytest.mark.tier2 @@ -193,14 +196,14 @@ def test_positive_delete_manifest_as_another_user(target_sat, function_entitleme ).create() # use the first admin to upload a manifest target_sat.put(f'{function_entitlement_manifest.path}', f'{function_entitlement_manifest.name}') - Subscription.with_user(username=user1.login, password=user1_password).upload( + target_sat.cli.Subscription.with_user(username=user1.login, password=user1_password).upload( {'file': f'{function_entitlement_manifest.name}', 'organization-id': f'{org.id}'} ) # try to search and delete the manifest with another admin - Subscription.with_user(username=user2.login, password=user2_password).delete_manifest( - {'organization-id': org.id} - ) - assert len(Subscription.list({'organization-id': org.id})) == 0 + target_sat.cli.Subscription.with_user( + username=user2.login, password=user2_password + ).delete_manifest({'organization-id': org.id}) + assert len(target_sat.cli.Subscription.list({'organization-id': org.id})) == 0 @pytest.mark.tier2 @@ -270,8 +273,49 @@ def test_positive_auto_attach_disabled_golden_ticket( rhel7_contenthost_class.install_katello_ca(target_sat) rhel7_contenthost_class.register_contenthost(module_org.label, golden_ticket_host_setup['name']) assert rhel7_contenthost_class.subscribed - host = Host.list({'search': rhel7_contenthost_class.hostname}) + host = target_sat.cli.Host.list({'search': rhel7_contenthost_class.hostname}) host_id = host[0]['id'] with pytest.raises(CLIReturnCodeError) as context: - Host.subscription_auto_attach({'host-id': host_id}) + target_sat.cli.Host.subscription_auto_attach({'host-id': host_id}) assert "This host's organization is in Simple Content Access mode" in str(context.value) + + +def test_negative_check_katello_reimport(target_sat, 
function_org):
+    """Verify katello:reimport trace should not fail with a TypeError
+
+    :id: b7508a1c-7798-4649-83a3-cf94c7409c96
+
+    :steps:
+        1. Import expired manifest & refresh
+        2. Delete expired manifest
+        3. Re-import new valid manifest & refresh
+
+    :expectedresults: There should be no error after re-importing the manifest
+
+    :customerscenario: true
+
+    :BZ: 2225534, 2253621
+    """
+    remote_path = f'/tmp/{EXPIRED_MANIFEST}'
+    target_sat.put(DataFile.EXPIRED_MANIFEST_FILE, remote_path)
+    # Import expired manifest & refresh
+    target_sat.cli.Subscription.upload({'organization-id': function_org.id, 'file': remote_path})
+    with pytest.raises(CLIReturnCodeError):
+        target_sat.cli.Subscription.refresh_manifest({'organization-id': function_org.id})
+    exec_val = target_sat.execute(
+        'grep -i "Katello::HttpErrors::BadRequest: This Organization\'s subscription '
+        'manifest has expired. Please import a new manifest" /var/log/foreman/production.log'
+    )
+    assert exec_val.status
+    # Delete expired manifest
+    target_sat.cli.Subscription.delete_manifest({'organization-id': function_org.id})
+    # Re-import new manifest & refresh
+    manifester = Manifester(manifest_category=settings.manifest.golden_ticket)
+    manifest = manifester.get_manifest()
+    target_sat.upload_manifest(function_org.id, manifest.content)
+    ret_val = target_sat.cli.Subscription.refresh_manifest({'organization-id': function_org.id})
+    assert 'Candlepin job status: SUCCESS' in ret_val
+    # Additional check: katello:reimport trace should not fail with a TypeError
+    trace_output = target_sat.execute("foreman-rake katello:reimport --trace")
+    assert 'TypeError: no implicit conversion of String into Integer' not in trace_output.stdout
+    assert trace_output.status == 0
diff --git a/tests/foreman/cli/test_syncplan.py b/tests/foreman/cli/test_syncplan.py
index d0ddf18d081..8669cd49a18 100644
--- a/tests/foreman/cli/test_syncplan.py
+++ b/tests/foreman/cli/test_syncplan.py
@@ -4,45 +4,31 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: Acceptance
-
 :CaseComponent: SyncPlans
 
 :team: Phoenix-content
 
-:TestType: Functional
-
 :CaseImportance: High
 
-:Upstream: No
 """
-from datetime import datetime
-from datetime import timedelta
+from datetime import datetime, timedelta
 from time import sleep
 
-import pytest
 from fauxfactory import gen_string
 from nailgun import entities
+import pytest
 
-from robottelo.cli.base import CLIReturnCodeError
-from robottelo.cli.factory import CLIFactoryError
-from robottelo.cli.factory import make_product
-from robottelo.cli.factory import make_repository
-from robottelo.cli.factory import make_sync_plan
-from robottelo.cli.product import Product
-from robottelo.cli.repository import Repository
-from robottelo.cli.repository_set import RepositorySet
-from robottelo.cli.syncplan import SyncPlan
-from robottelo.constants import PRDS
-from robottelo.constants import REPOS
-from robottelo.constants import REPOSET
+from robottelo.constants import PRDS, REPOS, REPOSET
+from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError
 from robottelo.logging import logger
-from robottelo.utils.datafactory import filtered_datapoint
-from robottelo.utils.datafactory import invalid_values_list
-from robottelo.utils.datafactory import parametrized
-from robottelo.utils.datafactory import valid_data_list
+from robottelo.utils.datafactory import (
+    filtered_datapoint,
+    invalid_values_list,
+    parametrized,
+    valid_data_list,
+)
 
-SYNC_DATE_FMT = '%Y-%m-%d %H:%M:%S'
+SYNC_DATE_FMT = '%Y-%m-%d %H:%M:%S UTC'
 
 
 @filtered_datapoint
@@ -112,7 +98,7 @@
def validate_task_status(sat, repo_id, org_id, max_tries=10): ) -def validate_repo_content(repo, content_types, after_sync=True): +def validate_repo_content(sat, repo, content_types, after_sync=True): """Check whether corresponding content is present in repository before or after synchronization is performed @@ -122,7 +108,7 @@ def validate_repo_content(repo, content_types, after_sync=True): :param bool after_sync: Specify whether you perform validation before synchronization procedure is happened or after """ - repo = Repository.info({'id': repo['id']}) + repo = sat.cli.Repository.info({'id': repo['id']}) for content in content_types: count = int(repo['content-counts'][content]) assert count > 0 if after_sync else count == 0 @@ -130,7 +116,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Check if syncplan can be created with random names :id: dc0a86f7-4219-427e-92fd-29352dbdbfce @@ -141,14 +127,16 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - sync_plan = make_sync_plan({'enabled': 'false', 'name': name, 'organization-id': module_org.id}) - result = SyncPlan.info({'id': sync_plan['id']}) + sync_plan = module_target_sat.cli_factory.sync_plan( + {'enabled': 'false', 'name': name, 'organization-id': module_org.id} + ) + result = module_target_sat.cli.SyncPlan.info({'id': sync_plan['id']}) assert result['name'] == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, desc): +def test_positive_create_with_description(module_org, desc, module_target_sat): """Check if syncplan can be created with random description :id: a1bbe81b-60f5-4a19-b400-a02a23fa1dfa @@ -159,16 +147,16 @@ def test_positive_create_with_description(module_org, desc): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = module_target_sat.cli_factory.sync_plan( {'enabled': 'false', 'description': desc, 'organization-id': module_org.id} ) - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['description'] == desc @pytest.mark.parametrize('test_data', **parametrized(valid_name_interval_create_tests())) @pytest.mark.tier1 -def test_positive_create_with_interval(module_org, test_data): +def test_positive_create_with_interval(module_org, test_data, module_target_sat): """Check if syncplan can be created with varied intervals :id: 32eb0c1d-0c9a-4fb5-a185-68d0d705fbce @@ -179,7 +167,7 @@ def test_positive_create_with_interval(module_org, test_data): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = module_target_sat.cli_factory.sync_plan( { 'enabled': 'false', 'interval': test_data['interval'], @@ -187,14 +175,14 @@ def test_positive_create_with_interval(module_org, test_data): 'organization-id': module_org.id, } ) - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['name'] == test_data['name'] assert result['interval'] == test_data['interval'] @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_name(module_org, name): +def 
test_negative_create_with_name(module_org, name, module_target_sat): """Check if syncplan can be created with random invalid names :id: 4c1aee35-271e-4ed8-9369-d2abfea8cfd9 @@ -206,12 +194,14 @@ def test_negative_create_with_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError, match='Could not create the sync plan:'): - make_sync_plan({'enabled': 'false', 'name': name, 'organization-id': module_org.id}) + module_target_sat.cli_factory.sync_plan( + {'enabled': 'false', 'name': name, 'organization-id': module_org.id} + ) @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_org, new_desc): +def test_positive_update_description(module_org, new_desc, module_target_sat): """Check if syncplan description can be updated :id: 00a279cd-1f49-4ebb-a59a-6f0b4e4cb83c @@ -220,9 +210,11 @@ def test_positive_update_description(module_org, new_desc): :expectedresults: Sync plan is created and description is updated """ - new_sync_plan = make_sync_plan({'enabled': 'false', 'organization-id': module_org.id}) - SyncPlan.update({'description': new_desc, 'id': new_sync_plan['id']}) - result = SyncPlan.info({'id': new_sync_plan['id']}) + new_sync_plan = module_target_sat.cli_factory.sync_plan( + {'enabled': 'false', 'organization-id': module_org.id} + ) + module_target_sat.cli.SyncPlan.update({'description': new_desc, 'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['description'] == new_desc @@ -239,7 +231,7 @@ def test_positive_update_interval(module_org, test_data, request, target_sat): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'false', 'interval': test_data['interval'], @@ -249,8 +241,10 @@ def test_positive_update_interval(module_org, test_data, request, target_sat): ) sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - SyncPlan.update({'id': new_sync_plan['id'], 'interval': test_data['new-interval']}) - result = SyncPlan.info({'id': new_sync_plan['id']}) + target_sat.cli.SyncPlan.update( + {'id': new_sync_plan['id'], 'interval': test_data['new-interval']} + ) + result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['interval'] == test_data['new-interval'] @@ -270,7 +264,7 @@ def test_positive_update_sync_date(module_org, request, target_sat): # Set the sync date to today/right now today = datetime.now() sync_plan_name = gen_string('alphanumeric') - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'name': sync_plan_name, 'sync-date': today.strftime(SYNC_DATE_FMT), @@ -284,9 +278,11 @@ def test_positive_update_sync_date(module_org, request, target_sat): # Set sync date 5 days in the future future_date = today + timedelta(days=5) # Update sync interval - SyncPlan.update({'id': new_sync_plan['id'], 'sync-date': future_date.strftime(SYNC_DATE_FMT)}) + target_sat.cli.SyncPlan.update( + {'id': new_sync_plan['id'], 'sync-date': future_date.strftime(SYNC_DATE_FMT)} + ) # Fetch it - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['start-date'] != new_sync_plan['start-date'] assert datetime.strptime(result['start-date'], '%Y/%m/%d %H:%M:%S') > datetime.strptime( 
new_sync_plan['start-date'], '%Y/%m/%d %H:%M:%S' @@ -296,7 +292,7 @@ def test_positive_update_sync_date(module_org, request, target_sat): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_by_id(module_org, name): +def test_positive_delete_by_id(module_org, module_target_sat, name): """Check if syncplan can be created and deleted :id: b5d97c6b-aead-422b-8d9f-4a192bbe4a3b @@ -307,10 +303,12 @@ def test_positive_delete_by_id(module_org, name): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan({'name': name, 'organization-id': module_org.id}) - SyncPlan.delete({'id': new_sync_plan['id']}) + new_sync_plan = module_target_sat.cli_factory.sync_plan( + {'name': name, 'organization-id': module_org.id} + ) + module_target_sat.cli.SyncPlan.delete({'id': new_sync_plan['id']}) with pytest.raises(CLIReturnCodeError): - SyncPlan.info({'id': new_sync_plan['id']}) + module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) @pytest.mark.tier1 @@ -323,16 +321,16 @@ def test_positive_info_enabled_field_is_displayed(module_org, request, target_sa :CaseImportance: Critical """ - new_sync_plan = make_sync_plan({'organization-id': module_org.id}) + new_sync_plan = target_sat.cli_factory.sync_plan({'organization-id': module_org.id}) sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result.get('enabled') is not None @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_info_with_assigned_product(module_org): +def test_positive_info_with_assigned_product(module_org, module_target_sat): """Verify that sync plan info command returns list of products which are assigned to that sync plan @@ -346,12 +344,10 @@ def test_positive_info_with_assigned_product(module_org): :BZ: 1390545 :CaseImportance: Critical - - :CaseLevel: Integration """ prod1 = gen_string('alpha') prod2 = gen_string('alpha') - sync_plan = make_sync_plan( + sync_plan = module_target_sat.cli_factory.sync_plan( { 'enabled': 'false', 'organization-id': module_org.id, @@ -359,9 +355,13 @@ def test_positive_info_with_assigned_product(module_org): } ) for prod_name in [prod1, prod2]: - product = make_product({'organization-id': module_org.id, 'name': prod_name}) - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': sync_plan['id']}) - updated_plan = SyncPlan.info({'id': sync_plan['id']}) + product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'name': prod_name} + ) + module_target_sat.cli.Product.set_sync_plan( + {'id': product['id'], 'sync-plan-id': sync_plan['id']} + ) + updated_plan = module_target_sat.cli.SyncPlan.info({'id': sync_plan['id']}) assert len(updated_plan['products']) == 2 assert {prod['name'] for prod in updated_plan['products']} == {prod1, prod2} @@ -377,10 +377,8 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, :expectedresults: Repository was not synchronized :BZ: 1279539 - - :CaseLevel: System """ - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'organization-id': module_org.id, @@ -389,9 +387,9 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, ) sync_plan = entities.SyncPlan(organization=module_org.id, 
id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=2) @@ -408,14 +406,12 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, :expectedresults: Product is synchronized successfully. :BZ: 1279539 - - :CaseLevel: System """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - new_sync_plan = make_sync_plan( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'interval': 'hourly', @@ -428,7 +424,7 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -437,8 +433,8 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, sleep(delay / 4) # Verify product has not been synced yet with pytest.raises(AssertionError): - validate_task_status(repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -446,8 +442,8 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, ) sleep(delay * 3 / 4) # Verify product was synced successfully - validate_task_status(repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_task_status(target_sat, repo['id'], module_org.id) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier4 @@ -460,19 +456,17 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques :expectedresults: Product is synchronized successfully. 
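A note on the arithmetic the past-sync-date tests above depend on: a plan whose sync date sits one full interval in the past, shifted forward by a small delay, fires its first recurrence about `delay` seconds after the test starts. A minimal standalone sketch of that calculation; the variable names are illustrative, not part of the suite:

from datetime import datetime, timedelta

SYNC_DATE_FMT = '%Y-%m-%d %H:%M:%S UTC'
interval = 60 * 60  # 'hourly' sync interval in seconds
delay = 2 * 60      # first recurrence expected ~2 minutes from now

# one interval in the past plus `delay` puts the next run `delay` seconds out
sync_date = datetime.utcnow() - timedelta(seconds=interval - delay)
print(sync_date.strftime(SYNC_DATE_FMT))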
- :CaseLevel: System - :BZ: 1655595 """ cron_multiple = 5 # sync event is on every multiple of this value, starting from 00 mins delay = (cron_multiple) * 60 # delay for sync date in seconds guardtime = 180 # do not start test less than 3 mins before the next sync event - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'organization-id': module_org.id, @@ -485,9 +479,9 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Verify product is not synced and doesn't have any content - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -497,7 +491,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait the rest of expected time logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -506,7 +500,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier4 @@ -519,21 +513,23 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque :expectedresults: Products are synchronized successfully. 
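The future-sync-date variants instead schedule against wall-clock cron events that fire on every multiple of `cron_multiple` minutes, so each test refuses to start inside a guard window just before the next event. A hedged sketch of that guard, mirroring the names the tests themselves use:

from datetime import datetime
from time import sleep

cron_multiple = 5  # sync event fires every 5 minutes, starting from :00
guardtime = 210    # seconds of headroom required before the next event

# too close to the next event: sit this window out and target the following one
if int(datetime.utcnow().strftime('%M')) % cron_multiple > int(guardtime / 60):
    sleep(guardtime)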
- :CaseLevel: System - :BZ: 1655595 """ cron_multiple = 5 # sync event is on every multiple of this value, starting from 00 mins delay = (cron_multiple) * 60 # delay for sync date in seconds guardtime = 210 # do not start test less than 3.5 mins before the next sync event - products = [make_product({'organization-id': module_org.id}) for _ in range(2)] + products = [ + target_sat.cli_factory.make_product({'organization-id': module_org.id}) for _ in range(2) + ] repos = [ - make_repository({'product-id': product['id']}) for product in products for _ in range(2) + target_sat.cli_factory.make_repository({'product-id': product['id']}) + for product in products + for _ in range(2) ] # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'organization-id': module_org.id, @@ -552,10 +548,12 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque ) for repo in repos: with pytest.raises(AssertionError): - validate_task_status(repo['id'], module_org.id, max_tries=1) + validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) # Associate sync plan with products for product in products: - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan( + {'id': product['id'], 'sync-plan-id': new_sync_plan['id']} + ) # Wait fifth of expected time logger.info( f"Waiting {(delay / 5)} seconds to check products {products[0]['name']}" @@ -565,7 +563,7 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque # Verify products have not been synced yet for repo in repos: with pytest.raises(AssertionError): - validate_task_status(repo['id'], module_org.id, max_tries=1) + validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) # Wait the rest of expected time logger.info( f"Waiting {(delay * 4 / 5)} seconds to check product {products[0]['name']}" @@ -574,8 +572,8 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque sleep(delay * 4 / 5) # Verify products were synced successfully for repo in repos: - validate_task_status(repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_task_status(target_sat, repo['id'], module_org.id) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.run_in_one_thread @@ -593,13 +591,11 @@ def test_positive_synchronize_rh_product_past_sync_date( :expectedresults: Product is synchronized successfully. 
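One pattern worth calling out across these hunks: validate_task_status and validate_repo_content no longer reach for module-level CLI wrappers; the Satellite object is passed explicitly. A schematic of the resulting call shape (not runnable on its own, since target_sat, repo and module_org come from fixtures):

import pytest

# before the scheduled time: no finished sync task and no repo content
with pytest.raises(AssertionError):
    validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1)
validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False)

# after the scheduled time: the task finished and content counts are non-zero
validate_task_status(target_sat, repo['id'], module_org.id)
validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages'])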
:BZ: 1279539 - - :CaseLevel: System """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 org = function_entitlement_manifest_org - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], 'organization-id': org.id, @@ -608,11 +604,11 @@ def test_positive_synchronize_rh_product_past_sync_date( 'basearch': 'x86_64', } ) - product = Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) - repo = Repository.info( + product = target_sat.cli.Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) + repo = target_sat.cli.Repository.info( {'name': REPOS['rhva6']['name'], 'product': product['name'], 'organization-id': org.id} ) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'interval': 'hourly', @@ -625,7 +621,7 @@ def test_positive_synchronize_rh_product_past_sync_date( sync_plan = entities.SyncPlan(organization=org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -634,8 +630,8 @@ def test_positive_synchronize_rh_product_past_sync_date( sleep(delay / 4) # Verify product has not been synced yet with pytest.raises(AssertionError): - validate_task_status(repo['id'], org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_task_status(target_sat, repo['id'], org.id, max_tries=1) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait the rest of expected time logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -643,8 +639,8 @@ def test_positive_synchronize_rh_product_past_sync_date( ) sleep(delay * 3 / 4) # Verify product was synced successfully - validate_task_status(repo['id'], org.id) - validate_repo_content(repo, ['errata', 'packages']) + validate_task_status(target_sat, repo['id'], org.id) + validate_repo_content(target_sat, repo, ['errata', 'packages']) @pytest.mark.run_in_one_thread @@ -660,15 +656,13 @@ def test_positive_synchronize_rh_product_future_sync_date( :expectedresults: Product is synchronized successfully. 
- :CaseLevel: System - :BZ: 1655595 """ cron_multiple = 5 # sync event is on every multiple of this value, starting from 00 mins delay = (cron_multiple) * 60 # delay for sync date in seconds guardtime = 180 # do not start test less than 2 mins before the next sync event org = function_entitlement_manifest_org - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], 'organization-id': org.id, @@ -677,14 +671,14 @@ def test_positive_synchronize_rh_product_future_sync_date( 'basearch': 'x86_64', } ) - product = Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) - repo = Repository.info( + product = target_sat.cli.Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) + repo = target_sat.cli.Repository.info( {'name': REPOS['rhva6']['name'], 'product': product['name'], 'organization-id': org.id} ) # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'organization-id': org.id, @@ -698,10 +692,10 @@ def test_positive_synchronize_rh_product_future_sync_date( request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): - validate_task_status(repo['id'], org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_task_status(target_sat, repo['id'], org.id, max_tries=1) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait fifth of expected time logger.info( f"Waiting {(delay / 5)} seconds to check product {product['name']}" @@ -710,8 +704,8 @@ def test_positive_synchronize_rh_product_future_sync_date( sleep(delay / 5) # Verify product has not been synced yet with pytest.raises(AssertionError): - validate_task_status(repo['id'], org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_task_status(target_sat, repo['id'], org.id, max_tries=1) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait the rest of expected time logger.info( f"Waiting {(delay * 4 / 5)} seconds to check product {product['name']}" @@ -719,8 +713,8 @@ def test_positive_synchronize_rh_product_future_sync_date( ) sleep(delay * 4 / 5) # Verify product was synced successfully - validate_task_status(repo['id'], org.id) - validate_repo_content(repo, ['errata', 'packages']) + validate_task_status(target_sat, repo['id'], org.id) + validate_repo_content(target_sat, repo, ['errata', 'packages']) @pytest.mark.tier3 @@ -733,14 +727,12 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques :id: 8d882e8b-b5c1-4449-81c6-0efd31ad75a7 :expectedresults: Product is synchronized successfully. 
- - :CaseLevel: System """ delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) start_date = datetime.utcnow() - timedelta(days=1) + timedelta(seconds=delay) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'interval': 'daily', @@ -751,7 +743,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -761,7 +753,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -770,7 +762,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier3 @@ -784,14 +776,12 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque :expectedresults: Product is synchronized successfully. 
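The daily and weekly recurrence tests apply the same trick with calendar units: a start date one day (or one week) in the past plus a small delay leaves the next recurrence `delay` seconds away. A standalone sketch with illustrative names:

from datetime import datetime, timedelta

SYNC_DATE_FMT = '%Y-%m-%d %H:%M:%S UTC'
delay = 2 * 60  # seconds until the recurrence should fire

daily_start = datetime.utcnow() - timedelta(days=1) + timedelta(seconds=delay)
weekly_start = datetime.utcnow() - timedelta(weeks=1) + timedelta(seconds=delay)
print(daily_start.strftime(SYNC_DATE_FMT))
print(weekly_start.strftime(SYNC_DATE_FMT))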
:BZ: 1396647 - - :CaseLevel: System """ delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) start_date = datetime.utcnow() - timedelta(weeks=1) + timedelta(seconds=delay) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.sync_plan( { 'enabled': 'true', 'interval': 'weekly', @@ -802,7 +792,7 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -811,8 +801,8 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque sleep(delay / 4) # Verify product has not been synced yet with pytest.raises(AssertionError): - validate_task_status(repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -820,5 +810,5 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque ) sleep(delay * 3 / 4) # Verify product was synced successfully - validate_task_status(repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_task_status(target_sat, repo['id'], module_org.id) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) diff --git a/tests/foreman/cli/test_templatesync.py b/tests/foreman/cli/test_templatesync.py index c2bd4af9235..b0cc31b2003 100644 --- a/tests/foreman/cli/test_templatesync.py +++ b/tests/foreman/cli/test_templatesync.py @@ -4,29 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Integration - :CaseComponent: TemplatesPlugin :Team: Endeavour -:TestType: Functional - -:Upstream: No """ import base64 -import pytest -import requests from fauxfactory import gen_string from nailgun import entities +import pytest +import requests -from robottelo.cli.template import Template -from robottelo.cli.template_sync import TemplateSync from robottelo.config import settings -from robottelo.constants import FOREMAN_TEMPLATE_IMPORT_URL -from robottelo.constants import FOREMAN_TEMPLATE_TEST_TEMPLATE - +from robottelo.constants import ( + FOREMAN_TEMPLATE_IMPORT_URL, + FOREMAN_TEMPLATE_TEST_TEMPLATE, +) git = settings.git @@ -50,7 +44,7 @@ def setUpClass(self, module_target_sat): """ # Check all Downloadable templates exists - if not requests.get(FOREMAN_TEMPLATE_IMPORT_URL).status_code == 200: + if requests.get(FOREMAN_TEMPLATE_IMPORT_URL).status_code != 200: pytest.fail('The foreman templates git url is not accessible') # Download the Test Template in test running folder @@ -67,7 +61,7 @@ def test_positive_import_force_locked_template( :id: 
b80fbfc4-bcab-4a5d-b6c1-0e22906cd8ab - :Steps: + :steps: 1. Import some of the locked template specifying the `force` parameter `false`. 2. After ensuring the template is not updated, Import same locked template @@ -82,7 +76,7 @@ def test_positive_import_force_locked_template( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - TemplateSync.imports( + target_sat.cli.TemplateSync.imports( {'repo': dir_path, 'prefix': prefix, 'organization-ids': module_org.id, 'lock': 'true'} ) ptemplate = entities.ProvisioningTemplate().search( @@ -92,11 +86,13 @@ def test_positive_import_force_locked_template( assert ptemplate[0].read().locked update_txt = 'updated a little' target_sat.execute(f"echo {update_txt} >> {dir_path}/example_template.erb") - TemplateSync.imports( + target_sat.cli.TemplateSync.imports( {'repo': dir_path, 'prefix': prefix, 'organization-id': module_org.id} ) - assert update_txt not in Template.dump({'name': f'{prefix}example template'}) - TemplateSync.imports( + assert update_txt not in target_sat.cli.Template.dump( + {'name': f'{prefix}example template'} + ) + target_sat.cli.TemplateSync.imports( { 'repo': dir_path, 'prefix': prefix, @@ -104,7 +100,7 @@ def test_positive_import_force_locked_template( 'force': 'true', } ) - assert update_txt in Template.dump({'name': f'{prefix}example template'}) + assert update_txt in target_sat.cli.Template.dump({'name': f'{prefix}example template'}) else: pytest.fail('The template is not imported for force test') @@ -125,13 +121,15 @@ def test_positive_import_force_locked_template( indirect=True, ids=['non_empty_repo'], ) - def test_positive_update_templates_in_git(self, module_org, git_repository, git_branch, url): + def test_positive_update_templates_in_git( + self, module_org, git_repository, git_branch, url, module_target_sat + ): """Assure only templates with a given filter are pushed to git repository and existing template file is updated. :id: 5b0be026-2983-4570-bc63-d9aba36fca65 - :Steps: + :steps: 1. Repository contains file with same name as exported template. 2. Export "Atomic Kickstart default" templates to git repo. @@ -158,12 +156,12 @@ def test_positive_update_templates_in_git(self, module_org, git_repository, git_ assert res.status_code == 201 # export template to git url = f'{url}/{git.username}/{git_repository["name"]}' - output = TemplateSync.exports( + output = module_target_sat.cli.TemplateSync.exports( { 'repo': url, 'branch': git_branch, 'organization-id': module_org.id, - 'filter': 'Atomic Kickstart default', + 'filter': 'User - Registered Users', 'dirname': dirname, } ).split('\n') @@ -191,14 +189,14 @@ def test_positive_update_templates_in_git(self, module_org, git_repository, git_ ids=['non_empty_repo', 'empty_repo'], ) def test_positive_export_filtered_templates_to_git( - self, module_org, git_repository, git_branch, url + self, module_org, git_repository, git_branch, url, module_target_sat ): """Assure only templates with a given filter regex are pushed to git repository. :id: fd583f85-f170-4b93-b9b1-36d72f31c31f - :Steps: + :steps: 1. Export only the templates matching with regex e.g: `^atomic.*` to git repo. 
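For the export-with-filter flow described in the step above, the regex travels through the `filter` option and the command output is scanned for 'Exported: true' rows. A hedged sketch of that call; url, git_branch and the git fixtures are borrowed assumptions from the surrounding tests, not new API:

output = module_target_sat.cli.TemplateSync.exports(
    {
        'repo': f'{url}/{git.username}/{git_repository["name"]}',
        'branch': git_branch,
        'organization-id': module_org.id,
        'filter': '^atomic.*',  # only templates matching this regex are pushed
        'dirname': 'export',
    }
).split('\n')
exported_count = [row == 'Exported: true' for row in output].count(True)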
:expectedresults: @@ -212,7 +210,7 @@ def test_positive_export_filtered_templates_to_git( """ dirname = 'export' url = f'{url}/{git.username}/{git_repository["name"]}' - output = TemplateSync.exports( + output = module_target_sat.cli.TemplateSync.exports( { 'repo': url, 'branch': git_branch, @@ -239,14 +237,14 @@ def test_positive_export_filtered_templates_to_temp_dir(self, module_org, target :bz: 1778177 - :Steps: Export the templates matching with regex e.g: `ansible` to /tmp directory. + :steps: Export the templates matching with regex e.g: `ansible` to /tmp directory. :expectedresults: The templates are exported /tmp directory :CaseImportance: Medium """ dir_path = '/tmp' - output = TemplateSync.exports( + output = target_sat.cli.TemplateSync.exports( {'repo': dir_path, 'organization-id': module_org.id, 'filter': 'ansible'} ).split('\n') exported_count = [row == 'Exported: true' for row in output].count(True) diff --git a/tests/foreman/cli/test_user.py b/tests/foreman/cli/test_user.py index e8fef60401d..3646e301db0 100644 --- a/tests/foreman/cli/test_user.py +++ b/tests/foreman/cli/test_user.py @@ -10,50 +10,38 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import datetime import random from time import sleep -import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_string +from fauxfactory import gen_alphanumeric, gen_string from nailgun import entities +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_filter -from robottelo.cli.factory import make_location -from robottelo.cli.factory import make_org -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.filter import Filter -from robottelo.cli.org import Org -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import LOCALES +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils import gen_ssh_keypairs -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list -from robottelo.utils.datafactory import valid_emails_list -from robottelo.utils.datafactory import valid_usernames_list +from robottelo.utils.datafactory import ( + parametrized, + valid_data_list, + valid_emails_list, + valid_usernames_list, +) class TestUser: """Implements Users tests in CLI""" @pytest.fixture(scope='module') - def module_roles(self): + def module_roles(self, module_target_sat): """ Initializes class attribute ``dct_roles`` with several random roles saved on sat. 
roles is a dict so keys are role's id respective value is @@ -66,14 +54,14 @@ def roles_helper(): tests """ for role_name in valid_usernames_list() + include_list: - yield make_role({'name': role_name}) + yield module_target_sat.cli_factory.make_role({'name': role_name}) - stubbed_roles = {role['id']: role for role in roles_helper()} - yield stubbed_roles + # return stubbed roles + return {role['id']: role for role in roles_helper()} @pytest.mark.parametrize('email', **parametrized(valid_emails_list())) @pytest.mark.tier2 - def test_positive_CRUD(self, email): + def test_positive_CRUD(self, email, module_target_sat): """Create User with various parameters, updating and deleting :id: 2d430243-8512-46ee-8d21-7ccf0c7af807 @@ -96,7 +84,7 @@ def test_positive_CRUD(self, email): 'mail': mail.replace('"', r'\"').replace('`', r'\`'), 'description': random.choice(list(valid_data_list().values())), } - user = make_user(user_params) + user = module_target_sat.cli_factory.user(user_params) user['firstname'], user['lastname'] = user['name'].split() user_params.pop('mail') user_params['email'] = mail @@ -104,14 +92,18 @@ def test_positive_CRUD(self, email): assert user_params[key] == user[key], f'values for key "{key}" do not match' # list by firstname and lastname - result = User.list({'search': 'firstname = {}'.format(user_params['firstname'])}) + result = module_target_sat.cli.User.list( + {'search': 'firstname = {}'.format(user_params['firstname'])} + ) # make sure user is in list result assert {user['id'], user['login'], user['name']} == { result[0]['id'], result[0]['login'], result[0]['name'], } - result = User.list({'search': 'lastname = {}'.format(user_params['lastname'])}) + result = module_target_sat.cli.User.list( + {'search': 'lastname = {}'.format(user_params['lastname'])} + ) # make sure user is in list result assert {user['id'], user['login'], user['name']} == { result[0]['id'], @@ -127,22 +119,21 @@ def test_positive_CRUD(self, email): 'description': random.choice(list(valid_data_list().values())), } user_params.update({'id': user['id']}) - User.update(user_params) - user = User.info({'login': user['login']}) + module_target_sat.cli.User.update(user_params) + user = module_target_sat.cli.User.info({'login': user['login']}) user['firstname'], user['lastname'] = user['name'].split() user_params.pop('mail') user_params['email'] = new_mail for key in user_params: assert user_params[key] == user[key], f'values for key "{key}" do not match' # delete - User.delete({'login': user['login']}) + module_target_sat.cli.User.delete({'login': user['login']}) with pytest.raises(CLIReturnCodeError): - User.info({'login': user['login']}) + module_target_sat.cli.User.info({'login': user['login']}) @pytest.mark.tier1 @pytest.mark.upgrade - @pytest.mark.build_sanity - def test_positive_CRUD_admin(self): + def test_positive_CRUD_admin(self, target_sat): """Create an Admin user :id: 0d0384ad-d85a-492e-8630-7f48912a4fd5 @@ -151,23 +142,23 @@ def test_positive_CRUD_admin(self): :CaseImportance: Critical """ - user = make_user({'admin': '1'}) + user = target_sat.cli_factory.user({'admin': '1'}) assert user['admin'] == 'yes' # update to non admin by id - User.update({'id': user['id'], 'admin': '0'}) - user = User.info({'id': user['id']}) + target_sat.cli.User.update({'id': user['id'], 'admin': '0'}) + user = target_sat.cli.User.info({'id': user['id']}) assert user['admin'] == 'no' # update back to admin by name - User.update({'login': user['login'], 'admin': '1'}) - user = User.info({'login': user['login']}) + 
target_sat.cli.User.update({'login': user['login'], 'admin': '1'}) + user = target_sat.cli.User.info({'login': user['login']}) assert user['admin'] == 'yes' # delete user - User.delete({'login': user['login']}) + target_sat.cli.User.delete({'login': user['login']}) with pytest.raises(CLIReturnCodeError): - User.info({'id': user['id']}) + target_sat.cli.User.info({'id': user['id']}) @pytest.mark.tier1 - def test_positive_create_with_default_loc(self): + def test_positive_create_with_default_loc(self, target_sat): """Check if user with default location can be created :id: efe7256d-8c8f-444c-8d59-43500e1319c3 @@ -176,13 +167,15 @@ def test_positive_create_with_default_loc(self): :CaseImportance: Critical """ - location = make_location() - user = make_user({'default-location-id': location['id'], 'location-ids': location['id']}) + location = target_sat.cli_factory.make_location() + user = target_sat.cli_factory.user( + {'default-location-id': location['id'], 'location-ids': location['id']} + ) assert location['name'] in user['locations'] assert location['name'] == user['default-location'] @pytest.mark.tier1 - def test_positive_create_with_defaut_org(self): + def test_positive_create_with_defaut_org(self, module_target_sat): """Check if user with default organization can be created :id: cc692b6f-2519-429b-8ecb-c4bb51ed3544 @@ -192,35 +185,38 @@ def test_positive_create_with_defaut_org(self): :CaseImportance: Critical """ - org = make_org() - user = make_user({'default-organization-id': org['id'], 'organization-ids': org['id']}) + org = module_target_sat.cli_factory.make_org() + user = module_target_sat.cli_factory.user( + {'default-organization-id': org['id'], 'organization-ids': org['id']} + ) assert org['name'] in user['organizations'] assert org['name'] == user['default-organization'] @pytest.mark.tier2 - def test_positive_create_with_orgs_and_update(self): + def test_positive_create_with_orgs_and_update(self, module_target_sat): """Create User associated to multiple Organizations, update them :id: f537296c-a8a8-45ef-8996-c1d32b8f64de :expectedresults: User is created with orgs, orgs are updated - :CaseLevel: Integration """ orgs_amount = 2 - orgs = [make_org() for _ in range(orgs_amount)] - user = make_user({'organization-ids': [org['id'] for org in orgs]}) + orgs = [module_target_sat.cli_factory.make_org() for _ in range(orgs_amount)] + user = module_target_sat.cli_factory.user({'organization-ids': [org['id'] for org in orgs]}) assert len(user['organizations']) == orgs_amount for org in orgs: assert org['name'] in user['organizations'] - orgs = [make_org() for _ in range(orgs_amount)] - User.update({'id': user['id'], 'organization-ids': [org['id'] for org in orgs]}) - user = User.info({'id': user['id']}) + orgs = [module_target_sat.cli_factory.make_org() for _ in range(orgs_amount)] + module_target_sat.cli.User.update( + {'id': user['id'], 'organization-ids': [org['id'] for org in orgs]} + ) + user = module_target_sat.cli.User.info({'id': user['id']}) for org in orgs: assert org['name'] in user['organizations'] @pytest.mark.tier1 - def test_negative_delete_internal_admin(self): + def test_negative_delete_internal_admin(self, module_target_sat): """Attempt to delete internal admin user :id: 4fc92958-9e75-4bd2-bcbe-32f906e432f5 @@ -230,11 +226,11 @@ def test_negative_delete_internal_admin(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - User.delete({'login': settings.server.admin_username}) - assert User.info({'login': settings.server.admin_username}) + 
module_target_sat.cli.User.delete({'login': settings.server.admin_username}) + assert module_target_sat.cli.User.info({'login': settings.server.admin_username}) @pytest.mark.tier2 - def test_positive_last_login_for_new_user(self): + def test_positive_last_login_for_new_user(self, module_target_sat): """Create new user with admin role and check last login updated for that user :id: 967282d3-92d0-42ce-9ef3-e542d2883408 @@ -245,23 +241,24 @@ def test_positive_last_login_for_new_user(self): :BZ: 1763816 - :CaseLevel: Integration """ login = gen_string('alpha') password = gen_string('alpha') org_name = gen_string('alpha') - make_user({'login': login, 'password': password}) - User.add_role({'login': login, 'role': 'System admin'}) - result_before_login = User.list({'search': f'login = {login}'}) + module_target_sat.cli_factory.user({'login': login, 'password': password}) + module_target_sat.cli.User.add_role({'login': login, 'role': 'System admin'}) + result_before_login = module_target_sat.cli.User.list({'search': f'login = {login}'}) # this is because satellite uses the UTC timezone before_login_time = datetime.datetime.utcnow() assert result_before_login[0]['login'] == login assert result_before_login[0]['last-login'] == "" - Org.with_user(username=login, password=password).create({'name': org_name}) - result_after_login = User.list({'search': f'login = {login}'}) + module_target_sat.cli.Org.with_user(username=login, password=password).create( + {'name': org_name} + ) + result_after_login = module_target_sat.cli.User.list({'search': f'login = {login}'}) # checking user last login should not be empty assert result_after_login[0]['last-login'] != "" @@ -271,12 +268,12 @@ def test_positive_last_login_for_new_user(self): assert after_login_time > before_login_time @pytest.mark.tier1 - def test_positive_update_all_locales(self): + def test_positive_update_all_locales(self, module_target_sat): """Update Language in My Account :id: f0993495-5117-461d-a116-44867b820139 - :Steps: Update current User with all different Language options + :steps: Update current User with all different Language options :expectedresults: Current User is updated @@ -284,14 +281,14 @@ def test_positive_update_all_locales(self): :CaseImportance: Critical """ - user = make_user() + user = module_target_sat.cli_factory.user() for locale in LOCALES: - User.update({'id': user['id'], 'locale': locale}) - assert locale == User.info({'id': user['id']})['locale'] + module_target_sat.cli.User.update({'id': user['id'], 'locale': locale}) + assert locale == module_target_sat.cli.User.info({'id': user['id']})['locale'] @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_add_and_delete_roles(self, module_roles): + def test_positive_add_and_delete_roles(self, module_roles, module_target_sat): """Add multiple roles to User, then delete them For now add-role user sub command does not allow multiple role ids @@ -305,17 +302,16 @@ def test_positive_add_and_delete_roles(self, module_roles): :expectedresults: Roles are added to user and deleted successfully - :CaseLevel: Integration """ - user = make_user() + user = module_target_sat.cli_factory.user() original_role_names = set(user['roles']) expected_role_names = set(original_role_names) for role_id, role in module_roles.items(): - User.add_role({'login': user['login'], 'role-id': role_id}) + module_target_sat.cli.User.add_role({'login': user['login'], 'role-id': role_id}) expected_role_names.add(role['name']) - user_roles = User.info({'id': user['id']})['roles'] + user_roles = 
module_target_sat.cli.User.info({'id': user['id']})['roles'] assert len(expected_role_names) == len(user_roles) for role in expected_role_names: assert role in user_roles @@ -323,11 +319,11 @@ def test_positive_add_and_delete_roles(self, module_roles): roles_to_remove = expected_role_names - original_role_names for role_name in roles_to_remove: user_credentials = {'login': user['login'], 'role': role_name} - User.remove_role(user_credentials) - user = User.info({'id': user['id']}) + module_target_sat.cli.User.remove_role(user_credentials) + user = module_target_sat.cli.User.info({'id': user['id']}) assert role_name not in user['roles'] - user_roles = User.info({'id': user['id']})['roles'] + user_roles = module_target_sat.cli.User.info({'id': user['id']})['roles'] assert len(original_role_names) == len(user_roles) for role in original_role_names: assert role in user_roles @@ -344,7 +340,7 @@ def module_user(self): return entities.User().create() @pytest.mark.tier1 - def test_positive_CRD_ssh_key(self, module_user): + def test_positive_CRD_ssh_key(self, module_user, module_target_sat): """SSH Key can be added to a User, listed and deleted :id: 57304fca-8e0d-454a-be31-34423345c8b2 @@ -355,13 +351,19 @@ def test_positive_CRD_ssh_key(self, module_user): :CaseImportance: Critical """ ssh_name = gen_string('alpha') - User.ssh_keys_add({'user': module_user.login, 'key': self.ssh_key, 'name': ssh_name}) - result = User.ssh_keys_list({'user-id': module_user.id}) + module_target_sat.cli.User.ssh_keys_add( + {'user': module_user.login, 'key': self.ssh_key, 'name': ssh_name} + ) + result = module_target_sat.cli.User.ssh_keys_list({'user-id': module_user.id}) assert ssh_name in [i['name'] for i in result] - result = User.ssh_keys_info({'user-id': module_user.id, 'name': ssh_name}) + result = module_target_sat.cli.User.ssh_keys_info( + {'user-id': module_user.id, 'name': ssh_name} + ) assert self.ssh_key in result[0]['public-key'] - result = User.ssh_keys_delete({'user-id': module_user.id, 'name': ssh_name}) - result = User.ssh_keys_list({'user-id': module_user.id}) + result = module_target_sat.cli.User.ssh_keys_delete( + {'user-id': module_user.id, 'name': ssh_name} + ) + result = module_target_sat.cli.User.ssh_keys_list({'user-id': module_user.id}) assert ssh_name not in [i['name'] for i in result] @pytest.mark.tier1 @@ -378,10 +380,12 @@ def test_positive_create_ssh_key_super_admin_from_file(self, target_sat): ssh_name = gen_string('alpha') result = target_sat.execute(f"echo '{self.ssh_key}' > test_key.pub") assert result.status == 0, 'key file not created' - User.ssh_keys_add({'user': 'admin', 'key-file': 'test_key.pub', 'name': ssh_name}) - result = User.ssh_keys_list({'user': 'admin'}) + target_sat.cli.User.ssh_keys_add( + {'user': 'admin', 'key-file': 'test_key.pub', 'name': ssh_name} + ) + result = target_sat.cli.User.ssh_keys_list({'user': 'admin'}) assert ssh_name in [i['name'] for i in result] - result = User.ssh_keys_info({'user': 'admin', 'name': ssh_name}) + result = target_sat.cli.User.ssh_keys_info({'user': 'admin', 'name': ssh_name}) assert self.ssh_key == result[0]['public-key'] @@ -403,13 +407,11 @@ def test_personal_access_token_admin_user(self, target_sat): 1. Should show output of the api endpoint 2.
When revoked, authentication error - :CaseLevel: System - :CaseImportance: High """ - user = make_user({'admin': '1'}) + user = target_sat.cli_factory.user({'admin': '1'}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -417,7 +419,9 @@ def test_personal_access_token_admin_user(self, target_sat): command_output = target_sat.execute(curl_command) assert user['login'] in command_output.stdout assert user['email'] in command_output.stdout - User.access_token(action="revoke", options={'name': token_name, 'user-id': user['id']}) + target_sat.cli.User.access_token( + action="revoke", options={'name': token_name, 'user-id': user['id']} + ) command_output = target_sat.execute(curl_command) assert f'Unable to authenticate user {user["login"]}' in command_output.stdout @@ -439,14 +443,12 @@ def test_positive_personal_access_token_user_with_role(self, target_sat): 2. When an incorrect end point is used, missing permission should be displayed. - :CaseLevel: System - :CaseImportance: High """ - user = make_user() - User.add_role({'login': user['login'], 'role': 'Viewer'}) + user = target_sat.cli_factory.user() + target_sat.cli.User.add_role({'login': user['login'], 'role': 'Viewer'}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -473,17 +475,15 @@ def test_expired_personal_access_token(self, target_sat): :expectedresults: Authentication error - :CaseLevel: System - :CaseImportance: Medium """ - user = make_user() - User.add_role({'login': user['login'], 'role': 'Viewer'}) + user = target_sat.cli_factory.user() + target_sat.cli.User.add_role({'login': user['login'], 'role': 'Viewer'}) token_name = gen_alphanumeric() datetime_now = datetime.datetime.utcnow() datetime_expire = datetime_now + datetime.timedelta(seconds=20) datetime_expire = datetime_expire.strftime("%Y-%m-%d %H:%M:%S") - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id'], 'expires-at': datetime_expire}, ) @@ -513,26 +513,24 @@ def test_custom_personal_access_token_role(self, target_sat): :expectedresults: Non admin user is able to view only the assigned entity - :CaseLevel: System - :CaseImportance: High :BZ: 1974685, 1996048 """ - role = make_role() + role = target_sat.cli_factory.make_role() permissions = [ permission['name'] - for permission in Filter.available_permissions( + for permission in target_sat.cli.Filter.available_permissions( {'search': 'resource_type=PersonalAccessToken'} ) ] permissions = ','.join(permissions) - make_filter({'role-id': role['id'], 'permissions': permissions}) - make_filter({'role-id': role['id'], 'permissions': 'view_users'}) - user = make_user() - User.add_role({'login': user['login'], 'role': role['name']}) + target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': permissions}) + target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': 'view_users'}) + user = target_sat.cli_factory.user() + target_sat.cli.User.add_role({'login': user['login'], 'role': role['name']}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 
'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -541,8 +539,51 @@ ) assert user['login'] in command_output.stdout assert user['email'] in command_output.stdout - User.access_token(action="revoke", options={'name': token_name, 'user-id': user['id']}) + target_sat.cli.User.access_token( + action="revoke", options={'name': token_name, 'user-id': user['id']} + ) command_output = target_sat.execute( f'curl -k -u {user["login"]}:{token_value} {target_sat.url}/api/v2/users' ) assert f'Unable to authenticate user {user["login"]}' in command_output.stdout + + @pytest.mark.tier2 + def test_negative_personal_access_token_invalid_date(self, target_sat): + """Personal access token with an invalid expiry date. + + :id: 8c7c91c5-f6d9-4709-857c-6a875db41b88 + + :steps: + 1. Set the expiry time to a nonsensical datetime + 2. Set the expiry time to a past datetime + + :expectedresults: token is not created with an invalid or past expiry time + + :CaseImportance: Medium + + :BZ: 2231814 + """ + user = target_sat.cli_factory.user() + target_sat.cli.User.add_role({'login': user['login'], 'role': 'Viewer'}) + token_name = gen_alphanumeric() + # check for invalid datetime + invalid_datetimes = ['00-14-00 09:30:55', '2028-08-22 28:30:55', '0000-00-22 15:90:55'] + for datetime_expire in invalid_datetimes: + with pytest.raises(CLIReturnCodeError): + target_sat.cli.User.access_token( + action='create', + options={ + 'name': token_name, + 'user-id': user['id'], + 'expires-at': datetime_expire, + }, + ) + # check for past datetime + datetime_now = datetime.datetime.utcnow() + datetime_expire = datetime_now - datetime.timedelta(seconds=20) + datetime_expire = datetime_expire.strftime("%Y-%m-%d %H:%M:%S") + with pytest.raises(CLIReturnCodeError): + target_sat.cli.User.access_token( + action='create', + options={'name': token_name, 'user-id': user['id'], 'expires-at': datetime_expire}, + ) diff --git a/tests/foreman/cli/test_usergroup.py b/tests/foreman/cli/test_usergroup.py index 1d63f59a13c..1c757b4e766 100644 --- a/tests/foreman/cli/test_usergroup.py +++ b/tests/foreman/cli/test_usergroup.py @@ -4,44 +4,29 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_role -from robottelo.cli.factory import make_user -from robottelo.cli.factory import make_usergroup -from robottelo.cli.factory import make_usergroup_external -from robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.cli.task import Task -from robottelo.cli.user import User -from robottelo.cli.usergroup import UserGroup -from robottelo.cli.usergroup import UserGroupExternal +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import valid_usernames_list -@pytest.fixture(scope='function') -def function_user_group(): +@pytest.fixture +def function_user_group(target_sat): """Create new usergroup per each test""" - user_group = make_usergroup() - yield user_group + return target_sat.cli_factory.usergroup() @pytest.mark.tier1 -def test_positive_CRUD(): +def test_positive_CRUD(module_target_sat): """Create new user group with valid elements that attached group. List the user group, update and delete it.
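The docstring above describes a full round-trip that the hunks below convert to the fixture API. Condensed to its skeleton, the new flow looks like this (a sketch under the same fixtures; the group names stand in for the random values the real test draws):

import pytest

from robottelo.exceptions import CLIReturnCodeError


def test_usergroup_round_trip_sketch(module_target_sat):
    # create through the factory accessor
    user_group = module_target_sat.cli_factory.usergroup({'name': 'qa-group'})
    # update by id, read back by id
    module_target_sat.cli.UserGroup.update({'id': user_group['id'], 'new-name': 'qa-group-2'})
    assert module_target_sat.cli.UserGroup.info({'id': user_group['id']})['name'] == 'qa-group-2'
    # delete by name and verify the lookup now fails
    module_target_sat.cli.UserGroup.delete({'name': 'qa-group-2'})
    with pytest.raises(CLIReturnCodeError):
        module_target_sat.cli.UserGroup.info({'name': 'qa-group-2'})

Note that CLIReturnCodeError now lives in robottelo.exceptions; the old robottelo.cli.base import is dropped in the hunk above.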
@@ -52,14 +37,14 @@ def test_positive_CRUD(): :CaseImportance: Critical """ - user = make_user() + user = module_target_sat.cli_factory.user() ug_name = random.choice(valid_usernames_list()) role_name = random.choice(valid_usernames_list()) - role = make_role({'name': role_name}) - sub_user_group = make_usergroup() + role = module_target_sat.cli_factory.make_role({'name': role_name}) + sub_user_group = module_target_sat.cli_factory.usergroup() # Create - user_group = make_usergroup( + user_group = module_target_sat.cli_factory.usergroup( { 'user-ids': user['id'], 'name': ug_name, @@ -75,24 +60,26 @@ def test_positive_CRUD(): assert user_group['user-groups'][0]['usergroup'] == sub_user_group['name'] # List - result_list = UserGroup.list({'search': 'name={}'.format(user_group['name'])}) + result_list = module_target_sat.cli.UserGroup.list( + {'search': 'name={}'.format(user_group['name'])} + ) assert len(result_list) > 0 - assert UserGroup.exists(search=('name', user_group['name'])) + assert module_target_sat.cli.UserGroup.exists(search=('name', user_group['name'])) # Update new_name = random.choice(valid_usernames_list()) - UserGroup.update({'id': user_group['id'], 'new-name': new_name}) - user_group = UserGroup.info({'id': user_group['id']}) + module_target_sat.cli.UserGroup.update({'id': user_group['id'], 'new-name': new_name}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert user_group['name'] == new_name # Delete - UserGroup.delete({'name': user_group['name']}) + module_target_sat.cli.UserGroup.delete({'name': user_group['name']}) with pytest.raises(CLIReturnCodeError): - UserGroup.info({'name': user_group['name']}) + module_target_sat.cli.UserGroup.info({'name': user_group['name']}) @pytest.mark.tier1 -def test_positive_create_with_multiple_elements(): +def test_positive_create_with_multiple_elements(module_target_sat): """Create new user group using multiple users, roles and user groups attached to that group. @@ -104,17 +91,19 @@ def test_positive_create_with_multiple_elements(): :CaseImportance: Critical """ count = 2 - users = [make_user()['login'] for _ in range(count)] - roles = [make_role()['name'] for _ in range(count)] - sub_user_groups = [make_usergroup()['name'] for _ in range(count)] - user_group = make_usergroup({'users': users, 'roles': roles, 'user-groups': sub_user_groups}) + users = [module_target_sat.cli_factory.user()['login'] for _ in range(count)] + roles = [module_target_sat.cli_factory.make_role()['name'] for _ in range(count)] + sub_user_groups = [module_target_sat.cli_factory.usergroup()['name'] for _ in range(count)] + user_group = module_target_sat.cli_factory.usergroup( + {'users': users, 'roles': roles, 'user-groups': sub_user_groups} + ) assert sorted(users) == sorted(user_group['users']) assert sorted(roles) == sorted(user_group['roles']) assert sorted(sub_user_groups) == sorted(ug['usergroup'] for ug in user_group['user-groups']) @pytest.mark.tier2 -def test_positive_add_and_remove_elements(): +def test_positive_add_and_remove_elements(module_target_sat): """Create new user group. Add and remove several element from the group. :id: a4ce8724-d3c8-4c00-9421-aaa40394134d @@ -123,20 +112,20 @@ def test_positive_add_and_remove_elements(): :expectedresults: Elements are added to user group and then removed successfully. 
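The body of this test, shown next, deliberately exercises both addressing modes hammer accepts: elements are added by numeric id ('role-id', 'user-id', 'user-group-id') and removed by name ('role', 'user', 'user-group'). Reduced to a single element type, the pattern is (a sketch, fixture names as above):

def test_usergroup_membership_sketch(module_target_sat):
    role = module_target_sat.cli_factory.make_role()
    user_group = module_target_sat.cli_factory.usergroup()
    # add by id
    module_target_sat.cli.UserGroup.add_role({'id': user_group['id'], 'role-id': role['id']})
    assert module_target_sat.cli.UserGroup.info({'id': user_group['id']})['roles'] == [role['name']]
    # remove by name, the other addressing mode hammer accepts
    module_target_sat.cli.UserGroup.remove_role({'id': user_group['id'], 'role': role['name']})
    assert module_target_sat.cli.UserGroup.info({'id': user_group['id']})['roles'] == []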
- - :CaseLevel: Integration """ - role = make_role() - user_group = make_usergroup() - user = make_user() - sub_user_group = make_usergroup() + role = module_target_sat.cli_factory.make_role() + user_group = module_target_sat.cli_factory.usergroup() + user = module_target_sat.cli_factory.user() + sub_user_group = module_target_sat.cli_factory.usergroup() # Add elements by id - UserGroup.add_role({'id': user_group['id'], 'role-id': role['id']}) - UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) - UserGroup.add_user_group({'id': user_group['id'], 'user-group-id': sub_user_group['id']}) + module_target_sat.cli.UserGroup.add_role({'id': user_group['id'], 'role-id': role['id']}) + module_target_sat.cli.UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) + module_target_sat.cli.UserGroup.add_user_group( + {'id': user_group['id'], 'user-group-id': sub_user_group['id']} + ) - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['roles']) == 1 assert user_group['roles'][0] == role['name'] assert len(user_group['users']) == 1 @@ -145,11 +134,13 @@ def test_positive_add_and_remove_elements(): assert user_group['user-groups'][0]['usergroup'] == sub_user_group['name'] # Remove elements by name - UserGroup.remove_role({'id': user_group['id'], 'role': role['name']}) - UserGroup.remove_user({'id': user_group['id'], 'user': user['login']}) - UserGroup.remove_user_group({'id': user_group['id'], 'user-group': sub_user_group['name']}) + module_target_sat.cli.UserGroup.remove_role({'id': user_group['id'], 'role': role['name']}) + module_target_sat.cli.UserGroup.remove_user({'id': user_group['id'], 'user': user['login']}) + module_target_sat.cli.UserGroup.remove_user_group( + {'id': user_group['id'], 'user-group': sub_user_group['name']} + ) - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['roles']) == 0 assert len(user_group['users']) == 0 assert len(user_group['user-groups']) == 0 @@ -157,7 +148,7 @@ def test_positive_add_and_remove_elements(): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_user_assigned_to_usergroup(): +def test_positive_remove_user_assigned_to_usergroup(module_target_sat): """Create new user and assign it to user group. Then remove that user. :id: 2a2623ce-4723-4402-aae7-8675473fd8bd @@ -166,21 +157,19 @@ def test_positive_remove_user_assigned_to_usergroup(): :customerscenario: true - :CaseLevel: Integration - :BZ: 1667704 """ - user = make_user() - user_group = make_usergroup() - UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) - User.delete({'id': user['id']}) - user_group = UserGroup.info({'id': user_group['id']}) + user = module_target_sat.cli_factory.user() + user_group = module_target_sat.cli_factory.usergroup() + module_target_sat.cli.UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) + module_target_sat.cli.User.delete({'id': user['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert user['login'] not in user_group['users'] @pytest.mark.tier2 @pytest.mark.parametrize("ldap_auth_source", ["AD"], indirect=True) -def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): +def test_positive_automate_bz1426957(ldap_auth_source, function_user_group, target_sat): """Verify role is properly reflected on AD user. 
:id: 1c1209a6-5bb8-489c-a151-bb2fce4dbbfc @@ -191,11 +180,9 @@ def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): :customerscenario: true - :CaseLevel: Integration - :BZ: 1426957, 1667704 """ - ext_user_group = make_usergroup_external( + ext_user_group = target_sat.cli_factory.usergroup_external( { 'auth-source-id': ldap_auth_source[1].id, 'user-group-id': function_user_group['id'], @@ -203,23 +190,26 @@ def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): } ) assert ext_user_group['auth-source'] == ldap_auth_source[1].name - role = make_role() - UserGroup.add_role({'id': function_user_group['id'], 'role-id': role['id']}) - Task.with_user( + role = target_sat.cli_factory.make_role() + target_sat.cli.UserGroup.add_role({'id': function_user_group['id'], 'role-id': role['id']}) + target_sat.cli.Task.with_user( username=ldap_auth_source[0]['ldap_user_name'], password=ldap_auth_source[0]['ldap_user_passwd'], ).list() - UserGroupExternal.refresh({'user-group-id': function_user_group['id'], 'name': 'foobargroup'}) + target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': function_user_group['id'], 'name': 'foobargroup'} + ) assert ( - role['name'] in User.info({'login': ldap_auth_source[0]['ldap_user_name']})['user-groups'] + role['name'] + in target_sat.cli.User.info({'login': ldap_auth_source[0]['ldap_user_name']})['user-groups'] ) - User.delete({'login': ldap_auth_source[0]['ldap_user_name']}) - LDAPAuthSource.delete({'id': ldap_auth_source[1].id}) + target_sat.cli.User.delete({'login': ldap_auth_source[0]['ldap_user_name']}) + target_sat.cli.LDAPAuthSource.delete({'id': ldap_auth_source[1].id}) @pytest.mark.tier2 @pytest.mark.parametrize("ldap_auth_source", ["AD"], indirect=True) -def test_negative_automate_bz1437578(ldap_auth_source, function_user_group): +def test_negative_automate_bz1437578(ldap_auth_source, function_user_group, module_target_sat): """Verify error message on usergroup create with 'Domain Users' on AD user. :id: d4caf33e-b9eb-4281-9e04-fbe1d5b035dc @@ -228,22 +218,20 @@ def test_negative_automate_bz1437578(ldap_auth_source, function_user_group): :expectedresults: Error message as Domain Users is a special group in AD. - :CaseLevel: Integration - :BZ: 1437578 """ with pytest.raises(CLIReturnCodeError): - result = UserGroupExternal.create( + result = module_target_sat.cli.UserGroupExternal.create( { 'auth-source-id': ldap_auth_source[1].id, 'user-group-id': function_user_group['id'], 'name': 'Domain Users', } ) - assert ( - 'Could not create external user group: ' - 'Name is not found in the authentication source' - 'Name Domain Users is a special group in AD.' - ' Unfortunately, we cannot obtain membership information' - ' from a LDAP search and therefore sync it.' == result - ) + assert ( + result == 'Could not create external user group: ' + 'Name is not found in the authentication source' + 'Name Domain Users is a special group in AD.' + ' Unfortunately, we cannot obtain membership information' + ' from a LDAP search and therefore sync it.' 
+ ) diff --git a/tests/foreman/cli/test_vm_install_products_package.py b/tests/foreman/cli/test_vm_install_products_package.py index b60bcae7c90..f03fc19e997 100644 --- a/tests/foreman/cli/test_vm_install_products_package.py +++ b/tests/foreman/cli/test_vm_install_products_package.py @@ -4,33 +4,31 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from broker import Broker +import pytest -from robottelo.cli.factory import make_lifecycle_environment from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import DISTROS_SUPPORTED -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + DISTROS_SUPPORTED, + FAKE_0_CUSTOM_PACKAGE, +) from robottelo.hosts import ContentHost @pytest.fixture -def lce(function_entitlement_manifest_org): - return make_lifecycle_environment({'organization-id': function_entitlement_manifest_org.id}) +def lce(function_entitlement_manifest_org, target_sat): + return target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': function_entitlement_manifest_org.id} + ) @pytest.mark.tier4 diff --git a/tests/foreman/cli/test_webhook.py b/tests/foreman/cli/test_webhook.py index 110b8a08860..2849b6aa57d 100644 --- a/tests/foreman/cli/test_webhook.py +++ b/tests/foreman/cli/test_webhook.py @@ -4,33 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: HooksandWebhooks :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from functools import partial from random import choice -import pytest from box import Box from fauxfactory import gen_alphanumeric +import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.webhook import Webhook -from robottelo.constants import WEBHOOK_EVENTS -from robottelo.constants import WEBHOOK_METHODS +from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS +from robottelo.exceptions import CLIReturnCodeError -@pytest.fixture(scope='function') -def webhook_factory(request, class_org, class_location): +@pytest.fixture +def webhook_factory(request, class_org, class_location, class_target_sat): def _create_webhook(org, loc, options=None): """Function for creating a new Webhook @@ -50,21 +43,21 @@ def _create_webhook(org, loc, options=None): if options.get('target-url') is None: options['target-url'] = 'http://localhost/some-path' - return Box(Webhook.create(options)) + return Box(class_target_sat.cli.Webhook.create(options)) return partial(_create_webhook, org=class_org, loc=class_location) def assert_created(options, hook): for option in options.items(): - if not option[0] in ['event', 'organization-id', 'location-id']: + if option[0] not in ['event', 'organization-id', 'location-id']: assert hook[option[0]] == option[1] class TestWebhook: @pytest.mark.tier3 @pytest.mark.e2e - def test_positive_end_to_end(self, webhook_factory): + def test_positive_end_to_end(self, webhook_factory, class_target_sat): """Test creation, list, update and removal of webhook :id: d893d176-cbe9-421b-8631-7c7a1a462ea5 @@ -82,22 +75,28 @@ def test_positive_end_to_end(self, webhook_factory): assert webhook_options['event'] == webhook_item['event'].rsplit('.', 2)[0] # Find webhook by name - webhook_search = Webhook.info({'name': 
webhook_options['name']}) + webhook_search = class_target_sat.cli.Webhook.info({'name': webhook_options['name']}) # A non empty dict has been returned assert webhook_search # Test that webhook gets updated different_url = 'http://localhost/different-path' - Webhook.update({'name': webhook_options['name'], 'target-url': different_url}) - webhook_search_after_update = Webhook.info({'name': webhook_options['name']}) + class_target_sat.cli.Webhook.update( + {'name': webhook_options['name'], 'target-url': different_url} + ) + webhook_search_after_update = class_target_sat.cli.Webhook.info( + {'name': webhook_options['name']} + ) assert webhook_search_after_update['target-url'] == different_url # Test that webhook is deleted - Webhook.delete({'name': webhook_options['name']}) - webhook_deleted_search = Webhook.list({'search': webhook_options['name']}) + class_target_sat.cli.Webhook.delete({'name': webhook_options['name']}) + webhook_deleted_search = class_target_sat.cli.Webhook.list( + {'search': webhook_options['name']} + ) assert len(webhook_deleted_search) == 0 - def test_webhook_disabled_enabled(self, webhook_factory): + def test_webhook_disabled_enabled(self, webhook_factory, class_target_sat): """Test disable/enable the webhook :id: 4fef4320-0655-440d-90e7-150ffcdcd043 @@ -107,17 +106,17 @@ def test_webhook_disabled_enabled(self, webhook_factory): hook = webhook_factory() # The new webhook is enabled by default on creation - assert Webhook.info({'name': hook.name})['enabled'] == 'yes' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'yes' - Webhook.update({'name': hook.name, 'enabled': 'no'}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'enabled': 'no'}) # The webhook should be disabled now - assert Webhook.info({'name': hook.name})['enabled'] == 'no' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'no' - Webhook.update({'name': hook.name, 'enabled': 'yes'}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'enabled': 'yes'}) # The webhook should be enabled again - assert Webhook.info({'name': hook.name})['enabled'] == 'yes' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'yes' - def test_negative_update_invalid_url(self, webhook_factory): + def test_negative_update_invalid_url(self, webhook_factory, class_target_sat): """Test webhook negative update - invalid target URL fails :id: 7a6c87f5-0e6c-4a55-b495-b1bfb24607bd @@ -130,4 +129,4 @@ def test_negative_update_invalid_url(self, webhook_factory): invalid_url = '$%^##@***' with pytest.raises(CLIReturnCodeError): - Webhook.update({'name': hook.name, 'target-url': invalid_url}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'target-url': invalid_url}) diff --git a/tests/foreman/conftest.py b/tests/foreman/conftest.py index 4e3aca8a77c..1be6ba294e6 100644 --- a/tests/foreman/conftest.py +++ b/tests/foreman/conftest.py @@ -1,6 +1,8 @@ """Configurations for py.test runner""" + import pytest +from robottelo.hosts import Satellite from robottelo.logging import collection_logger @@ -28,6 +30,10 @@ def pytest_collection_modifyitems(session, items, config): deselected_items = [] for item in items: + if any("manifest" in f for f in getattr(item, "fixturenames", ())): + item.add_marker("manifester") + if any("ldap" in f for f in getattr(item, "fixturenames", ())): + item.add_marker("ldap") # 1. Deselect tests marked with @pytest.mark.deselect # WONTFIX BZs makes test to be dynamically marked as deselect. 
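Stepping out of the collection hook for a moment: the two fixture-name checks added at the top of this loop implement a small but useful technique, tagging tests with markers derived from the fixtures they (transitively) request. Generalized, it is just a mapping over item.fixturenames (a sketch; it assumes the marker names are registered in the project's pytest configuration, as robottelo's are):

# conftest.py (sketch)
FIXTURE_MARKER_MAP = {
    'manifest': 'manifester',  # any fixture whose name mentions manifests
    'ldap': 'ldap',            # any fixture touching LDAP auth sources
}


def pytest_collection_modifyitems(session, items, config):
    for item in items:
        for fragment, marker in FIXTURE_MARKER_MAP.items():
            if any(fragment in f for f in getattr(item, 'fixturenames', ())):
                item.add_marker(marker)

After collection, pytest -m manifester (or -m ldap) selects exactly those tests without touching any test file. The deselect handling that the loop continues with below is unchanged by this patch.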
deselect = item.get_closest_marker('deselect') @@ -40,3 +46,32 @@ def pytest_collection_modifyitems(session, items, config): config.hook.pytest_deselected(items=deselected_items) items[:] = [item for item in items if item not in deselected_items] + + +@pytest.fixture(autouse=True) +def ui_session_record_property(request, record_property): + """ + Autouse fixture to set the record_property attribute for Satellite instances in the test. + + This fixture iterates over all fixtures in the current test node + (excluding the current fixture) and sets the record_property attribute + for instances of the Satellite class. + + Args: + request: The pytest request object. + record_property: The value to set for the record_property attribute. + """ + test_directories = [ + 'tests/foreman/destructive', + 'tests/foreman/ui', + 'tests/foreman/sanity', + 'tests/foreman/virtwho', + ] + test_file_path = request.node.fspath.strpath + if any(directory in test_file_path for directory in test_directories): + for fixture in request.node.fixturenames: + if request.fixturename != fixture and isinstance( + request.getfixturevalue(fixture), Satellite + ): + sat = request.getfixturevalue(fixture) + sat.record_property = record_property diff --git a/tests/foreman/data/ant-7.7.7-1.noarch.rpm b/tests/foreman/data/ant-7.7.7-1.noarch.rpm new file mode 100644 index 00000000000..70d588254b9 Binary files /dev/null and b/tests/foreman/data/ant-7.7.7-1.noarch.rpm differ diff --git a/tests/foreman/data/expired-manifest.zip b/tests/foreman/data/expired-manifest.zip new file mode 100644 index 00000000000..8737ba42db2 Binary files /dev/null and b/tests/foreman/data/expired-manifest.zip differ diff --git a/tests/foreman/data/hammer_commands.json b/tests/foreman/data/hammer_commands.json index 7b449e5e9a0..90b88f4536f 100644 --- a/tests/foreman/data/hammer_commands.json +++ b/tests/foreman/data/hammer_commands.json @@ -746,7 +746,13 @@ "value": null }, { - "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | limit | x | x | attach | x | x | version | x | x | environment | x | x | view | x | x | collections/id | x | x | collections/name | x | x | overrides/content label | x | x | overrides/name | x | x | overrides/value | x | x | purpose/service level | x | x | purpose/purpose usage | x | x | purpose/purpose role | x | x | purpose/purpose addons | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string", + "help": "Show hosts associated to an activation key", + "name": "show-hosts", + "shortname": null, + "value": "BOOLEAN" + }, + { + "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | limit | x | x | attach | x | x | version | x | x | environment | x | x | view | x | x | hosts/id | x | x | hosts/name | x | x | collections/id | x | x | collections/name | x | x | overrides/content label | x | x | overrides/name | x | x | overrides/value | x | x | purpose/service level | x | x | purpose/purpose usage | x | x | purpose/purpose role | x | x | purpose/purpose addons | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -1612,7 +1618,7 @@ "value": "VALUE" }, { - "help": "Print help ------------------------------|-----|---------|----- | ALL | DEFAULT | THIN ------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | url | x | x | type | x | x | content source type | x | x | username | x | x | Subpaths/ | x | x | Products/id | x | x | Products/organization id | x | x | Products/name | x | x | Products/label | x | x | proxies/id | x | x | proxies/name | x | x | proxies/url | x | x | proxies/download policy | x | x | ------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string", + "help": "Print help ------------------------------|-----|---------|----- | ALL | DEFAULT | THIN ------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | url | x | x | type | x | x | content source type | x | x | username | x | x | ssl | x | x | ca cert/id | x | x | ca cert/name | x | x | client cert/id | x | x | client cert/name | x | x | client key/id | x | x | client key/name | x | x | Subpaths/ | x | x | Products/id | x | x | Products/organization id | x | x | Products/name | x | x | Products/label | x | x | proxies/id | x | x | proxies/name | x | x | proxies/url | x | x | proxies/download policy | x | x | ------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -4108,7 +4114,7 @@ "value": "VALUE" }, { - "help": "Print help --------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------|-----|---------|----- | x | x | x at | x | x | name | x | x | x proxy name | x | x | name | x | x | | x | x | | x | x | | x | x | --------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string Values: compliant, incompliant, inconclusive string Values: true, false string string integer integer string string string integer string Values: host, policy datetime string string integer text string string string integer string string datetime text string text string string", + "help": "Print help --------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------|-----|---------|----- | x | x | x at | x | x | name | x | x | x proxy name | x | x | name | x | x | | x | x | | x | x | | x | x | --------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. 
Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string Values: compliant, incompliant, inconclusive string Values: true, false string string integer integer string string string integer string Values: host, policy datetime lifecycle_environment string integer text string string string integer string string datetime text string text string string", "name": "help", "shortname": "h", "value": null @@ -4321,6 +4327,31 @@ ], "subcommands": [] }, + { + "description": "Authenticate against external source (IPA/PAM) with credentials", + "name": "basic-external", + "options": [ + { + "help": "Print help", + "name": "help", + "shortname": "h", + "value": null + }, + { + "help": "Password to access the remote system", + "name": "password", + "shortname": "p", + "value": "VALUE" + }, + { + "help": "Username to access the remote system you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "name": "username", + "shortname": "u", + "value": "VALUE" + } + ], + "subcommands": [] + }, { "description": "Negotiate the login credentials from the auth ticket (Kerberos)", "name": "negotiate", @@ -6816,7 +6847,7 @@ "value": null }, { - "help": "Print help -----------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | | x | x | | x | x | Features/name | x | x | Features/version | x | x | Locations/ | x | x | Organizations/ | x | x | at | x | x | at | x | x | -----------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string", + "help": "Print help -----------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | | x | x | count | x | x | Features/name | x | x | Features/version | x | x | Locations/ | x | x | Organizations/ | x | x | at | x | x | at | x | x | -----------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -7602,7 +7633,7 @@ "value": "KEY_VALUE_LIST" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 OpenStack: --volume: oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi Rackspace: --volume: VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) GCE: --volume: size_gb Volume size in GB, integer value", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. 
Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 OpenStack: --volume: oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi Rackspace: --volume: VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) GCE: --volume: size_gb Volume size in GB, integer value", "name": "help", "shortname": "h", "value": null @@ -7693,7 +7724,7 @@ "value": "KEY_VALUE_LIST" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. 
default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. 
username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. 
Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", "name": "help", "shortname": "h", "value": null @@ -7942,7 +7973,7 @@ "value": "KEY_VALUE_LIST" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. 
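As an illustration of the Libvirt schema spelled out in the help text above, here is a minimal Python sketch of a matching hammer invocation. Everything concrete in it (host name, compute resource, pool name, sizes) is a hypothetical placeholder, not a value from this diff, and other options hammer may require (organization, location, etc.) are omitted for brevity.

# Hypothetical sketch: drive `hammer host create` with the Libvirt volume and
# compute attributes documented in the help text above. All names and values
# are placeholders; pool_name/capacity and cpus/memory are the attributes the
# help marks as required (bold).
import subprocess

cmd = [
    "hammer", "host", "create",
    "--name", "libvirt-test-host",        # hypothetical host name
    "--compute-resource", "libvirt-cr",   # hypothetical compute resource name
    # --volume takes a comma-separated key=value list per the help text
    "--volume", "pool_name=default,capacity=10G,format_type=qcow2",
    # memory is given in bytes ("String, amount of memory, value in bytes")
    "--compute-attributes", "cpus=2,memory=2147483648",
]
subprocess.run(cmd, check=True)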
EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 
1000) boot_order Device names to specify the boot order AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi --interface: compute_name Compute name, e.g. 
eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", "name": "help", "shortname": "h", "value": null @@ -8112,7 +8143,7 @@ "value": "NUMBER" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. 
JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 OpenStack: --volume: oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi Rackspace: --volume: VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) GCE: --volume: size_gb Volume size in GB, integer value", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 
0G format_type Possible values: raw, qcow2 OpenStack: --volume: oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi Rackspace: --volume: VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) GCE: --volume: size_gb Volume size in GB, integer value", "name": "help", "shortname": "h", "value": null @@ -10749,7 +10780,7 @@ "value": null }, { - "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | at | x | x | | x | x | status/applied | x | x | status/restarted | x | x | status/failed | x | x | status/restart failures | x | x | status/skipped | x | x | status/pending | x | x | metrics/config_retrieval | x | x | metrics/exec | x | x | metrics/file | x | x | metrics/package | x | x | metrics/service | x | x | metrics/user | x | x | metrics/yumrepo | x | x | metrics/filebucket | x | x | metrics/cron | x | x | metrics/total | x | x | Logs/resource | x | x | Logs/message | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | at | x | x | | x | x | status/applied | x | x | status/restarted | x | x | status/failed | x | x | status/restart failures | x | x | status/skipped | x | x | status/pending | x | x | metrics/config retrieval | x | x | metrics/exec | x | x | metrics/file | x | x | metrics/package | x | x | metrics/service | x | x | metrics/user | x | x | metrics/yumrepo | x | x | metrics/filebucket | x | x | metrics/cron | x | x | metrics/total | x | x | Logs/resource | x | x | Logs/message | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. 
Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -11216,7 +11247,7 @@ "value": null }, { - "help": "Export formats. Choose syncable if content is to be imported via repository sync. Choose importable if content is to be imported via hammer content-import. Defaults to importable. Possible value(s): 'syncable', 'importable'", + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. Possible value(s): 'syncable', 'importable'", "name": "format", "shortname": null, "value": "ENUM" @@ -11271,7 +11302,7 @@ "value": "NUMBER" }, { - "help": "Export formats. Choose syncable if content is to be imported via repository sync. Choose importable if content is to be imported via hammer content-import. Defaults to importable. Possible value(s): 'syncable', 'importable'", + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. Possible value(s): 'syncable', 'importable'", "name": "format", "shortname": null, "value": "ENUM" @@ -11374,7 +11405,7 @@ "value": null }, { - "help": "Export formats. Choose syncable if content is to be imported via repository sync. Choose importable if content is to be imported via hammer content-import. Defaults to importable. Possible value(s): 'syncable', 'importable'", + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. Possible value(s): 'syncable', 'importable'", "name": "format", "shortname": null, "value": "ENUM" @@ -11528,6 +11559,12 @@ "shortname": null, "value": null }, + { + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. Possible value(s): 'syncable', 'importable'", + "name": "format", + "shortname": null, + "value": "ENUM" + }, { "help": "Export history id used for incremental export. If not provided the most recent export history will be used.", "name": "from-history-id", @@ -11583,6 +11620,12 @@ "shortname": null, "value": "NUMBER" }, + { + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. 
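To make the syncable/importable distinction above concrete, a minimal sketch of exporting a content view version with the `--format` option this diff adds; the organization, content view, and version values are hypothetical placeholders.

# Hypothetical sketch: complete export of a content view version using the
# new --format ENUM ('syncable' or 'importable') documented above.
import subprocess

subprocess.run(
    ["hammer", "content-export", "complete", "version",
     "--organization", "Default Organization",  # hypothetical
     "--content-view", "RHEL-CV",               # hypothetical
     "--version", "1.0",                        # hypothetical
     "--format", "syncable"],                   # or "importable"
    check=True,
)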
Possible value(s): 'syncable', 'importable'", + "name": "format", + "shortname": null, + "value": "ENUM" + }, { "help": "Export history id used for incremental export. If not provided the most recent export history will be used.", "name": "from-history-id", @@ -11686,6 +11729,12 @@ "shortname": null, "value": null }, + { + "help": "Export formats.Choose syncable if the exported content needs to be in a yum format. This option is only available for yum, file repositories. Choose importable if the importing server uses the same version and exported content needs to be one of yum, file, ansible_collection, docker repositories. Possible value(s): 'syncable', 'importable'", + "name": "format", + "shortname": null, + "value": "ENUM" + }, { "help": "Export history id used for incremental export. If not provided the most recent export history will be used.", "name": "from-history-id", @@ -14419,6 +14468,12 @@ "shortname": null, "value": "VALUE" }, + { + "help": "Check audited changes and proceed only if content or filters have changed since last publish", + "name": "publish-only-if-needed", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "Specify the list of units in each repo", "name": "repos-units", @@ -15148,6 +15203,12 @@ "shortname": null, "value": "NUMBER" }, + { + "help": "Whether or not to return filters applied to the content view version", + "name": "include-applied-filters", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "VALUE/NUMBER Name/Id of associated lifecycle environment", "name": "lifecycle-environment", @@ -15191,7 +15252,7 @@ "value": "VALUE" }, { - "help": "Print help -----------------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------------|-----|---------|----- | x | x | x | x | x | | x | x | x | x | x | view id | x | x | view name | x | x | view label | x | x | environments/id | x | x | environments/name | x | x | environments/label | x | x | Repositories/id | x | x | Repositories/name | x | x | Repositories/label | x | x | -----------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string", + "help": "Print help -----------------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------------|-----|---------|----- | x | x | x | x | x | | x | x | x | x | x | view id | x | x | view name | x | x | view label | x | x | environments/id | x | x | environments/name | x | x | environments/label | x | x | Repositories/id | x | x | Repositories/name | x | x | Repositories/label | x | x | applied filters | x | x | filters/id | x | x | filters/name | x | x | solving | x | x | -----------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -15245,6 +15306,12 @@ "shortname": null, "value": "BOOLEAN" }, + { + "help": "Whether or not to return filters applied to the content view version", + "name": "include-applied-filters", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "VALUE/NUMBER Filter versions by environment", "name": "lifecycle-environment", @@ -21265,7 +21332,7 @@ "value": "KEY_VALUE_LIST" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string parameters accept format defined by its schema (bold are required; <> contains acceptable type; [] contains acceptable value): \"\u001b[1mname\u001b[0m=\\,\u001b[1mvalue\u001b[0m=\\,parameter_type=[string|boolean|integer|real|array|hash|yaml|json]\\,hidden_value=[true|false|1|0], ... \" \"product_id=\\,product_name=\\,arch=\\,version=, ... \" mac ip Possible values: interface, bmc, bond, bridge name subnet_id domain_id identifier true/false true/false, each managed hosts needs to have one primary interface. true/false true/false virtual=true: tag VLAN tag, this attribute has precedence over the subnet VLAN ID. Only for virtual interfaces. attached_to Identifier of the interface to which this interface belongs, e.g. eth1. type=bond: mode Possible values: balance-rr, active-backup, balance-xor, broadcast, 802.3ad, balance-tlb, balance-alb attached_devices Identifiers of slave interfaces, e.g. [eth1,eth2] bond_options type=bmc: provider always IPMI username password \u001b[1mNOTE:\u001b[0m Bold attributes are required. 
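The bond-interface schema in the help string above translates into a single `--interface` key=value list; a minimal sketch follows, with device and host names as hypothetical placeholders. Note the backslash-escaped comma inside `attached_devices`, which the help text calls for when a value itself contains a comma.

# Hypothetical sketch: create a host with a bonded interface per the
# type=bond schema above. All identifiers are placeholders.
import subprocess

interface = (
    "type=bond,mode=active-backup,"
    "attached_devices=[eth1\\,eth2],"  # comma inside the value is escaped
    "identifier=bond0,managed=true"
)
subprocess.run(
    ["hammer", "host", "create",
     "--name", "bonded-host",          # hypothetical host name
     "--interface", interface],
    check=True,
)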
EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order start Boolean (expressed as 0 or 1), whether to start the machine or not OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. 
start Boolean, set 1 to start the vm Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order start Must be a 1 or 0, whether to start the machine or not AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string parameters accept format defined by its schema (bold are required; <> contains acceptable type; [] contains acceptable value): \"\u001b[1mname\u001b[0m=\\,\u001b[1mvalue\u001b[0m=\\,parameter_type=[string|boolean|integer|real|array|hash|yaml|json]\\,hidden_value=[true|false|1|0], ... \" \"product_id=\\,product_name=\\,arch=\\,version=, ... \" mac ip Possible values: interface, bmc, bond, bridge name subnet_id domain_id identifier true/false true/false, each managed hosts needs to have one primary interface. true/false true/false virtual=true: tag VLAN tag, this attribute has precedence over the subnet VLAN ID. Only for virtual interfaces. attached_to Identifier of the interface to which this interface belongs, e.g. eth1. type=bond: mode Possible values: balance-rr, active-backup, balance-xor, broadcast, 802.3ad, balance-tlb, balance-alb attached_devices Identifiers of slave interfaces, e.g. [eth1,eth2] bond_options type=bmc: provider always IPMI username password \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order start Boolean (expressed as 0 or 1), whether to start the machine or not OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. 
start Boolean, set 1 to start the vm Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order start Must be a 1 or 0, whether to start the machine or not AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", "name": "help", "shortname": "h", "value": null @@ -21575,7 +21642,7 @@ ], "subcommands": [ { - "description": "Schedule errata for installation using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Schedule errata for installation using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "apply", "options": [ { @@ -21585,7 +21652,7 @@ "value": null }, { - "help": "List of Errata ids to install. Will be removed in a future release", + "help": "List of Errata ids to install. 
Will be removed in Katello 4.10", "name": "errata-ids", "shortname": null, "value": "LIST" @@ -22812,7 +22879,7 @@ "value": "BOOLEAN" }, { - "help": "Print help -----------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------|-----|---------|----- | x | x | x | x | x | x system | x | x | group | x | x | | x | x | | x | x | status | x | x | | x | | | x | | information | x | | view | x | x | environment | x | x | | x | | | x | | | x | | status | x | x | -----------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer string string integer datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string integer string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified string integer datetime string string reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", + "help": "Print help -----------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------|-----|---------|----- | x | x | x | x | x | x system | x | x | group | x | x | | x | x | | x | x | status | x | x | | x | | | x | | information | x | | view | x | x | environment | x | x | | x | | | x | | | x | | status | x | x | -----------------------|-----|---------|----- you can find 
option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer configuration_status.applied integer configuration_status.enabled Values: true, false configuration_status.failed integer configuration_status.failed_restarts integer configuration_status.interesting Values: true, false configuration_status.pending integer configuration_status.restarted integer configuration_status.skipped integer string string datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman Values: true, false infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified Values: PXELinux_BIOS, PXELinux_UEFI, Grub_UEFI, Grub2_BIOS, Grub2_ELF, Grub2_UEFI, Grub2_UEFI_SecureBoot, Grub2_UEFI_HTTP, Grub2_UEFI_HTTPS, Grub2_UEFI_HTTPS_SecureBoot, iPXE_Embedded, iPXE_UEFI_HTTP, iPXE_Chain_BIOS, iPXE_Chain_UEFI string integer datetime string string reported.bios_release_date reported.bios_vendor reported.bios_version reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", "name": "help", "shortname": "h", "value": null @@ -22833,7 +22900,7 @@ ], "subcommands": [ { - "description": "Install packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. 
Consider using remote execution instead.", + "description": "Install packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "install", "options": [ { @@ -22943,7 +23010,7 @@ "subcommands": [] }, { - "description": "Uninstall packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Uninstall packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "remove", "options": [ { @@ -22980,7 +23047,7 @@ "subcommands": [] }, { - "description": "Update packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Update packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "upgrade", "options": [ { @@ -23017,7 +23084,7 @@ "subcommands": [] }, { - "description": "Update packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Update packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "upgrade-all", "options": [ { @@ -23062,7 +23129,7 @@ ], "subcommands": [ { - "description": "Install packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Install packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "install", "options": [ { @@ -23099,7 +23166,7 @@ "subcommands": [] }, { - "description": "Uninstall packages remotely using katello-agent. NOTE: Katello-agent is deprecated and will be removed in a future release. Consider using remote execution instead.", + "description": "Uninstall packages remotely using katello-agent. WARNING: Katello-agent is deprecated and will be removed in Katello 4.10. Migrate to remote execution now.", "name": "remove", "options": [ { @@ -24749,7 +24816,7 @@ "value": "KEY_VALUE_LIST" }, { - "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string parameters accept format defined by its schema (bold are required; <> contains acceptable type; [] contains acceptable value): \"name=\\,value=\\,parameter_type=[string|boolean|integer|real|array|hash|yaml|json]\\,hidden_value=[true|false|1|0], ... 
\" \"product_id=\\,product_name=\\,arch=\\,version=, ... \" mac ip Possible values: interface, bmc, bond, bridge name subnet_id domain_id identifier true/false true/false, each managed hosts needs to have one primary interface. true/false true/false virtual=true: tag VLAN tag, this attribute has precedence over the subnet VLAN ID. Only for virtual interfaces. attached_to Identifier of the interface to which this interface belongs, e.g. eth1. type=bond: mode Possible values: balance-rr, active-backup, balance-xor, broadcast, 802.3ad, balance-tlb, balance-alb attached_devices Identifiers of slave interfaces, e.g. [eth1,eth2] bond_options type=bmc: provider always IPMI username password \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order start Boolean (expressed as 0 or 1), whether to start the machine or not OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virto or virto_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. 
start Boolean, set 1 to start the vm Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order start Must be a 1 or 0, whether to start the machine or not AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", + "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string parameters accept format defined by its schema (bold are required; <> contains acceptable type; [] contains acceptable value): \"name=\\,value=\\,parameter_type=[string|boolean|integer|real|array|hash|yaml|json]\\,hidden_value=[true|false|1|0], ... \" \"product_id=\\,product_name=\\,arch=\\,version=, ... \" mac ip Possible values: interface, bmc, bond, bridge name subnet_id domain_id identifier true/false true/false, each managed hosts needs to have one primary interface. true/false true/false virtual=true: tag VLAN tag, this attribute has precedence over the subnet VLAN ID. Only for virtual interfaces. attached_to Identifier of the interface to which this interface belongs, e.g. eth1. type=bond: mode Possible values: balance-rr, active-backup, balance-xor, broadcast, 802.3ad, balance-tlb, balance-alb attached_devices Identifiers of slave interfaces, e.g. [eth1,eth2] bond_options type=bmc: provider always IPMI username password \u001b[1mNOTE:\u001b[0m Bold attributes are required. EC2: --volume: --interface: --compute-attributes: availability_zone flavor_id groups security_group_ids managed_ip Libvirt: --volume: \u001b[1mpool_name\u001b[0m One of available storage pools \u001b[1mcapacity\u001b[0m String value, e.g. 10G allocation Initial allocation, e.g. 0G format_type Possible values: raw, qcow2 --interface: compute_type Possible values: bridge, network compute_bridge Name of interface according to type compute_model Possible values: virtio, rtl8139, ne2k_pci, pcnet, e1000 compute_network Libvirt instance network, e.g. default --compute-attributes: \u001b[1mcpus\u001b[0m Number of CPUs \u001b[1mmemory\u001b[0m String, amount of memory, value in bytes cpu_mode Possible values: default, host-model, host-passthrough boot_order Device names to specify the boot order start Boolean (expressed as 0 or 1), whether to start the machine or not OpenStack: --volume: --interface: --compute-attributes: availability_zone boot_from_volume flavor_ref image_ref tenant_id security_groups network oVirt: --volume: size_gb Volume size in GB, integer value storage_domain ID or name of storage domain bootable Boolean, set 1 for bootable, only one volume can be bootable preallocate Boolean, set 1 to preallocate wipe_after_delete Boolean, set 1 to wipe disk after delete interface Disk interface name, must be ide, virtio or virtio_scsi --interface: compute_name Compute name, e.g. eth0 compute_network Select one of available networks for a cluster, must be an ID or a name compute_interface Interface type compute_vnic_profile Vnic Profile --compute-attributes: cluster ID or name of cluster to use template Hardware profile to use cores Integer value, number of cores sockets Integer value, number of sockets memory Amount of memory, integer value in bytes ha Boolean, set 1 to high availability display_type Possible values: VNC, SPICE keyboard_layout Possible values: ar, de-ch, es, fo, fr-ca, hu, ja, mk, no, pt-br, sv, da, en-gb, et, fr, fr-ch, is, lt, nl, pl, ru, th, de, en-us, fi, fr-be, hr, it, lv, nl-be, pt, sl, tr. Not usable if display type is SPICE. 
start Boolean, set 1 to start the vm Rackspace: --volume: --interface: --compute-attributes: flavor_id VMware: --volume: name storage_pod Storage Pod ID from VMware datastore Datastore ID from VMware mode persistent/independent_persistent/independent_nonpersistent size_gb Integer number, volume size in GB thin true/false eager_zero true/false controller_key Associated SCSI controller key --interface: compute_type Type of the network adapter, for example one of: VirtualVmxnet3 VirtualE1000 See documentation center for your version of vSphere to find more details about available adapter types: https://www.vmware.com/support/pubs/ compute_network Network ID or Network Name from VMware --compute-attributes: \u001b[1mcluster\u001b[0m Cluster ID from VMware \u001b[1mcorespersocket\u001b[0m Number of cores per socket (applicable to hardware versions < 10 only) \u001b[1mcpus\u001b[0m CPU count \u001b[1mmemory_mb\u001b[0m Integer number, amount of memory in MB \u001b[1mpath\u001b[0m Path to folder \u001b[1mresource_pool\u001b[0m Resource Pool ID from VMware firmware automatic/bios/efi guest_id Guest OS ID form VMware hardware_version Hardware version ID from VMware memoryHotAddEnabled Must be a 1 or 0, lets you add memory resources while the machine is on cpuHotAddEnabled Must be a 1 or 0, lets you add CPU resources while the machine is on add_cdrom Must be a 1 or 0, Add a CD-ROM drive to the virtual machine annotation Annotation Notes scsi_controllers List with SCSI controllers definitions type - ID of the controller from VMware key - Key of the controller (e.g. 1000) boot_order Device names to specify the boot order start Must be a 1 or 0, whether to start the machine or not AzureRM: --volume: disk_size_gb Volume Size in GB (integer value) data_disk_caching Data Disk Caching (None, ReadOnly, ReadWrite) --interface: compute_network Select one of available Azure Subnets, must be an ID compute_public_ip Public IP (None, Static, Dynamic) compute_private_ip Static Private IP (expressed as true or false) --compute-attributes: resource_group Existing Azure Resource Group of user vm_size VM Size, eg. Standard_A0 etc. username The Admin username password The Admin password platform OS type eg. Linux ssh_key_data SSH key for passwordless authentication os_disk_caching OS disk caching premium_os_disk Premium OS Disk, Boolean as 0 or 1 script_command Custom Script Command script_uris Comma seperated file URIs GCE: --volume: size_gb Volume size in GB, integer value --interface: --compute-attributes: machine_type network associate_external_ip", "name": "help", "shortname": "h", "value": null @@ -25197,7 +25264,7 @@ "value": "BOOLEAN" }, { - "help": "Print help ------------|-----|---------|----- | ALL | DEFAULT | THIN ------------|-----|---------|----- | x | x | x | x | x | x | x | | | x | | | x | | ------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. 
JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer string string integer datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string integer string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified string integer datetime string string reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", + "help": "Print help ------------|-----|---------|----- | ALL | DEFAULT | THIN ------------|-----|---------|----- | x | x | x | x | x | x | x | | | x | | | x | | ------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer configuration_status.applied integer configuration_status.enabled Values: true, false configuration_status.failed integer configuration_status.failed_restarts integer configuration_status.interesting Values: true, false configuration_status.pending integer configuration_status.restarted integer configuration_status.skipped integer string string datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman Values: true, false infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified Values: PXELinux_BIOS, PXELinux_UEFI, Grub_UEFI, Grub2_BIOS, Grub2_ELF, Grub2_UEFI, Grub2_UEFI_SecureBoot, Grub2_UEFI_HTTP, Grub2_UEFI_HTTPS, Grub2_UEFI_HTTPS_SecureBoot, iPXE_Embedded, iPXE_UEFI_HTTP, iPXE_Chain_BIOS, iPXE_Chain_UEFI string integer datetime string string reported.bios_release_date reported.bios_vendor reported.bios_version reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", "name": "help", "shortname": "h", "value": null @@ -28392,6 +28459,12 @@ "shortname": null, "value": "NUMBER" }, + { + "help": "Override the global time to pickup interval for this invocation only", + "name": "time-to-pickup", + "shortname": null, + "value": "NUMBER" + }, { "help": "Print help you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. 
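For the `time-to-pickup` option introduced above (a NUMBER that, per its help text, overrides the global time-to-pickup interval for one invocation only), a minimal hedged sketch; the template, command, and fixture names are assumptions:

def test_invocation_time_to_pickup(target_sat, rex_contenthost):
    """Sketch: override the global time-to-pickup for a single invocation."""
    target_sat.cli.JobInvocation.create(
        {
            'job-template': 'Run Command - Script Default',  # assumed template name
            'inputs': 'command=id',
            'search-query': f'name = {rex_contenthost.hostname}',
            'time-to-pickup': '10',  # applies to this invocation only
        }
    )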
JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", @@ -28442,7 +28515,7 @@ "value": null }, { - "help": "Print help --------------------|-----|-------- | ALL | DEFAULT --------------------|-----|-------- | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x ordering | x | x | x | x category | x | x | x | x line | x | x logic id | x | x | x | x --------------------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help --------------------|-----|-------- | ALL | DEFAULT --------------------|-----|-------- | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x | x ordering | x | x | x | x category | x | x | x | x line | x | x logic id | x | x to pickup | x | x | x | x --------------------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -28663,6 +28736,12 @@ "description": "Create a job template", "name": "create", "options": [ + { + "help": "Enable the callback plugin for this template", + "name": "ansible-callback-enabled", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "", "name": "audit-comment", @@ -29131,7 +29210,7 @@ "value": null }, { - "help": "Print help ---------------|-----|---------|----- | ALL | DEFAULT | THIN ---------------|-----|---------|----- | x | x | x | x | x | x category | x | x | | x | x | | x | x | | x | x | | x | x | Locations/ | x | x | Organizations/ | x | x | ---------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. 
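A hedged sketch of the new `ansible-callback-enabled` BOOLEAN added to `job-template create` above (the same flag is added to `update` further below); the job category, provider type, and template file path are assumptions:

def test_create_template_with_callback(target_sat):
    """Sketch: create a job template with the Ansible callback plugin enabled."""
    target_sat.cli.JobTemplate.create(
        {
            'name': 'callback-demo',
            'job-category': 'Ansible Playbook',  # assumed category
            'provider-type': 'Ansible',
            'file': '/tmp/playbook.erb',  # assumed path to a template body on the Satellite
            'ansible-callback-enabled': 'true',
        }
    )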
Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help -------------------------|-----|---------|----- | ALL | DEFAULT | THIN -------------------------|-----|---------|----- | x | x | x | x | x | x category | x | x | | x | x | | x | x | callback enabled | x | x | | x | x | | x | x | Locations/ | x | x | Organizations/ | x | x | -------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -29222,6 +29301,12 @@ "description": "Update a job template", "name": "update", "options": [ + { + "help": "Enable the callback plugin for this template", + "name": "ansible-callback-enabled", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "", "name": "audit-comment", @@ -29607,10 +29692,10 @@ "value": "VALUE" }, { - "help": "Set true if you want to see only library environments Possible value(s): 'true', 'false'", + "help": "Set true if you want to see only library environments", "name": "library", "shortname": null, - "value": "ENUM" + "value": "BOOLEAN" }, { "help": "Filter only environments containing this name", @@ -29679,6 +29764,12 @@ "description": "List environment paths", "name": "paths", "options": [ + { + "help": "Show whether each lifecycle environment is associated with the given Capsule id.", + "name": "content-source-id", + "shortname": null, + "value": "NUMBER" + }, { "help": "Show specified fields or predefined field sets only. 
(See below)", "name": "fields", @@ -33217,6 +33308,12 @@ "description": "Update the CDN configuration", "name": "configure-cdn", "options": [ + { + "help": "If product certificates should be used to authenticate to a custom CDN.", + "name": "custom-cdn-auth-enabled", + "shortname": null, + "value": "VALUE" + }, { "help": "Id of the Organization", "name": "id", @@ -34523,6 +34620,12 @@ "shortname": null, "value": "VALUE" }, + { + "help": "Whether Simple Content Access should be enabled for the organization.", + "name": "simple-content-access", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "Capsule names/ids", "name": "smart-proxies", @@ -37378,7 +37481,7 @@ "value": "BOOLEAN" }, { - "help": "Print help -----------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------|-----|---------|----- | x | x | x | x | x | x system | x | x | group | x | x | | x | x | | x | x | status | x | x | | x | | | x | | information | x | | view | x | x | environment | x | x | | x | | | x | | | x | | status | x | x | -----------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
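To exercise the organization options added above (`custom-cdn-auth-enabled` on `configure-cdn`, and the `simple-content-access` BOOLEAN), a hedged sketch; the wrapper call and the organization fixture are assumptions:

def test_enable_simple_content_access(target_sat, module_org):
    """Sketch: turn on Simple Content Access for an organization."""
    target_sat.cli.Org.update(
        {
            'id': module_org.id,
            'simple-content-access': 'true',
        }
    )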
Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer string string integer datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string integer string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified string integer datetime string string reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", + "help": "Print help -----------------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------------|-----|---------|----- | x | x | x | x | x | x system | x | x | group | x | x | | x | x | | x | x | status | x | x | | x | | | x | | information | x | | view | x | x | environment | x | x | | x | | | x | | | x | | status | x | x | -----------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string string string string Values: mismatched, matched, not_specified string string string date string string boolean boot_time Values: true, false Values: built, pending, token_expired, build_failed text string integer configuration_status.applied integer configuration_status.enabled Values: true, false configuration_status.failed integer configuration_status.failed_restarts integer configuration_status.interesting Values: true, false configuration_status.pending integer configuration_status.restarted integer configuration_status.skipped integer string string datetime integer string integer Values: security_needed, errata_needed, updated, unknown Values: ok, error string Values: ok, warning, error string string string string string integer string string boolean string integer string infrastructure_facet.foreman Values: true, false infrastructure_facet.smart_proxy_id Values: reporting, no_report Values: disconnect, sync integer string datetime string string job_invocation.id string job_invocation.result Values: cancelled, failed, pending, success datetime datetime string string integer string Values: true, false string string string integer string string string integer string string string string integer string string string string string integer string Values: mismatched, matched, not_specified Values: PXELinux_BIOS, PXELinux_UEFI, Grub_UEFI, Grub2_BIOS, Grub2_ELF, Grub2_UEFI, Grub2_UEFI_SecureBoot, Grub2_UEFI_HTTP, Grub2_UEFI_HTTPS, Grub2_UEFI_HTTPS_SecureBoot, iPXE_Embedded, iPXE_UEFI_HTTP, iPXE_Chain_BIOS, iPXE_Chain_UEFI string integer datetime string string reported.bios_release_date reported.bios_vendor reported.bios_version reported.boot_time reported.cores reported.disks_total reported.kernel_version reported.ram reported.sockets reported.virtual Values: true, false string string text Values: mismatched, matched, not_specified string Values: mismatched, matched, not_specified string status.applied integer status.enabled Values: true, false status.failed integer status.failed_restarts integer status.interesting Values: true, false status.pending integer status.restarted integer status.skipped integer string subnet.name text string subnet6.name text string string Values: valid, partial, invalid, unknown, disabled, unsubscribed_hypervisor string Values: reboot_needed, process_restart_needed, updated string string text Values: mismatched, matched, not_specified user.firstname string user.lastname string user.login string user.mail string string usergroup.name string string", "name": "help", "shortname": "h", "value": null @@ -39227,7 +39330,7 @@ "value": null }, { - "help": "Print help -----------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | | x | x | | x | x | Features/name | x | x | Features/version | x | x | Locations/ | x | x | Organizations/ | x | x | at | x | x | at | x | x | -----------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. 
Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help -----------------|-----|---------|----- | ALL | DEFAULT | THIN -----------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | | x | x | count | x | x | Features/name | x | x | Features/version | x | x | Locations/ | x | x | Organizations/ | x | x | at | x | x | at | x | x | -----------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -40106,7 +40209,7 @@ "value": null }, { - "help": "Print help ----------------|-----|-------- | ALL | DEFAULT ----------------|-----|-------- | x | x line | x | x | x | x occurrence | x | x occurrence | x | x | x | x limit | x | x until | x | x | x | x ----------------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help ----------------|-----|-------- | ALL | DEFAULT ----------------|-----|-------- | x | x line | x | x | x | x occurrence | x | x occurrence | x | x count | x | x | x | x occurrence | x | x occurrence | x | x | x | x limit | x | x limit | x | x until | x | x | x | x | x | x ----------------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -40185,7 +40288,7 @@ "value": "VALUE" }, { - "help": "Print help ----------|-----|-------- | ALL | DEFAULT ----------|-----|-------- | x | x line | x | x | x | x time | x | x | x | x ----------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help ----------------|-----|-------- | ALL | DEFAULT ----------------|-----|-------- | x | x line | x | x count | x | x | x | x occurrence | x | x occurrence | x | x | x | x limit | x | x time | x | x | x | x | x | x ----------------|-----|-------- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -40530,7 +40633,7 @@ "value": null }, { - "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | at | x | x | | x | x | status/applied | x | x | status/restarted | x | x | status/failed | x | x | status/restart failures | x | x | status/skipped | x | x | status/pending | x | x | metrics/config_retrieval | x | x | metrics/exec | x | x | metrics/file | x | x | metrics/package | x | x | metrics/service | x | x | metrics/user | x | x | metrics/yumrepo | x | x | metrics/filebucket | x | x | metrics/cron | x | x | metrics/total | x | x | Logs/resource | x | x | Logs/message | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. 
JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help --------------------------------|-----|---------|----- | ALL | DEFAULT | THIN --------------------------------|-----|---------|----- | x | x | x | x | x | at | x | x | | x | x | status/applied | x | x | status/restarted | x | x | status/failed | x | x | status/restart failures | x | x | status/skipped | x | x | status/pending | x | x | metrics/config retrieval | x | x | metrics/exec | x | x | metrics/file | x | x | metrics/package | x | x | metrics/service | x | x | metrics/user | x | x | metrics/yumrepo | x | x | metrics/filebucket | x | x | metrics/cron | x | x | metrics/total | x | x | Logs/resource | x | x | Logs/message | x | x | --------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -41844,7 +41947,7 @@ "value": "ENUM" }, { - "help": "List of content units to ignore while syncing a yum repository. Must be subset of srpm", + "help": "List of content units to ignore while syncing a yum repository. Must be subset of srpm,treeinfo", "name": "ignorable-content", "shortname": null, "value": "LIST" @@ -41868,10 +41971,10 @@ "value": "VALUE" }, { - "help": "True if this repository when synced has to be mirrored from the source and stale rpms removed (Deprecated)", - "name": "mirror-on-sync", + "help": "Time to expire yum metadata in seconds. Only relevant for custom yum repositories.", + "name": "metadata-expire", "shortname": null, - "value": "BOOLEAN" + "value": "NUMBER" }, { "help": "Policy to set for mirroring content. Must be one of additive. 
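The hunk above swaps the deprecated `mirror-on-sync` BOOLEAN for a `metadata-expire` NUMBER on `repository create` (the same swap appears for `update` further below). A hedged sketch; the product fixture and repository URL are assumptions:

def test_create_repo_with_metadata_expire(target_sat, module_product):
    """Sketch: custom yum repository with an explicit metadata expiry."""
    target_sat.cli.Repository.create(
        {
            'name': 'custom-yum',
            'product-id': module_product.id,
            'content-type': 'yum',
            'url': 'http://example.com/repo/',  # placeholder URL
            'metadata-expire': '7200',  # seconds; only relevant for custom yum repositories
        }
    )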
Possible value(s): 'additive', 'mirror_complete', 'mirror_content_only'", @@ -42122,7 +42225,7 @@ "value": null }, { - "help": "Print help ----------------------------------------------|-----|---------|----- | ALL | DEFAULT | THIN ----------------------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | hat repository | x | x | type | x | x | type | x | x | policy | x | x | | x | x | via http | x | x | at | x | x | path | x | x | policy | x | x | repository name | x | x | image tags filter | x | x | repository name | x | x | content units | x | x | proxy/id | x | x | proxy/name | x | x | proxy/http proxy policy | x | x | Product/id | x | x | Product/name | x | x | key/id | x | x | key/name | x | x | Sync/status | x | x | Sync/last sync date | x | x | | x | x | | x | x | counts/packages | x | x | counts/source rpms | x | x | counts/package groups | x | x | counts/errata | x | x | counts/container image manifest lists | x | x | counts/container image manifests | x | x | counts/container image tags | x | x | counts/files | x | x | counts/module streams | x | x | ----------------------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", + "help": "Print help ----------------------------------------------|-----|---------|----- | ALL | DEFAULT | THIN ----------------------------------------------|-----|---------|----- | x | x | x | x | x | x | x | x | | x | x | | x | x | hat repository | x | x | type | x | x | label | x | x | type | x | x | policy | x | x | | x | x | via http | x | x | at | x | x | path | x | x | policy | x | x | expiration | x | x | repository name | x | x | image tags filter | x | x | repository name | x | x | content units | x | x | proxy/id | x | x | proxy/name | x | x | proxy/http proxy policy | x | x | Product/id | x | x | Product/name | x | x | key/id | x | x | key/name | x | x | Sync/status | x | x | Sync/last sync date | x | x | | x | x | | x | x | counts/packages | x | x | counts/source rpms | x | x | counts/package groups | x | x | counts/errata | x | x | counts/container image manifest lists | x | x | counts/container image manifests | x | x | counts/container image tags | x | x | counts/files | x | x | counts/module streams | x | x | ----------------------------------------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. 
JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string", "name": "help", "shortname": "h", "value": null @@ -42357,7 +42460,7 @@ "value": "VALUE" }, { - "help": "Print help -------------|-----|---------|----- | ALL | DEFAULT | THIN -------------|-----|---------|----- | x | x | x | x | x | x | x | x | type | x | x | | x | x | -------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string string integer text string boolean string string string string string string string integer string Values: true, false", + "help": "Print help --------------|-----|---------|----- | ALL | DEFAULT | THIN --------------|-----|---------|----- | x | x | x | x | x | x | x | x | type | x | x | label | x | x | | x | x | --------------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string string integer text string boolean string string string string string string string integer string Values: true, false", "name": "help", "shortname": "h", "value": null @@ -42661,6 +42764,12 @@ "description": "Show the available repository types", "name": "types", "options": [ + { + "help": "When set to 'True' repository types that are creatable will be returned", + "name": "creatable", + "shortname": null, + "value": "BOOLEAN" + }, { "help": "Show specified fields or predefined field sets only. (See below)", "name": "fields", @@ -42819,7 +42928,7 @@ "value": "NUMBER" }, { - "help": "List of content units to ignore while syncing a yum repository. Must be subset of srpm", + "help": "List of content units to ignore while syncing a yum repository. 
Must be subset of srpm,treeinfo", "name": "ignorable-content", "shortname": null, "value": "LIST" @@ -42837,10 +42946,10 @@ "value": "LIST" }, { - "help": "True if this repository when synced has to be mirrored from the source and stale rpms removed (Deprecated)", - "name": "mirror-on-sync", + "help": "Time to expire yum metadata in seconds. Only relevant for custom yum repositories.", + "name": "metadata-expire", "shortname": null, - "value": "BOOLEAN" + "value": "NUMBER" }, { "help": "Policy to set for mirroring content. Must be one of additive. Possible value(s): 'additive', 'mirror_complete', 'mirror_content_only'", @@ -43486,6 +43595,12 @@ "shortname": null, "value": null }, + { + "help": "Limit content to Red Hat / custom Possible value(s): 'redhat', 'custom'", + "name": "repository-type", + "shortname": null, + "value": "ENUM" + }, { "help": "Search string", "name": "search", @@ -43505,13 +43620,13 @@ "value": "BOOLEAN" }, { - "help": "If true, return custom repository sets along with redhat repos", + "help": "If true, return custom repository sets along with redhat repos. Will be ignored if repository_type is supplied.", "name": "with-custom", "shortname": null, "value": "BOOLEAN" }, { - "help": "Print help -------|-----|---------|----- | ALL | DEFAULT | THIN -------|-----|---------|----- | x | x | x | x | x | | x | x | x -------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. Mostly simple string string string Values: true, false string string string string integer string", + "help": "Print help -------|-----|---------|----- | ALL | DEFAULT | THIN -------|-----|---------|----- | x | x | x | x | x | | x | x | x -------|-----|---------|----- you can find option types and the value an option can accept: One of true/false, yes/no, 1/0 Date and time in YYYY-MM-DD HH:MM:SS or ISO 8601 format Possible values are described in the option's description Path to a file Comma-separated list of key=value. JSON is acceptable and preferred way for such parameters Comma separated list of values. Values containing comma should be quoted or escaped with backslash. JSON is acceptable and preferred way for such parameters Any combination of possible values described in the option's description Numeric value. Integer Comma separated list of values defined by a schema. JSON is acceptable and preferred way for such parameters Value described in the option's description. 
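For the new `repository-type` ENUM on `repository-set list` above, a hedged sketch; note that the updated help states `with-custom` is ignored when `repository_type` is supplied. The organization fixture is an assumption:

def test_list_custom_repository_sets(target_sat, module_org):
    """Sketch: limit repository-set listing to custom content."""
    target_sat.cli.RepositorySet.list(
        {
            'organization-id': module_org.id,
            'repository-type': 'custom',  # or 'redhat'
        }
    )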
Mostly simple string string string Values: true, false string string string string integer string Values: true, false", "name": "help", "shortname": "h", "value": null @@ -52788,12 +52903,6 @@ "shortname": null, "value": "BOOLEAN" }, - { - "help": "The frequency of VM-to-host mapping updates for AHV(in seconds)", - "name": "ahv-update-interval", - "shortname": null, - "value": "NUMBER" - }, { "help": "Hypervisor blacklist, applicable only when filtering mode is set to 2. Wildcards and regular expressions are supported, multiple records must be separated by comma.", "name": "blacklist", @@ -53298,12 +53407,6 @@ "shortname": null, "value": "BOOLEAN" }, - { - "help": "The frequency of VM-to-host mapping updates for AHV(in seconds)", - "name": "ahv-update-interval", - "shortname": null, - "value": "NUMBER" - }, { "help": "Hypervisor blacklist, applicable only when filtering mode is set to 2. Wildcards and regular expressions are supported, multiple records must be separated by comma.", "name": "blacklist", @@ -53500,7 +53603,7 @@ "value": "BOOLEAN" }, { - "help": "Possible value(s): 'actions.katello.content_view.promote_succeeded', 'actions.katello.content_view.publish_succeeded', 'actions.katello.repository.sync_succeeded', 'actions.remote_execution.run_host_job_ansible_configure_cloud_connector_succeeded', 'actions.remote_execution.run_host_job_ansible_enable_web_console_succeeded', 'actions.remote_execution.run_host_job_ansible_run_capsule_upgrade_succeeded', 'actions.remote_execution.run_host_job_ansible_run_host_succeeded', 'actions.remote_execution.run_host_job_ansible_run_insights_plan_succeeded', 'actions.remote_execution.run_host_job_ansible_run_playbook_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_oval_scans_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_scans_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_remove_succeeded', 'actions.remote_execution.run_host_job_katello_group_update_succeeded', 'actions.remote_execution.run_host_job_katello_host_tracer_resolve_succeeded', 'actions.remote_execution.run_host_job_katello_module_stream_action_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_succeeded', 'actions.remote_execution.run_host_job_katello_package_remove_succeeded', 'actions.remote_execution.run_host_job_katello_package_update_succeeded', 'actions.remote_execution.run_host_job_katello_packages_remove_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_packages_update_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_service_restart_succeeded', 'actions.remote_execution.run_host_job_leapp_preupgrade_succeeded', 'actions.remote_execution.run_host_job_leapp_remediation_plan_succeeded', 'actions.remote_execution.run_host_job_leapp_upgrade_succeeded', 'actions.remote_execution.run_host_job_puppet_run_host_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_connector_run_playbook_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_remediate_hosts_succeeded', 'actions.remote_execution.run_host_job_succeeded', 'build_entered', 'build_exited', 'content_view_created', 'content_view_destroyed', 'content_view_updated', 
'domain_created', 'domain_destroyed', 'domain_updated', 'host_created', 'host_destroyed', 'host_updated', 'hostgroup_created', 'hostgroup_destroyed', 'hostgroup_updated', 'model_created', 'model_destroyed', 'model_updated', 'status_changed', 'subnet_created', 'subnet_destroyed', 'subnet_updated', 'user_created', 'user_destroyed', 'user_updated'", + "help": "Possible value(s): 'actions.katello.content_view.promote_succeeded', 'actions.katello.content_view.publish_succeeded', 'actions.katello.repository.sync_succeeded', 'actions.remote_execution.run_host_job_ansible_configure_cloud_connector_succeeded', 'actions.remote_execution.run_host_job_ansible_enable_web_console_succeeded', 'actions.remote_execution.run_host_job_ansible_run_capsule_upgrade_succeeded', 'actions.remote_execution.run_host_job_ansible_run_host_succeeded', 'actions.remote_execution.run_host_job_ansible_run_insights_plan_succeeded', 'actions.remote_execution.run_host_job_ansible_run_playbook_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_oval_scans_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_scans_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_remove_succeeded', 'actions.remote_execution.run_host_job_katello_group_update_succeeded', 'actions.remote_execution.run_host_job_katello_host_tracer_resolve_succeeded', 'actions.remote_execution.run_host_job_katello_module_stream_action_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_succeeded', 'actions.remote_execution.run_host_job_katello_package_remove_succeeded', 'actions.remote_execution.run_host_job_katello_package_update_succeeded', 'actions.remote_execution.run_host_job_katello_packages_remove_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_packages_update_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_service_restart_succeeded', 'actions.remote_execution.run_host_job_leapp_preupgrade_succeeded', 'actions.remote_execution.run_host_job_leapp_remediation_plan_succeeded', 'actions.remote_execution.run_host_job_leapp_upgrade_succeeded', 'actions.remote_execution.run_host_job_puppet_run_host_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_connector_run_playbook_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_remediate_hosts_succeeded', 'actions.remote_execution.run_host_job_run_script_succeeded', 'actions.remote_execution.run_host_job_succeeded', 'actions.remote_execution.run_hosts_job_running', 'actions.remote_execution.run_hosts_job_succeeded', 'build_entered', 'build_exited', 'content_view_created', 'content_view_destroyed', 'content_view_updated', 'domain_created', 'domain_destroyed', 'domain_updated', 'host_created', 'host_destroyed', 'host_updated', 'hostgroup_created', 'hostgroup_destroyed', 'hostgroup_updated', 'model_created', 'model_destroyed', 'model_updated', 'status_changed', 'subnet_created', 'subnet_destroyed', 'subnet_updated', 'user_created', 'user_destroyed', 'user_updated'", "name": "event", "shortname": null, "value": "ENUM" @@ -53840,7 +53943,7 @@ "value": "BOOLEAN" }, { - "help": "Possible value(s): 'actions.katello.content_view.promote_succeeded', 
'actions.katello.content_view.publish_succeeded', 'actions.katello.repository.sync_succeeded', 'actions.remote_execution.run_host_job_ansible_configure_cloud_connector_succeeded', 'actions.remote_execution.run_host_job_ansible_enable_web_console_succeeded', 'actions.remote_execution.run_host_job_ansible_run_capsule_upgrade_succeeded', 'actions.remote_execution.run_host_job_ansible_run_host_succeeded', 'actions.remote_execution.run_host_job_ansible_run_insights_plan_succeeded', 'actions.remote_execution.run_host_job_ansible_run_playbook_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_oval_scans_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_scans_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_remove_succeeded', 'actions.remote_execution.run_host_job_katello_group_update_succeeded', 'actions.remote_execution.run_host_job_katello_host_tracer_resolve_succeeded', 'actions.remote_execution.run_host_job_katello_module_stream_action_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_succeeded', 'actions.remote_execution.run_host_job_katello_package_remove_succeeded', 'actions.remote_execution.run_host_job_katello_package_update_succeeded', 'actions.remote_execution.run_host_job_katello_packages_remove_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_packages_update_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_service_restart_succeeded', 'actions.remote_execution.run_host_job_leapp_preupgrade_succeeded', 'actions.remote_execution.run_host_job_leapp_remediation_plan_succeeded', 'actions.remote_execution.run_host_job_leapp_upgrade_succeeded', 'actions.remote_execution.run_host_job_puppet_run_host_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_connector_run_playbook_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_remediate_hosts_succeeded', 'actions.remote_execution.run_host_job_succeeded', 'build_entered', 'build_exited', 'content_view_created', 'content_view_destroyed', 'content_view_updated', 'domain_created', 'domain_destroyed', 'domain_updated', 'host_created', 'host_destroyed', 'host_updated', 'hostgroup_created', 'hostgroup_destroyed', 'hostgroup_updated', 'model_created', 'model_destroyed', 'model_updated', 'status_changed', 'subnet_created', 'subnet_destroyed', 'subnet_updated', 'user_created', 'user_destroyed', 'user_updated'", + "help": "Possible value(s): 'actions.katello.content_view.promote_succeeded', 'actions.katello.content_view.publish_succeeded', 'actions.katello.repository.sync_succeeded', 'actions.remote_execution.run_host_job_ansible_configure_cloud_connector_succeeded', 'actions.remote_execution.run_host_job_ansible_enable_web_console_succeeded', 'actions.remote_execution.run_host_job_ansible_run_capsule_upgrade_succeeded', 'actions.remote_execution.run_host_job_ansible_run_host_succeeded', 'actions.remote_execution.run_host_job_ansible_run_insights_plan_succeeded', 'actions.remote_execution.run_host_job_ansible_run_playbook_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_oval_scans_succeeded', 'actions.remote_execution.run_host_job_foreman_openscap_run_scans_succeeded', 
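The event ENUM gains several remote execution entries in this change, for example `run_host_job_run_script_succeeded`. A hedged sketch of subscribing a webhook to one of them; the wrapper, target URL, and HTTP method are assumptions:

def test_webhook_on_script_success(target_sat):
    """Sketch: fire a webhook when a script job succeeds on a host."""
    target_sat.cli.Webhook.create(
        {
            'name': 'rex-script-hook',
            'target-url': 'https://example.com/hook',  # placeholder endpoint
            'event': 'actions.remote_execution.run_host_job_run_script_succeeded',
            'http-method': 'POST',
        }
    )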
'actions.remote_execution.run_host_job_katello_errata_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_errata_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_install_succeeded', 'actions.remote_execution.run_host_job_katello_group_remove_succeeded', 'actions.remote_execution.run_host_job_katello_group_update_succeeded', 'actions.remote_execution.run_host_job_katello_host_tracer_resolve_succeeded', 'actions.remote_execution.run_host_job_katello_module_stream_action_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_package_install_succeeded', 'actions.remote_execution.run_host_job_katello_package_remove_succeeded', 'actions.remote_execution.run_host_job_katello_package_update_succeeded', 'actions.remote_execution.run_host_job_katello_packages_remove_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_packages_update_by_search_succeeded', 'actions.remote_execution.run_host_job_katello_service_restart_succeeded', 'actions.remote_execution.run_host_job_leapp_preupgrade_succeeded', 'actions.remote_execution.run_host_job_leapp_remediation_plan_succeeded', 'actions.remote_execution.run_host_job_leapp_upgrade_succeeded', 'actions.remote_execution.run_host_job_puppet_run_host_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_connector_run_playbook_succeeded', 'actions.remote_execution.run_host_job_rh_cloud_remediate_hosts_succeeded', 'actions.remote_execution.run_host_job_run_script_succeeded', 'actions.remote_execution.run_host_job_succeeded', 'actions.remote_execution.run_hosts_job_running', 'actions.remote_execution.run_hosts_job_succeeded', 'build_entered', 'build_exited', 'content_view_created', 'content_view_destroyed', 'content_view_updated', 'domain_created', 'domain_destroyed', 'domain_updated', 'host_created', 'host_destroyed', 'host_updated', 'hostgroup_created', 'hostgroup_destroyed', 'hostgroup_updated', 'model_created', 'model_destroyed', 'model_updated', 'status_changed', 'subnet_created', 'subnet_destroyed', 'subnet_updated', 'user_created', 'user_destroyed', 'user_updated'", "name": "event", "shortname": null, "value": "ENUM" diff --git a/tests/foreman/destructive/conftest.py b/tests/foreman/destructive/conftest.py index f83f6a6313e..c80a2b6d288 100644 --- a/tests/foreman/destructive/conftest.py +++ b/tests/foreman/destructive/conftest.py @@ -20,6 +20,6 @@ def test_foo(session): with session: # your ui test steps here session.architecture.create({'name': 'bar'}) - """ - return module_target_sat.ui_session(test_name, ui_user.login, ui_user.password) + with module_target_sat.ui_session(test_name, ui_user.login, ui_user.password) as session: + yield session diff --git a/tests/foreman/destructive/test_ansible.py b/tests/foreman/destructive/test_ansible.py index 34e7985ec8c..b57ba89c58f 100644 --- a/tests/foreman/destructive/test_ansible.py +++ b/tests/foreman/destructive/test_ansible.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Ansible +:CaseComponent: Ansible-ConfigurationManagement :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -pytestmark = pytest.mark.destructive +pytestmark = [pytest.mark.destructive, pytest.mark.upgrade] def test_positive_persistent_ansible_cfg_change(target_sat): @@ -26,11 +21,7 @@ def test_positive_persistent_ansible_cfg_change(target_sat): :id: c22fcd47-8627-4230-aa1f-7d4fc8517a0e - :BZ: 1786358 - - 
:customerscenario: true - - :Steps: + :steps: 1. Update value in ansible.cfg. 2. Verify value is updated in the file. 3. Run "satellite-installer". @@ -38,6 +29,10 @@ def test_positive_persistent_ansible_cfg_change(target_sat): :expectedresults: Changes in ansible.cfg are persistent after running "satellite-installer". + + :BZ: 1786358 + + :customerscenario: true """ ansible_cfg = '/etc/ansible/ansible.cfg' param = 'local_tmp = /tmp' @@ -53,8 +48,7 @@ def test_positive_import_all_roles(target_sat): :id: 53fe3857-a08f-493d-93c7-3fed331ed391 - :Steps: - + :steps: 1. Navigate to the Configure > Roles page. 2. Click the `Import from [hostname]` button. 3. Get total number of importable roles from pagination. diff --git a/tests/foreman/destructive/test_auth.py b/tests/foreman/destructive/test_auth.py index 30f08f09b65..fad1d3d7276 100644 --- a/tests/foreman/destructive/test_auth.py +++ b/tests/foreman/destructive/test_auth.py @@ -4,27 +4,20 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings from robottelo.constants import HAMMER_CONFIG LOGEDIN_MSG = "Session exists, currently logged in as '{0}'" -LOGEDOFF_MSG = "Using sessions, you are currently not logged in" -NOTCONF_MSG = "Credentials are not configured." password = gen_string('alpha') pytestmark = pytest.mark.destructive @@ -38,11 +31,10 @@ def test_positive_password_reset(target_sat): :expectedresults: verify the 'foreman-rake permissions:reset' command for the admin user :CaseImportance: High - """ result = target_sat.execute('foreman-rake permissions:reset') assert result.status == 0 - reset_password = result.stdout.splitlines()[0].split('password: ')[1] + reset_password = result.stdout.splitlines()[1].split('password: ')[1] result = target_sat.execute( f'''sed -i -e '/username/d;/password/d;/use_sessions/d' {HAMMER_CONFIG};\ echo ' :use_sessions: true' >> {HAMMER_CONFIG}''' @@ -52,5 +44,5 @@ def test_positive_password_reset(target_sat): {'username': settings.server.admin_username, 'password': reset_password} ) result = target_sat.cli.Auth.with_user().status() - assert LOGEDIN_MSG.format(settings.server.admin_username) in result[0]['message'] + assert LOGEDIN_MSG.format(settings.server.admin_username) in result.split("\n")[1] assert target_sat.cli.Org.with_user().list() diff --git a/tests/foreman/destructive/test_capsule.py b/tests/foreman/destructive/test_capsule.py index cc0b23b19eb..1cc63f4fbbe 100644 --- a/tests/foreman/destructive/test_capsule.py +++ b/tests/foreman/destructive/test_capsule.py @@ -1,23 +1,18 @@ """Test class for the capsule CLI. 
-:Requirement: Capsule +:Requirement: ForemanProxy :CaseAutomation: Automated -:CaseLevel: Component +:CaseComponent: ForemanProxy -:CaseComponent: Capsule - -:Team: Endeavour - -:TestType: Functional +:Team: Platform :CaseImportance: Critical -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.hosts import Capsule from robottelo.utils.installer import InstallerCommand @@ -33,8 +28,6 @@ def test_positive_capsule_certs_generate_with_special_char(target_sat): :expectedresults: capsule-certs-generate works - :CaseLevel: Component - :BZ: 1908841 :customerscenario: true diff --git a/tests/foreman/destructive/test_capsule_loadbalancer.py b/tests/foreman/destructive/test_capsule_loadbalancer.py index 5d94b3190af..07ad5cffb27 100644 --- a/tests/foreman/destructive/test_capsule_loadbalancer.py +++ b/tests/foreman/destructive/test_capsule_loadbalancer.py @@ -4,57 +4,69 @@ :CaseAutomation: Automated -:CaseLevel: Integration +:CaseComponent: ForemanProxy -:CaseComponent: Capsule - -:Team: Endeavour - -:TestType: Functional +:Team: Platform :CaseImportance: High -:Upstream: No """ import pytest +from wrapanapi import VmState +from robottelo import constants from robottelo.config import settings -from robottelo.constants import CLIENT_PORT -from robottelo.constants import DataFile +from robottelo.constants import CLIENT_PORT, DataFile from robottelo.utils.installer import InstallerCommand pytestmark = [pytest.mark.no_containers, pytest.mark.destructive] @pytest.fixture(scope='module') -def content_for_client(module_target_sat, module_org, module_lce, module_cv, module_ak): +def content_for_client(module_target_sat, module_sca_manifest_org, module_lce, module_cv): """Setup content to be used by haproxy and client :return: Activation key, client lifecycle environment (used by setup_capsules()) """ - module_target_sat.cli_factory.setup_org_for_a_custom_repo( - { - 'url': settings.repos.RHEL7_OS, - 'organization-id': module_org.id, - 'content-view-id': module_cv.id, - 'lifecycle-environment-id': module_lce.id, - 'activationkey-id': module_ak.id, - } - ) - return {'client_ak': module_ak, 'client_lce': module_lce} + rhel_ver = settings.content_host.default_rhel_version + baseos = f'rhel{rhel_ver}_bos' + appstream = f'rhel{rhel_ver}_aps' + + rh_repos = [] + for repo in [baseos, appstream]: + synced_repo_id = module_target_sat.api_factory.enable_sync_redhat_repo( + constants.REPOS[repo], module_sca_manifest_org.id + ) + repo = module_target_sat.api.Repository(id=synced_repo_id).read() + rh_repos.append(repo) + + module_cv.repository = rh_repos + module_cv.update(['repository']) + module_cv.publish() + module_cv = module_cv.read() + cvv = module_cv.version[0] + cvv.promote(data={'environment_ids': module_lce.id}) + module_cv = module_cv.read() + ak = module_target_sat.api.ActivationKey( + content_view=module_cv, + environment=module_lce, + organization=module_sca_manifest_org, + ).create() + + return {'client_ak': ak, 'client_lce': module_lce} @pytest.fixture(scope='module') def setup_capsules( module_org, - rhel7_contenthost_module, + module_rhel_contenthost, module_lb_capsule, module_target_sat, content_for_client, ): """Install capsules with loadbalancer options""" - extra_cert_var = {'foreman-proxy-cname': rhel7_contenthost_module.hostname} - extra_installer_var = {'certs-cname': rhel7_contenthost_module.hostname} + extra_cert_var = {'foreman-proxy-cname': module_rhel_contenthost.hostname} + extra_installer_var = {'certs-cname': module_rhel_contenthost.hostname}
for capsule in module_lb_capsule: capsule.register_to_cdn() @@ -88,7 +100,7 @@ def setup_capsules( {'id': capsule_id, 'organization-id': module_org.id} ) - yield { + return { 'capsule_1': module_lb_capsule[0], 'capsule_2': module_lb_capsule[1], } @@ -97,20 +109,20 @@ def setup_capsules( @pytest.fixture(scope='module') def setup_haproxy( module_org, - rhel7_contenthost_module, + module_rhel_contenthost, content_for_client, module_target_sat, setup_capsules, ): """Install and configure haproxy and setup logging""" - haproxy = rhel7_contenthost_module + haproxy = module_rhel_contenthost # Using same AK for haproxy just for packages haproxy_ak = content_for_client['client_ak'] haproxy.execute('firewall-cmd --add-service RH-Satellite-6-capsule') haproxy.execute('firewall-cmd --runtime-to-permanent') haproxy.install_katello_ca(module_target_sat) haproxy.register_contenthost(module_org.label, haproxy_ak.name) - result = haproxy.execute('yum install haproxy policycoreutils-python -y') + result = haproxy.execute('yum install haproxy policycoreutils-python-utils -y') assert result.status == 0 haproxy.execute('rm -f /etc/haproxy/haproxy.cfg') haproxy.session.sftp_write( @@ -176,50 +188,48 @@ def loadbalancer_setup( @pytest.mark.e2e @pytest.mark.tier1 -def test_loadbalancer_register_client_using_ak_to_ha_proxy(loadbalancer_setup, rhel7_contenthost): - """Register the client using ak to the haproxy +@pytest.mark.rhel_ver_list([settings.content_host.default_rhel_version]) +def test_loadbalancer_install_package( + loadbalancer_setup, setup_capsules, rhel_contenthost, module_org, module_location, request +): + r"""Install packages on a content host regardless of the registered capsule being available :id: bd3c2e50-18e2-4be7-8a7f-c32472e17c61 - :Steps: + :steps: 1. run `subscription-manager register --org=Your_Organization \ - --activationkey=Your_Activation_Key \ - --serverurl=https://loadbalancer.example.com:8443/rhsm \ - --baseurl=https://loadbalancer.example.com/pulp/content` - 2. Check which capsule the host got registered. - 3. Try package installation - 4. Remove the package and unregister the host - 5. Again register, verify it's the other capsule this time. + --activationkey=Your_Activation_Key` + 2. Try package installation + 3. Check which capsule the host got registered to. + 4. Remove the package + 5. Take down the capsule that the host was registered to 6. Try package installation again :expectedresults: The client should get the package irrespective of the capsule registration.
- :CaseLevel: Integration - """ - url = f'https://{loadbalancer_setup["setup_haproxy"]["haproxy"].hostname}' - server_url = f'{url}:8443/rhsm' - base_url = f'{url}/pulp/content' + """ - result = rhel7_contenthost.download_install_rpm( - repo_url=f'{url}/pub', package_name='katello-ca-consumer-latest.noarch' - ) - assert result.status == 0 - rhel7_contenthost.register_contenthost( - org=loadbalancer_setup['module_org'].label, - activation_key=loadbalancer_setup['content_for_client']['client_ak'].name, - serverurl=server_url, - baseurl=base_url, + # Register content host + result = rhel_contenthost.register( + org=module_org, + loc=module_location, + activation_keys=loadbalancer_setup['content_for_client']['client_ak'].name, + target=setup_capsules['capsule_1'], + force=True, ) - result = rhel7_contenthost.execute('yum install -y tree') + assert result.status == 0, f'Failed to register host: {result.stderr}' + + # Try package installation + result = rhel_contenthost.execute('yum install -y tree') assert result.status == 0 hosts = loadbalancer_setup['module_target_sat'].cli.Host.list( {'organization-id': loadbalancer_setup['module_org'].id} ) - assert rhel7_contenthost.hostname in [host['name'] for host in hosts] + assert rhel_contenthost.hostname in [host['name'] for host in hosts] - result = rhel7_contenthost.execute('rpm -qa | grep katello-ca-consumer') + result = rhel_contenthost.execute('rpm -qa | grep katello-ca-consumer') # Find which capsule the host is registered to since it's RoundRobin # The following also asserts the above result @@ -228,43 +238,21 @@ def test_loadbalancer_register_client_using_ak_to_ha_proxy(loadbalancer_setup, r if loadbalancer_setup['setup_capsules']['capsule_1'].hostname in result.stdout else loadbalancer_setup['setup_capsules']['capsule_2'] ) - # Find the other capsule - for capsule in loadbalancer_setup['setup_capsules'].values(): - if registered_to_capsule != capsule: - other_capsule = capsule - - result = rhel7_contenthost.execute('yum remove -y tree') - assert result.status == 0 - - # For other capsule - rhel7_contenthost.remove_katello_ca() - rhel7_contenthost.unregister() - - result = rhel7_contenthost.execute('rm -f katello-ca-consumer-latest.noarch.rpm') - assert result.status == 0 - - result = rhel7_contenthost.download_install_rpm( - repo_url=f'{url}/pub', package_name='katello-ca-consumer-latest.noarch' + request.addfinalizer( + lambda: registered_to_capsule.power_control(state=VmState.RUNNING, ensure=True) ) + + # Remove the packages from the client + result = rhel_contenthost.execute('yum remove -y tree') assert result.status == 0 - rhel7_contenthost.register_contenthost( - org=loadbalancer_setup['module_org'].label, - activation_key=loadbalancer_setup['content_for_client']['client_ak'].name, - serverurl=server_url, - baseurl=base_url, - ) - result = rhel7_contenthost.execute('rpm -qa | grep katello-ca-consumer') - assert other_capsule.hostname in result.stdout + # Power off the capsule that the client is registered to + registered_to_capsule.power_control(state=VmState.STOPPED, ensure=True) - result = rhel7_contenthost.execute('yum install -y tree') + # Try package installation again + result = rhel_contenthost.execute('yum install -y tree') assert result.status == 0 - hosts = loadbalancer_setup['module_target_sat'].cli.Host.list( - {'organization-id': loadbalancer_setup['module_org'].id} - ) - assert rhel7_contenthost.hostname in [host['name'] for host in hosts] - @pytest.mark.rhel_ver_match('[^6]') @pytest.mark.tier1 @@ -279,7 +267,7 @@ def 
test_client_register_through_lb( :id: c7e47d61-167b-4fc2-8d1a-d9a64350fdc4 - :Steps: + :steps: 1. Setup capsules, host and loadbalancer. 2. Generate curl command for host registration. 3. Register host through loadbalancer using global registration @@ -287,8 +275,6 @@ def test_client_register_through_lb( :expectedresults: Global Registration should have option to register through loadbalancer and host should get registered successfully. - :CaseLevel: Integration - :BZ: 1963266 :customerscenario: true @@ -305,7 +291,7 @@ def test_client_register_through_lb( loadbalancer_setup['setup_haproxy']['haproxy'].hostname in rhel_contenthost.subscription_config['server']['hostname'] ) - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] + assert rhel_contenthost.subscription_config['server']['port'] == CLIENT_PORT # Host registration by Second Capsule through Loadbalancer result = rhel_contenthost.register( @@ -320,7 +306,7 @@ def test_client_register_through_lb( loadbalancer_setup['setup_haproxy']['haproxy'].hostname in rhel_contenthost.subscription_config['server']['hostname'] ) - assert CLIENT_PORT == rhel_contenthost.subscription_config['server']['port'] + assert rhel_contenthost.subscription_config['server']['port'] == CLIENT_PORT hosts = loadbalancer_setup['module_target_sat'].cli.Host.list( {'organization-id': loadbalancer_setup['module_org'].id} diff --git a/tests/foreman/destructive/test_capsulecontent.py b/tests/foreman/destructive/test_capsulecontent.py new file mode 100644 index 00000000000..12f3455c08d --- /dev/null +++ b/tests/foreman/destructive/test_capsulecontent.py @@ -0,0 +1,179 @@ +"""Capsule-Content related tests, which require destructive Satellite + +:Requirement: Capsule-Content + +:CaseAutomation: Automated + +:CaseComponent: Capsule-Content + +:team: Phoenix-content + +:CaseImportance: High + +""" +from box import Box +from fauxfactory import gen_alpha +import pytest + +from robottelo import constants + +pytestmark = [pytest.mark.destructive] + + +@pytest.mark.tier4 +@pytest.mark.skip_if_not_set('capsule') +def test_positive_sync_without_deadlock( + target_sat, large_capsule_configured, function_entitlement_manifest_org +): + """Synchronize one bigger repo published in multiple CVs to a blank Capsule. + Assert that the sync task succeeds and no deadlock happens. + + :id: 91c6eec9-a582-46ea-9898-bdcaebcea2f0 + + :setup: + 1. A blank external capsule that has not been synced yet (!) with immediate download + policy and running multiple (4 or more) pulpcore workers. + + :steps: + 1. Sync one bigger repository to the Satellite. + 2. Create a Content View, add the repository and publish it. + 3. Create several copies of the CV and publish them. + 4. Add the Library LCE to the Capsule. + 5. Sync the Capsule. + + :expectedresults: + 1. Sync passes without deadlock. + + :customerscenario: true + + :BZ: 2062526 + """ + # Note: As of now BZ#2122872 prevents us from using the originally intended RHEL7 repo because + # of a memory leak causing Satellite OOM crash in this scenario. Therefore, for now we use the + # smaller RHSCL repo instead, which was also capable of hitting the deadlock issue, regardless + # of the lower rpm count. When the BZ is fixed, consider scaling back up to the RHEL7 repo or similar.
+ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=function_entitlement_manifest_org.id, + product=constants.REPOS['rhscl7']['product'], + repo=constants.REPOS['rhscl7']['name'], + reposet=constants.REPOSET['rhscl7'], + releasever=constants.REPOS['rhscl7']['releasever'], + ) + repo = target_sat.api.Repository(id=repo_id).read() + repo.sync(timeout='60m') + + cv = target_sat.publish_content_view(function_entitlement_manifest_org, repo) + + for _ in range(4): + copy_id = target_sat.api.ContentView(id=cv.id).copy(data={'name': gen_alpha()})['id'] + copy_cv = target_sat.api.ContentView(id=copy_id).read() + copy_cv.publish() + + proxy = large_capsule_configured.nailgun_smart_proxy.read() + proxy.download_policy = 'immediate' + proxy.update(['download_policy']) + + nailgun_capsule = large_capsule_configured.nailgun_capsule + lce = target_sat.api.LifecycleEnvironment( + organization=function_entitlement_manifest_org + ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] + nailgun_capsule.content_add_lifecycle_environment(data={'environment_id': lce.id}) + result = nailgun_capsule.content_lifecycle_environments() + assert len(result['results']) == 1 + assert result['results'][0]['id'] == lce.id + sync_status = nailgun_capsule.content_sync(timeout='90m') + assert sync_status['result'] == 'success', 'Capsule sync task failed.' + + +@pytest.mark.tier4 +@pytest.mark.skip_if_not_set('capsule') +def test_positive_sync_without_deadlock_after_rpm_trim_changelog( + target_sat, capsule_configured, function_entitlement_manifest_org +): + """Promote a CV published with larger repos into multiple LCEs, assign LCEs to blank Capsule. + Assert that the sync task succeeds and no deadlock happens. + + :id: 91c6eec9-a582-46ea-9898-bdcaebcea2f1 + + :setup: + 1. Blank external capsule which is not synced yet, running with 4 or more pulpcore workers + + :steps: + 1. Sync a few large repositories to the Satellite. + 2. Create a Content View, add the repositories and publish it. + 3. Promote it to 10+ LCEs and assign them to the Capsule + 4. Sync the Capsule + 5. pulpcore-manager rpm-trim-changelogs --changelog-limit x + 6. Sync the Capsule again and verify no deadlock occurs. + 7. Repeat step 5 with a lower changelog limit and step 6 again + + :expectedresults: + 1. Sync passes without deadlock.
+ + :customerscenario: true + + :BZ: 2170535, 2218661 + """ + org = function_entitlement_manifest_org + rh_repos = [] + tasks = [] + LCE_COUNT = 10 + for name in ['rhel8_bos', 'rhel8_aps']: + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=org.id, + product=constants.REPOS[name]['product'], + repo=constants.REPOS[name]['name'], + reposet=constants.REPOS[name]['reposet'], + releasever=constants.REPOS[name]['releasever'], + ) + # Sync step because repo is not synced by default + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() + task = rh_repo.sync(synchronous=False) + tasks.append(task) + rh_repos.append(rh_repo) + for task in tasks: + target_sat.wait_for_tasks( + search_query=(f'id = {task["id"]}'), + poll_timeout=2500, + ) + task_status = target_sat.api.ForemanTask(id=task['id']).poll() + assert task_status['result'] == 'success' + + cv = target_sat.publish_content_view(org, rh_repos) + cvv = cv.version[0].read() + nailgun_capsule = capsule_configured.nailgun_capsule + + for i in range(LCE_COUNT): + lce = target_sat.api.LifecycleEnvironment(name=f'lce{i}', organization=org).create() + cvv.promote(data={'environment_ids': lce.id, 'force': False}) + nailgun_capsule.content_add_lifecycle_environment(data={'environment_id': lce.id}) + + result = Box(nailgun_capsule.content_lifecycle_environments()) + assert len(result.results) == LCE_COUNT + # Verify all LCEs have the CV promoted + assert [env.content_views[0].name for env in result.results].count(cv.name) == LCE_COUNT + + sync_status = nailgun_capsule.content_sync(timeout='60m') + assert sync_status['result'] == 'success', 'Capsule sync task failed' + + # Run rpm-trim-changelogs with changelog-limit below 10, then sync the capsule again, + # and repeat with ever lower changelog limits to make sure no deadlock occurs + pulp_cmd = ( + 'sudo -u pulp PULP_SETTINGS="/etc/pulp/settings.py" ' + '/usr/bin/pulpcore-manager rpm-trim-changelogs --changelog-limit ' + ) + for limit in range(9, 0, -1): + result = target_sat.execute(f'{pulp_cmd}{limit}') + assert f'Trimmed changelogs for {cvv.package_count} packages' in result.stdout + target_sat.cli.ContentView.version_republish_repositories({'id': cvv.id, 'force': 'true'}) + + sync_status = nailgun_capsule.content_sync(timeout='60m') + assert sync_status['result'] == 'success', 'Capsule sync task failed' + + # Verify no deadlock was detected in the postgres logs and pulp/syslogs + day = target_sat.execute('date').stdout[:3] + check_log_files = [f'/var/lib/pgsql/data/log/postgresql-{day}.log', '/var/log/messages'] + for file in check_log_files: + assert capsule_configured.execute(f'grep -i "deadlock detected" {file}').status diff --git a/tests/foreman/destructive/test_clone.py b/tests/foreman/destructive/test_clone.py index 861ae2dc311..f5f75dcd1a1 100644 --- a/tests/foreman/destructive/test_clone.py +++ b/tests/foreman/destructive/test_clone.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: SatelliteClone :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -27,10 +22,11 @@ @pytest.mark.e2e -@pytest.mark.parametrize("sat_ready_rhel", [8], indirect=True) @pytest.mark.parametrize('backup_type', ['online', 'offline']) @pytest.mark.parametrize('skip_pulp', [False, True], ids=['include_pulp', 'skip_pulp']) -def test_positive_clone_backup(target_sat, sat_ready_rhel, backup_type, skip_pulp): +def test_positive_clone_backup( + target_sat, sat_ready_rhel, backup_type,
skip_pulp, custom_synced_repo +): """Make an online/offline backup with/without pulp data of Satellite and clone it (restore it). :id: 5b9182d5-6789-4d2c-bcc3-6641b96ab277 @@ -46,13 +42,15 @@ def test_positive_clone_backup(target_sat, sat_ready_rhel, backup_type, skip_pul :parametrized: yes - :BZ: 2142514 + :BZ: 2142514, 2013776 :customerscenario: true """ rhel_version = sat_ready_rhel._v_major sat_version = target_sat.version + pulp_artifact_len = len(target_sat.execute('ls /var/lib/pulp/media/artifact').stdout) + # SATELLITE PART - SOURCE SERVER # Enabling and starting services assert target_sat.cli.Service.enable().status == 0 @@ -67,16 +65,6 @@ def test_positive_clone_backup(target_sat, sat_ready_rhel, backup_type, skip_pul assert backup_result.status == 0 sat_backup_dir = backup_result.stdout.strip().split()[-2] - if skip_pulp: - # Copying satellite pulp data to target RHEL - assert sat_ready_rhel.execute('mkdir -p /var/lib/pulp').status == 0 - assert ( - target_sat.execute( - f'''sshpass -p "{SSH_PASS}" scp -o StrictHostKeyChecking=no \ - -r /var/lib/pulp root@{sat_ready_rhel.hostname}:/var/lib/pulp/pulp''' - ).status - == 0 - ) # Copying satellite backup to target RHEL assert ( target_sat.execute( @@ -95,7 +83,9 @@ def test_positive_clone_backup(target_sat, sat_ready_rhel, backup_type, skip_pul # Disabling repositories assert sat_ready_rhel.execute('subscription-manager repos --disable=*').status == 0 # Getting satellite maintenance repo - sat_ready_rhel.download_repofile(product='satellite', release=sat_version) + sat_ready_rhel.download_repofile( + product='satellite', release=sat_version, snap=settings.server.version.snap + ) # Enabling repositories for repo in getattr(constants, f"OHSNAP_RHEL{rhel_version}_REPOS"): sat_ready_rhel.enable_repo(repo, force=True) @@ -117,6 +107,22 @@ def test_positive_clone_backup(target_sat, sat_ready_rhel, backup_type, skip_pul cloned_sat = Satellite(sat_ready_rhel.hostname) assert cloned_sat.cli.Health.check().status == 0 + # If --skip-pulp-data was used, make sure /var/lib/pulp can be rsynced over per BZ#2013776 + if skip_pulp: + # Copying satellite pulp data to target RHEL + assert ( + target_sat.execute( + f'sshpass -p "{SSH_PASS}" rsync -e "ssh -o StrictHostKeyChecking=no" --archive --partial --progress --compress ' + f'/var/lib/pulp root@{sat_ready_rhel.hostname}:/var/lib/' + ).status + == 0 + ) + + # Make sure all of the pulp data that was on the original Satellite is on the clone + assert ( + len(sat_ready_rhel.execute('ls /var/lib/pulp/media/artifact').stdout) == pulp_artifact_len + ) + @pytest.mark.pit_server def test_positive_list_tasks(target_sat): diff --git a/tests/foreman/destructive/test_contenthost.py b/tests/foreman/destructive/test_contenthost.py index 451e8ca6b35..d300ef9ab0e 100644 --- a/tests/foreman/destructive/test_contenthost.py +++ b/tests/foreman/destructive/test_contenthost.py @@ -4,23 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hosts-Content :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest from robottelo.config import settings -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE +from robottelo.constants import FAKE_0_CUSTOM_PACKAGE, FAKE_1_CUSTOM_PACKAGE pytestmark = pytest.mark.destructive @@ -34,8 +28,6 @@ def test_content_access_after_stopped_foreman(target_sat, rhel7_contenthost): :expectedresults: Package should get installed even after foreman service is stopped - :CaseLevel:
System - :CaseImportance: Medium :CaseComponent: Infrastructure @@ -46,13 +38,6 @@ def test_content_access_after_stopped_foreman(target_sat, rhel7_contenthost): """ org = target_sat.api.Organization().create() org.sca_disable() - # adding remote_execution_connect_by_ip=Yes at org level - target_sat.api.Parameter( - name='remote_execution_connect_by_ip', - value='Yes', - parameter_type='boolean', - organization=org.id, - ).create() lce = target_sat.api.LifecycleEnvironment(organization=org).create() repos_collection = target_sat.cli_factory.RepositoryCollection( distro='rhel7', @@ -60,7 +45,7 @@ def test_content_access_after_stopped_foreman(target_sat, rhel7_contenthost): target_sat.cli_factory.YumRepository(url=settings.repos.yum_1.url), ], ) - repos_collection.setup_content(org.id, lce.id, upload_manifest=False) + repos_collection.setup_content(org.id, lce.id, upload_manifest=False, override=True) repos_collection.setup_virtual_machine(rhel7_contenthost, install_katello_agent=False) result = rhel7_contenthost.execute(f'yum -y install {FAKE_1_CUSTOM_PACKAGE}') assert result.status == 0 diff --git a/tests/foreman/destructive/test_contentview.py b/tests/foreman/destructive/test_contentview.py index 99b1ea2a54b..03af433a939 100644 --- a/tests/foreman/destructive/test_contentview.py +++ b/tests/foreman/destructive/test_contentview.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from nailgun.entity_mixins import TaskFailedError +import pytest from robottelo import constants diff --git a/tests/foreman/destructive/test_discoveredhost.py b/tests/foreman/destructive/test_discoveredhost.py index ace6c579106..8034957ec72 100644 --- a/tests/foreman/destructive/test_discoveredhost.py +++ b/tests/foreman/destructive/test_discoveredhost.py @@ -8,19 +8,13 @@ :CaseAutomation: Automated -:CaseLevel: System - -:TestType: Functional - -:Upstream: No """ -import re from copy import copy +import re -import pytest from nailgun import entity_mixins -from wait_for import TimedOutError -from wait_for import wait_for +import pytest +from wait_for import TimedOutError, wait_for from robottelo.logging import logger @@ -40,7 +34,7 @@ def _read_log(ch, pattern): def _wait_for_log(channel, pattern, timeout=5, delay=0.2): """_read_log method enclosed in wait_for method""" - matching_log = wait_for( + return wait_for( _read_log, func_args=( channel, @@ -51,7 +45,6 @@ def _wait_for_log(channel, pattern, timeout=5, delay=0.2): delay=delay, logger=logger, ) - return matching_log def _assert_discovered_host(host, channel=None, user_config=None, sat=None): @@ -71,9 +64,9 @@ def _assert_discovered_host(host, channel=None, user_config=None, sat=None): ]: try: dhcp_pxe = _wait_for_log(channel, pattern[0], timeout=10) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') + raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') from err groups = re.search('DHCPACK on (\\d.+) to', dhcp_pxe.out) assert len(groups.groups()) == 1, 'Unable to parse bootloader ip address' @@ -88,9 +81,9 @@ def _assert_discovered_host(host, channel=None, user_config=None, sat=None): ]: try: _wait_for_log(channel, pattern[0], timeout=20) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for VM (tftp) to fetch {pattern[1]}') + 
raise AssertionError(f'Timed out waiting for VM (tftp) to fetch {pattern[1]}') from err # assert that server receives DHCP discover from FDI for pattern in [ @@ -102,9 +95,9 @@ def _assert_discovered_host(host, channel=None, user_config=None, sat=None): ]: try: dhcp_fdi = _wait_for_log(channel, pattern[0], timeout=30) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') + raise AssertionError(f'Timed out waiting for {pattern[1]} from VM') from err groups = re.search('DHCPACK on (\\d.+) to', dhcp_fdi.out) assert len(groups.groups()) == 1, 'Unable to parse FDI ip address' fdi_ip = groups.groups()[0] @@ -117,18 +110,18 @@ def _assert_discovered_host(host, channel=None, user_config=None, sat=None): f'"/api/v2/discovered_hosts/facts" for {fdi_ip}', timeout=60, ) - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError('Timed out waiting for /facts POST request') + raise AssertionError('Timed out waiting for /facts POST request') from err groups = re.search('\\[I\\|app\\|([a-z0-9]+)\\]', facts_fdi.out) assert len(groups.groups()) == 1, 'Unable to parse POST request UUID' req_id = groups.groups()[0] try: _wait_for_log(channel, f'\\[I\\|app\\|{req_id}\\] Completed 201 Created') - except TimedOutError: + except TimedOutError as err: # raise assertion error - raise AssertionError('Timed out waiting for "/facts" 201 response') + raise AssertionError('Timed out waiting for "/facts" 201 response') from err default_config = entity_mixins.DEFAULT_SERVER_CONFIG @@ -144,8 +137,10 @@ def _assert_discovered_host(host, channel=None, user_config=None, sat=None): delay=2, logger=logger, ) - except TimedOutError: - raise AssertionError('Timed out waiting for discovered_host to appear on satellite') + except TimedOutError as err: + raise AssertionError( + 'Timed out waiting for discovered_host to appear on satellite' + ) from err discovered_host = sat.api.DiscoveredHost(user_config or default_config).search( query={'search': f'name={host.guest_name}'} ) @@ -211,7 +206,7 @@ def test_positive_provision_pxe_host_dhcp_change(): :Setup: Provisioning should be configured and a host should be discovered - :Steps: + :steps: 1. Set some dhcp range in dhcpd.conf in satellite. 2. Create subnet entity in satellite with a range different from what's defined in `dhcpd.conf`.
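The hunks above convert bare `raise AssertionError(...)` re-raises inside `except TimedOutError:` blocks into explicit exception chaining with `raise ... from err`, so the original timeout stays attached as `__cause__` in the traceback. A minimal standalone sketch of that pattern, using a hypothetical `assert_eventually` helper that is not part of this diff:

```python
# Minimal sketch of the exception-chaining pattern applied in
# _assert_discovered_host above; assert_eventually is a hypothetical helper.
from wait_for import TimedOutError, wait_for


def assert_eventually(predicate, description, timeout=10, delay=0.2):
    """Poll `predicate` until truthy, failing the test with context on timeout."""
    try:
        return wait_for(predicate, timeout=timeout, delay=delay)
    except TimedOutError as err:
        # `from err` preserves the TimedOutError as __cause__, so the timeout
        # details stay visible in the traceback instead of being discarded.
        raise AssertionError(f'Timed out waiting for {description}') from err
```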
diff --git a/tests/foreman/destructive/test_foreman_rake.py b/tests/foreman/destructive/test_foreman_rake.py index f655fc3787b..5162fa94be2 100644 --- a/tests/foreman/destructive/test_foreman_rake.py +++ b/tests/foreman/destructive/test_foreman_rake.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: System - -:TestType: Functional - :CaseImportance: Medium :CaseComponent: TasksPlugin :Team: Endeavour -:Upstream: No """ import pytest @@ -28,7 +23,7 @@ def test_positive_katello_reimport(target_sat): :id: b4119265-1bf0-4b0b-8b96-43f68af39708 - :Steps: Have satellite up and run 'foreman-rake katello:reimport' + :steps: Have satellite up and run 'foreman-rake katello:reimport' :expectedresults: Successfully reimport without errors diff --git a/tests/foreman/destructive/test_foreman_service.py b/tests/foreman/destructive/test_foreman_service.py index 7f481df3114..6d0d24de110 100644 --- a/tests/foreman/destructive/test_foreman_service.py +++ b/tests/foreman/destructive/test_foreman_service.py @@ -4,13 +4,8 @@ :CaseAutomation: Automated -:CaseLevel: System - -:TestType: Functional - :CaseImportance: Medium -:Upstream: No """ import pytest @@ -29,7 +24,7 @@ def test_positive_foreman_service_auto_restart(foreman_service_teardown): :id: 766560b8-30bb-11eb-8dae-d46d6dd3b5b2 - :Steps: + :steps: 1. Stop the Foreman Service 2. Make any API call to Satellite diff --git a/tests/foreman/destructive/test_host.py b/tests/foreman/destructive/test_host.py index c0fe45f1b98..bab00e5acab 100644 --- a/tests/foreman/destructive/test_host.py +++ b/tests/foreman/destructive/test_host.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from airgun.exceptions import NoSuchElementException +import pytest from robottelo.constants import ANY_CONTEXT @@ -41,8 +36,6 @@ def test_positive_cockpit(self, cockpit_host, class_cockpit_sat, class_org): :BZ: 1876220 - :CaseLevel: System - :steps: 1. Kill the cockpit service. 2. Go to the web console and verify that a 503 error is returned.
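The test_infoblox.py changes below drive the installer entirely through `InstallerCommand`, passing bare flags as `installer_args` and key/value pairs as `installer_opts`. A simplified, hypothetical sketch of how such a wrapper maps onto a `satellite-installer` invocation (the real class lives in `robottelo.utils.installer` and may differ in detail):

```python
# Hypothetical, simplified rendering of the InstallerCommand usage seen below;
# not the actual robottelo implementation.
def render_installer_command(installer_args=(), installer_opts=None):
    """Turn bare flags and key/value options into a satellite-installer call."""
    parts = ['satellite-installer']
    # Bare flags, e.g. 'enable-foreman-proxy-plugin-dns-infoblox'
    parts.extend(f'--{arg}' for arg in installer_args)
    # Key/value options, e.g. 'foreman-proxy-dns-provider': 'infoblox'
    parts.extend(f'--{key} {value}' for key, value in (installer_opts or {}).items())
    return ' '.join(parts)


print(
    render_installer_command(
        installer_args=['enable-foreman-proxy-plugin-dns-infoblox'],
        installer_opts={'foreman-proxy-dns-provider': 'infoblox'},
    )
)
# satellite-installer --enable-foreman-proxy-plugin-dns-infoblox --foreman-proxy-dns-provider infoblox
```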
diff --git a/tests/foreman/destructive/test_infoblox.py b/tests/foreman/destructive/test_infoblox.py index 27221f861b9..52e26830b14 100644 --- a/tests/foreman/destructive/test_infoblox.py +++ b/tests/foreman/destructive/test_infoblox.py @@ -2,20 +2,19 @@ :Requirement: Infoblox, Installer -:CaseLevel: System - :CaseComponent: DHCPDNS :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from fauxfactory import gen_mac, gen_string import pytest +import requests +from requests.exceptions import HTTPError +from robottelo.config import settings from robottelo.utils.installer import InstallerCommand pytestmark = pytest.mark.destructive @@ -50,10 +49,44 @@ ), ] +infoblox_plugin_enable = [ + 'enable-foreman-proxy-plugin-dhcp-infoblox', + 'enable-foreman-proxy-plugin-dns-infoblox', +] + +infoblox_plugin_disable = [ + 'no-enable-foreman-proxy-plugin-dhcp-infoblox', + 'no-enable-foreman-proxy-plugin-dns-infoblox', + 'reset-foreman-proxy-dhcp-provider', + 'reset-foreman-proxy-dns-provider', +] +infoblox_plugin_disable_opts = { + 'foreman-proxy-dhcp': 'false', + 'foreman-proxy-dns': 'false', +} + +infoblox_plugin_opts = { + 'foreman-proxy-dhcp': 'true', + 'foreman-proxy-dhcp-managed': 'false', + 'foreman-proxy-dhcp-provider': 'infoblox', + 'foreman-proxy-dhcp-server': settings.infoblox.hostname, + 'foreman-proxy-plugin-dhcp-infoblox-dns-view': 'default', + 'foreman-proxy-plugin-dhcp-infoblox-network-view': 'default', + 'foreman-proxy-plugin-dhcp-infoblox-username': settings.infoblox.username, + 'foreman-proxy-plugin-dhcp-infoblox-password': settings.infoblox.password, + 'foreman-proxy-plugin-dhcp-infoblox-record-type': 'fixedaddress', + 'foreman-proxy-dns': 'true', + 'foreman-proxy-dns-provider': 'infoblox', + 'foreman-proxy-plugin-dns-infoblox-username': settings.infoblox.username, + 'foreman-proxy-plugin-dns-infoblox-password': settings.infoblox.password, + 'foreman-proxy-plugin-dns-infoblox-dns-server': settings.infoblox.hostname, + 'foreman-proxy-plugin-dns-infoblox-dns-view': 'default', +} + @pytest.mark.tier4 @pytest.mark.parametrize( - 'command_args,command_opts,rpm_command', + ('command_args', 'command_opts', 'rpm_command'), params, ids=['isc_dhcp', 'infoblox_dhcp', 'infoblox_dns'], ) @@ -62,7 +95,7 @@ def test_plugin_installation(target_sat, command_args, command_opts, rpm_command :id: c75aa5f3-870a-4f4a-9d7a-0a871b47fd6f - :Steps: Run installer with mininum options required to install plugins + :steps: Run installer with minimum options required to install plugins :expectedresults: Plugins install successfully @@ -76,3 +109,147 @@ def test_plugin_installation(target_sat, command_args, command_opts, rpm_command installer = target_sat.install(InstallerCommand(command_args, **command_opts)) assert 'Success!' in installer.stdout assert target_sat.execute(rpm_command).status == 0 + + +@pytest.mark.e2e +@pytest.mark.parametrize('module_sync_kickstart_content', [8], indirect=True) +def test_infoblox_end_to_end( + request, + module_sync_kickstart_content, + module_provisioning_capsule, + module_target_sat, + module_sca_manifest_org, + module_location, + module_default_org_view, + module_lce_library, + default_architecture, + default_partitiontable, +): + """Verify end to end infoblox plugin integration and host creation with + Infoblox as DHCP and DNS provider + + :id: 0aebdf39-84ba-4182-9a17-6eb523f1695f + + :steps: + 1. Run installer to integrate Satellite and Infoblox. + 2. Assert that the DHCP and DNS provider and server are properly set. + 3. 
Create a host with proper domain and subnet. + 4. Check that an A record is created with the host IP and hostname on Infoblox + 5. Delete the host, domain, subnet + + :expectedresults: Satellite and Infoblox are integrated properly with DHCP, DNS + provider and server. Also, an A record is created for the host. + + :BZ: 1813953, 2210256 + + :customerscenario: true + """ + enable_infoblox_plugin = InstallerCommand( + installer_args=infoblox_plugin_enable, + installer_opts=infoblox_plugin_opts, + ) + result = module_target_sat.install(enable_infoblox_plugin) + assert result.status == 0 + assert 'Success!' in result.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep enable-foreman-proxy-plugin-dhcp-infoblox') + ) + assert 'default: true' in installer.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep enable-foreman-proxy-plugin-dns-infoblox') + ) + assert 'default: true' in installer.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep foreman-proxy-dhcp-provider') + ) + assert 'current: "infoblox"' in installer.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep foreman-proxy-dns-provider') + ) + assert 'current: "infoblox"' in installer.stdout + + installer = module_target_sat.install(InstallerCommand(help='| grep foreman-proxy-dhcp-server')) + assert f'current: "{settings.infoblox.hostname}"' in installer.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep foreman-proxy-plugin-dns-infoblox-dns-server') + ) + assert f'current: "{settings.infoblox.hostname}"' in installer.stdout + + macaddress = gen_mac(multicast=False) + # using the domain name as defined in Infoblox DNS + domain = module_target_sat.api.Domain( + name=settings.infoblox.domain, + location=[module_location], + dns=module_provisioning_capsule.id, + organization=[module_sca_manifest_org], + ).create() + subnet = module_target_sat.api.Subnet( + location=[module_location], + organization=[module_sca_manifest_org], + network=settings.infoblox.network, + cidr=settings.infoblox.network_prefix, + mask=settings.infoblox.netmask, + from_=settings.infoblox.start_range, + to=settings.infoblox.end_range, + boot_mode='DHCP', + ipam='DHCP', + dhcp=module_provisioning_capsule.id, + tftp=module_provisioning_capsule.id, + dns=module_provisioning_capsule.id, + discovery=module_provisioning_capsule.id, + remote_execution_proxy=[module_provisioning_capsule.id], + domain=[domain.id], + ).create() + host = module_target_sat.api.Host( + organization=module_sca_manifest_org, + location=module_location, + name=gen_string('alpha').lower(), + mac=macaddress, + operatingsystem=module_sync_kickstart_content.os, + architecture=default_architecture, + domain=domain, + subnet=subnet, + root_pass=settings.provisioning.host_root_password, + ptable=default_partitiontable, + content_facet_attributes={ + 'content_source_id': module_provisioning_capsule.id, + 'content_view_id': module_default_org_view.id, + 'lifecycle_environment_id': module_lce_library.id, + }, + ).create() + # check if A Record is created for the host IP on Infoblox + url = f'https://{settings.infoblox.hostname}/wapi/v2.0/ipv4address?ip_address={host.ip}' + auth = (settings.infoblox.username, settings.infoblox.password) + result = requests.get(url, auth=auth, verify=False) + assert result.status_code == 200 + # check hostname and IP are present in the A record + assert host.name in result.text + assert host.ip in result.text + # delete host + 
module_target_sat.api.Subnet(id=subnet.id, domain=[]).update() + module_target_sat.api.Host(id=host.id).delete() + module_target_sat.api.Subnet(id=subnet.id).delete() + module_target_sat.api.Domain(id=domain.id).delete() + with pytest.raises(HTTPError): + host.read() + # disable dhcp and dns plugin + disable_infoblox_plugin = InstallerCommand( + installer_args=infoblox_plugin_disable, installer_opts=infoblox_plugin_disable_opts + ) + result = module_target_sat.install(disable_infoblox_plugin) + assert result.status == 0 + assert 'Success!' in result.stdout + installer = module_target_sat.install( + InstallerCommand(help='| grep enable-foreman-proxy-plugin-dhcp-infoblox') + ) + assert 'default: false' in installer.stdout + + installer = module_target_sat.install( + InstallerCommand(help='| grep enable-foreman-proxy-plugin-dns-infoblox') + ) + assert 'default: false' in installer.stdout diff --git a/tests/foreman/destructive/test_installer.py b/tests/foreman/destructive/test_installer.py index 2bade1108a3..36776213c13 100644 --- a/tests/foreman/destructive/test_installer.py +++ b/tests/foreman/destructive/test_installer.py @@ -1,26 +1,22 @@ """Smoke tests to check installation health -:Requirement: Installer +:Requirement: Installation :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Installer +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ +import random + +from fauxfactory import gen_domain, gen_string import pytest -from fauxfactory import gen_domain -from fauxfactory import gen_string -from robottelo.config import settings +from robottelo.constants import SATELLITE_ANSWER_FILE from robottelo.utils.installer import InstallerCommand pytestmark = pytest.mark.destructive @@ -56,8 +52,6 @@ def test_installer_sat_pub_directory_accessibility(target_sat): :CaseImportance: High - :CaseLevel: System - :BZ: 1960801 :customerscenario: true @@ -70,7 +64,10 @@ def test_installer_sat_pub_directory_accessibility(target_sat): https_curl_command = f'curl -i {target_sat.url}/pub/ -k' for command in [http_curl_command, https_curl_command]: accessibility_check = target_sat.execute(command) - assert 'HTTP/1.1 200 OK' or 'HTTP/2 200 ' in accessibility_check.stdout.split('\r\n') + assert ( + 'HTTP/1.1 200 OK' in accessibility_check.stdout + or 'HTTP/2 200' in accessibility_check.stdout + ) target_sat.get( local_path='custom-hiera-satellite.yaml', remote_path=f'{custom_hiera_location}', @@ -80,7 +77,8 @@ def test_installer_sat_pub_directory_accessibility(target_sat): assert 'Success!' in command_output.stdout for command in [http_curl_command, https_curl_command]: accessibility_check = target_sat.execute(command) - assert 'HTTP/1.1 200 OK' or 'HTTP/2 200 ' not in accessibility_check.stdout.split('\r\n') + assert 'HTTP/1.1 200 OK' not in accessibility_check.stdout + assert 'HTTP/2 200' not in accessibility_check.stdout target_sat.put( local_path='custom-hiera-satellite.yaml', remote_path=f'{custom_hiera_location}', @@ -89,46 +87,6 @@ def test_installer_sat_pub_directory_accessibility(target_sat): assert 'Success!' in command_output.stdout -def test_installer_inventory_plugin_update(target_sat): - """DB consistency should not break after enabling the inventory plugin flags - - :id: a2b66d38-e819-428f-9529-23bed398c916 - - :steps: - 1. Enable the cloud inventory plugin flag - - :expectedresults: inventory flag should be updated successfully without any db consistency - error. 
- - :CaseImportance: High - - :CaseLevel: System - - :BZ: 1863597 - - :customerscenario: true - - """ - target_sat.create_custom_repos(rhel7=settings.repos.rhel7_os) - installer_cmd = target_sat.install( - InstallerCommand( - 'enable-foreman-plugin-rh-cloud', - foreman_proxy_plugin_remote_execution_script_install_key=['true'], - ) - ) - assert 'Success!' in installer_cmd.stdout - verify_rhcloud_flag = target_sat.install( - InstallerCommand(help='|grep "\'foreman_plugin_rh_cloud\' puppet module (default: true)"') - ) - assert 'true' in verify_rhcloud_flag.stdout - verify_proxy_plugin_flag = target_sat.install( - InstallerCommand( - **{'full-help': '| grep -A1 foreman-proxy-plugin-remote-execution-script-install-key'} - ) - ) - assert '(current: true)' in verify_proxy_plugin_flag.stdout - - def test_positive_mismatched_satellite_fqdn(target_sat, set_random_fqdn): """The satellite-installer should display the mismatched FQDN @@ -155,3 +113,114 @@ def test_positive_mismatched_satellite_fqdn(target_sat, set_random_fqdn): ) installer_command_output = target_sat.execute('satellite-installer').stderr assert warning_message in str(installer_command_output) + + +def test_positive_installer_certs_regenerate(target_sat): + """Ensure "satellite-installer --certs-regenerate true" command correctly generates + /etc/tomcat/cert-users.properties after editing answers file + + :id: db6152c3-4459-425b-998d-4a7992ca1f72 + + :steps: + 1. Update /etc/foreman-installer/scenarios.d/satellite-answers.yaml + 2. Set 'state' to an empty string in the certs section + 3. Run satellite-installer --certs-regenerate true + 4. hammer ping + + :expectedresults: Correct generation of /etc/tomcat/cert-users.properties + + :BZ: 1964037 + + :customerscenario: true + """ + target_sat.execute(f'sed -i "s/state: North Carolina/state: \'\'/g" {SATELLITE_ANSWER_FILE}') + result = target_sat.execute(f'grep state: {SATELLITE_ANSWER_FILE}') + assert "state: ''" in result.stdout + result = target_sat.install( + InstallerCommand( + 'certs-update-all', + 'certs-update-server', + 'certs-update-server-ca', + certs_regenerate=['true'], + ) + ) + assert result.status == 0 + assert 'FAIL' not in target_sat.cli.Base.ping() + + +def test_positive_installer_puma_worker_count(target_sat): + """Installer should set the puma worker count and thread max without having to manually + restart the foreman service. + + :id: d0e7d958-dd3e-4962-bf5a-8d7ec36f3485 + + :steps: + 1. Check how many puma workers there are + 2. Select a new worker count that is less than the default + 3. Change the answers file to have the new count for puma workers + 4. 
Run satellite-installer --foreman-foreman-service-puma-workers new_count --foreman-foreman-service-puma-threads-max new_count + + :expectedresults: ps aux should show there are only new_count puma workers after the installer runs + + :BZ: 2025760 + + :customerscenario: true + """ + count = int(target_sat.execute('pgrep --full "puma: cluster worker" | wc -l').stdout) + worker_count = str(random.randint(1, count - 1)) + result = target_sat.install( + InstallerCommand( + foreman_foreman_service_puma_workers=worker_count, + foreman_foreman_service_puma_threads_max=worker_count, + ) + ) + assert result.status == 0 + result = target_sat.execute(f'grep "foreman_service_puma_workers" {SATELLITE_ANSWER_FILE}') + assert worker_count in result.stdout + result = target_sat.execute('ps aux | grep -v grep | grep -e USER -e puma') + for i in range(count): + if i < int(worker_count): + assert f'cluster worker {i}' in result.stdout + else: + assert f'cluster worker {i}' not in result.stdout + + +def test_negative_handle_invalid_certificate(cert_setup_destructive_teardown): + """Satellite installer should not make any harmful changes to an existing satellite after an + attempt to use invalid certificates. + + :id: 97b72faf-4684-4d8c-ae0e-1ebd5085620b + + :steps: + 1. Launch satellite installer and attempt to use invalid certificates + + :expectedresults: Satellite installer should fail and Satellite should be running + + :BZ: 2221621, 2238363 + + :customerscenario: true + + """ + cert_data, satellite = cert_setup_destructive_teardown + + # check if satellite is running + result = satellite.execute('hammer ping') + assert result.status == 0, f'Hammer Ping failed:\n{result.stderr}' + + # attempt to use invalid certificates + result = satellite.install( + InstallerCommand( + 'certs-update-server', + 'certs-update-server-ca', + scenario='satellite', + certs_server_cert='/root/certs/invalid.crt', + certs_server_key='/root/certs/invalid.key', + certs_server_ca_cert=f'"/root/{cert_data["ca_bundle_file_name"]}"', + ) + ) + # installer should fail with non-zero value + assert result.status != 0 + assert "verification failed" in result.stdout + + result = satellite.execute('hammer ping') + assert result.status == 0, f'Hammer Ping failed:\n{result.stderr}' diff --git a/tests/foreman/destructive/test_katello_agent.py b/tests/foreman/destructive/test_katello_agent.py index f1af4dab41c..86943017ee7 100644 --- a/tests/foreman/destructive/test_katello_agent.py +++ b/tests/foreman/destructive/test_katello_agent.py @@ -24,6 +24,7 @@ pytestmark = [ pytest.mark.run_in_one_thread, pytest.mark.destructive, + pytest.mark.no_containers, pytest.mark.tier5, pytest.mark.upgrade, ] @@ -39,7 +40,6 @@ def test_positive_apply_errata(katello_agent_client): :parametrized: yes - :CaseLevel: System """ sat = katello_agent_client['sat'] client = katello_agent_client['client'] @@ -67,7 +67,6 @@ def test_positive_install_and_remove_package(katello_agent_client): :parametrized: yes - :CaseLevel: System """ sat = katello_agent_client['sat'] client = katello_agent_client['client'] @@ -92,7 +91,6 @@ def test_positive_upgrade_package(katello_agent_client): :parametrized: yes - :CaseLevel: System """ sat = katello_agent_client['sat'] client = katello_agent_client['client'] @@ -114,7 +112,6 @@ def test_positive_upgrade_packages_all(katello_agent_client): :parametrized: yes - :CaseLevel: System """ sat = katello_agent_client['sat'] client = katello_agent_client['client'] @@ -134,7 +131,6 @@ def test_positive_install_and_remove_package_group(katello_agent_client): 
:parametrized: yes - :CaseLevel: System """ sat = katello_agent_client['sat'] client = katello_agent_client['client'] @@ -146,3 +142,49 @@ sat.cli.Host.package_group_remove(hammer_args) for package in constants.FAKE_0_CUSTOM_PACKAGE_GROUP: assert client.run(f'rpm -q {package}').status != 0 + + +def test_positive_upgrade_warning(sat_with_katello_agent): + """Ensure a warning is displayed when upgrading with katello-agent enabled. + + :id: 2bfc5e3e-e147-4e7a-a25c-85be22ef6921 + + :expectedresults: Upgrade check fails and warning with proper message is displayed. + + """ + sat = sat_with_katello_agent + ver = sat.version.split('.') + target_ver = f'{ver[0]}.{int(ver[1]) + 1}' + warning = ( + 'The katello-agent feature is enabled on this system. As of Satellite 6.15, katello-agent ' + 'is removed and will no longer function. Before proceeding with the upgrade, you should ' + 'ensure that you have deployed and configured an alternative tool for remote package ' + 'management and patching for content hosts, such as Remote Execution (REX) with pull-based ' + 'transport. See the Managing Hosts guide in the Satellite documentation for more info. ' + 'Disable katello-agent with the command `satellite-installer --foreman-proxy-content-enable' + '-katello-agent false` before proceeding with the upgrade. Alternatively, you may skip ' + 'this check and proceed by running satellite-maintain again with the `--whitelist` option, ' + 'which will automatically uninstall katello-agent.' + ) + + upstream_rpms = sat.get_repo_files_by_url(constants.FOREMAN_NIGHTLY_URL) + fm_rpm = [rpm for rpm in upstream_rpms if 'foreman_maintain' in rpm] + assert fm_rpm, 'No upstream foreman-maintain package found' + + for rpm in fm_rpm: + res = sat.execute(f'yum -y install {constants.FOREMAN_NIGHTLY_URL}{rpm}') + assert res.status == 0, f'{rpm} installation failed' + + res = sat.cli.Upgrade.list_versions() + assert res.status == 0, 'Upgrade list-versions command failed' + assert target_ver in res.stdout, 'Target version or Scenario not found' + + res = sat.cli.Upgrade.check( + options={ + 'target-version': target_ver, + 'whitelist': 'repositories-setup,repositories-validate', + 'assumeyes': True, + } + ) + assert res.status, 'Upgrade check passed unexpectedly' + assert warning in res.stdout, 'Katello-agent warning message missing or changed' diff --git a/tests/foreman/destructive/test_katello_certs_check.py b/tests/foreman/destructive/test_katello_certs_check.py index 5e9100aa2e3..db133e99749 100644 --- a/tests/foreman/destructive/test_katello_certs_check.py +++ b/tests/foreman/destructive/test_katello_certs_check.py @@ -4,25 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: System - -:CaseComponent: Certificates +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No - """ import re import pytest -import robottelo.constants as constants -from robottelo.config import settings from robottelo.utils.issue_handlers import is_open pytestmark = pytest.mark.destructive @@ -79,64 +71,6 @@ def test_positive_update_katello_certs(cert_setup_destructive_teardown): assert result.status == 0, 'Not all services are running' -@pytest.mark.e2e -@pytest.mark.parametrize( - 'certs_vm_setup', - [ - {'nick': 'rhel8', 'target_memory': '20GiB', 'target_cores': 4}, - ], - ids=['rhel8'], - indirect=True, -) -def test_positive_install_sat_with_katello_certs(certs_vm_setup): - """Update certificates on a currently running satellite 
instance. - - :id: 47e3a57f-d7a2-40d2-bbc7-d1bb3d79a7e1 - - :steps: - - 1. Generate the custom certs on RHEL 7 machine - 2. Install satellite with custom certs - 3. Assert output does not report SSL certificate error - 4. Assert all services are running - - - :expectedresults: Satellite should be installed using the custom certs. - - :CaseAutomation: Automated - """ - cert_data, rhel_vm = certs_vm_setup - version = rhel_vm.os_version.major - rhel_vm.download_repofile(product='satellite', release=settings.server.version.release) - rhel_vm.register_contenthost( - org=None, - lce=None, - username=settings.subscription.rhn_username, - password=settings.subscription.rhn_password, - ) - result = rhel_vm.subscription_manager_attach_pool([settings.subscription.rhn_poolid])[0] - for repo in getattr(constants, f"OHSNAP_RHEL{version}_REPOS"): - rhel_vm.enable_repo(repo, force=True) - rhel_vm.execute('yum -y update') - result = rhel_vm.execute(getattr(constants, f"INSTALL_RHEL{version}_STEPS")) - assert result.status == 0 - command = ( - 'satellite-installer --scenario satellite ' - f'--certs-server-cert "/root/{cert_data["cert_file_name"]}" ' - f'--certs-server-key "/root/{cert_data["key_file_name"]}" ' - f'--certs-server-ca-cert "/root/{cert_data["ca_bundle_file_name"]}" ' - ) - result = rhel_vm.execute(command, timeout=2200000) - assert result.status == 0 - # assert no hammer ping SSL cert error - result = rhel_vm.execute('hammer ping') - assert 'SSL certificate verification failed' not in result.stdout - assert result.stdout.count('ok') == 8 - # assert all services are running - result = rhel_vm.execute('satellite-maintain health check --label services-up -y') - assert result.status == 0, 'Not all services are running' - - def test_regeneration_ssl_build_certs(target_sat): """delete the ssl-build folder and cross check that ssl-build folder is recovered/regenerated after running the installer diff --git a/tests/foreman/destructive/test_ldap_authentication.py b/tests/foreman/destructive/test_ldap_authentication.py index 2288ed0cd7a..33539fff89e 100644 --- a/tests/foreman/destructive/test_ldap_authentication.py +++ b/tests/foreman/destructive/test_ldap_authentication.py @@ -4,31 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Integration - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import os from time import sleep +from navmazing import NavigationTriesExceeded import pyotp import pytest -from navmazing import NavigationTriesExceeded -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings -from robottelo.constants import CERT_PATH -from robottelo.constants import HAMMER_CONFIG -from robottelo.constants import HAMMER_SESSIONS -from robottelo.constants import LDAP_ATTR +from robottelo.constants import CERT_PATH, HAMMER_CONFIG, HAMMER_SESSIONS, LDAP_ATTR +from robottelo.exceptions import CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.datafactory import gen_string @@ -83,7 +75,7 @@ def set_certificate_in_satellite(server_type, sat, hostname=None): raise AssertionError(f'Failed to restart the httpd after applying {server_type} cert') -@pytest.fixture() +@pytest.fixture def ldap_tear_down(module_target_sat): """Teardown the all ldap settings user, usergroup and ldap delete""" yield @@ -95,14 +87,14 @@ def ldap_tear_down(module_target_sat): ldap_auth.delete() -@pytest.fixture() +@pytest.fixture def external_user_count(module_target_sat): """return the 
external auth source user count""" users = module_target_sat.api.User().search() - yield len([user for user in users if user.auth_source_name == 'External']) + return len([user for user in users if user.auth_source_name == 'External']) -@pytest.fixture() +@pytest.fixture def groups_teardown(module_target_sat): """teardown for groups created for external/remote groups""" yield @@ -115,7 +107,7 @@ def groups_teardown(module_target_sat): user_groups[0].delete() -@pytest.fixture() +@pytest.fixture def rhsso_groups_teardown(module_target_sat, default_sso_host): """Teardown the rhsso groups""" yield @@ -123,7 +115,6 @@ def rhsso_groups_teardown(module_target_sat, default_sso_host): default_sso_host.delete_rhsso_group(group_name) -@pytest.mark.external_auth @pytest.fixture def configure_hammer_session(parametrized_enrolled_sat, enable=True): """Take backup of the hammer config file and enable use_sessions""" @@ -207,9 +198,8 @@ def test_positive_create_with_https( assert ldap_source['ldap_server']['port'] == '636' with module_target_sat.ui_session( test_name, username, auth_data['ldap_user_passwd'] - ) as ldapsession: - with pytest.raises(NavigationTriesExceeded): - ldapsession.user.search('') + ) as ldapsession, pytest.raises(NavigationTriesExceeded): + ldapsession.user.search('') assert module_target_sat.api.User().search(query={'search': f'login="{username}"'}) @@ -227,7 +217,6 @@ def test_single_sign_on_ldap_ipa_server( :expectedresults: After single sign on user should redirected from /extlogin to /hosts page :BZ: 1941997 - """ result = target_sat.execute(f'echo {settings.ipa.password} | kinit {settings.ipa.user}') assert result.status == 0 @@ -252,7 +241,6 @@ def test_single_sign_on_ldap_ad_server( using curl. It should navigate to hosts page. (verify using url only) :BZ: 1941997 - """ # create the kerberos ticket for authentication result = target_sat.execute(f'echo {settings.ldap.password} | kinit {settings.ldap.username}') @@ -387,6 +375,7 @@ def test_external_new_user_login_and_check_count_rhsso( with module_target_sat.ui_session(login=False) as rhsso_session: with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.rhsso_login.login(login_details) + with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.task.read_all() assert error.typename == 'NavigationTriesExceeded' @@ -433,6 +422,7 @@ def test_login_failure_rhsso_user_if_internal_user_exist( with module_target_sat.ui_session(login=False) as rhsso_session: with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.rhsso_login.login(login_details) + with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.task.read_all() assert error.typename == 'NavigationTriesExceeded' @@ -461,7 +451,6 @@ def test_user_permissions_rhsso_user_after_group_delete( :expectedresults: external rhsso user's permissions should get revoked after external rhsso group deletion. 
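A side note on the fixture refactor in this hunk: `external_user_count` switched from `yield` to `return` because it only provides a value, while the teardown fixtures keep `yield` so their cleanup runs after the test. A minimal sketch of the two shapes, with hypothetical fixture names:

    import pytest

    @pytest.fixture
    def value_only_fixture():
        # nothing to clean up afterwards, so a plain return is clearer
        return 42

    @pytest.fixture
    def fixture_with_teardown():
        resource = {'connected': True}  # stand-in for real setup
        yield resource                  # the test body runs while suspended here
        resource['connected'] = False   # teardown statements run after the test

Under this convention, `return` also makes it obvious at a glance that a fixture has no teardown phase at all.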
- """ default_sso_host.get_rhsso_client_id() username = settings.rhsso.rhsso_user @@ -476,9 +465,9 @@ def test_user_permissions_rhsso_user_after_group_delete( default_sso_host.update_rhsso_user(username, group_name=group_name) # creating satellite external group - user_group = module_target_sat.cli_factory.make_usergroup({'admin': 1, 'name': group_name}) + user_group = module_target_sat.cli_factory.usergroup({'admin': 1, 'name': group_name}) external_auth_source = module_target_sat.cli.ExternalAuthSource.info({'name': "External"}) - module_target_sat.cli_factory.make_usergroup_external( + module_target_sat.cli_factory.usergroup_external( { 'auth-source-id': external_auth_source['id'], 'user-group-id': user_group['id'], @@ -549,15 +538,15 @@ def test_user_permissions_rhsso_user_multiple_group( group_names = ['sat_users', 'sat_admins'] arguments = [{'roles': katello_role.name}, {'admin': 1}] external_auth_source = module_target_sat.cli.ExternalAuthSource.info({'name': "External"}) - for group_name, argument in zip(group_names, arguments): + for group_name, argument in zip(group_names, arguments, strict=True): # adding/creating rhsso groups default_sso_host.create_group(group_name=group_name) default_sso_host.update_rhsso_user(username, group_name=group_name) argument['name'] = group_name # creating satellite external groups - user_group = module_target_sat.cli_factory.make_usergroup(argument) - module_target_sat.cli_factory.make_usergroup_external( + user_group = module_target_sat.cli_factory.usergroup(argument) + module_target_sat.cli_factory.usergroup_external( { 'auth-source-id': external_auth_source['id'], 'user-group-id': user_group['id'], @@ -619,7 +608,6 @@ def test_permissions_external_ldap_mapped_rhsso_group( :expectedresults: The external ldap mapped rhsso user should contain the permissions based on the user group level - """ ad_data = ad_data() login_details = { @@ -664,7 +652,6 @@ def test_negative_negotiate_login_without_ticket( :expectedresults: 1. Proper messages are returned in all cases. 2. Login and hosts listing fails without Kerberos ticket. - """ result = parametrized_enrolled_sat.cli.Auth.status() assert NO_KERB_MSG in str(result) @@ -703,7 +690,6 @@ def test_positive_negotiate_login_with_ticket( 2. Negotiate login works with the ticket. 3. External user is created and permissions enforcing works. 4. Proper messages are returned in all cases. - """ auth_type = request.node.callspec.params['parametrized_enrolled_sat'] user = ( @@ -767,7 +753,6 @@ def test_positive_negotiate_CRUD( 3. Listing and CRUD operations via hammer succeed. :BZ: 2122617 - """ auth_type = request.node.callspec.params['parametrized_enrolled_sat'] user = ( @@ -844,7 +829,6 @@ def test_positive_negotiate_logout( 1. Session is closed on log out properly on logout. 2. Hammer command fails after log out. 3. Proper messages are returned in all cases. - """ auth_type = request.node.callspec.params['parametrized_enrolled_sat'] user = ( @@ -878,7 +862,7 @@ def test_positive_negotiate_logout( @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], @@ -907,7 +891,6 @@ def test_positive_autonegotiate( :expectedresults: 1. Kerberos ticket can be acquired. 2. 
Automatic login occurs on first hammer command, user is created - """ auth_type = request.node.callspec.params['parametrized_enrolled_sat'] user = ( @@ -934,7 +917,7 @@ def test_positive_autonegotiate( @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], @@ -967,7 +950,6 @@ def test_positive_negotiate_manual_with_autonegotiation_disabled( 1. Kerberos ticket can be acquired. 2. Manual login successful, user is created. 3. Session is kept for following Hammer commands. - """ with parametrized_enrolled_sat.omit_credentials(): auth_type = request.node.callspec.params['parametrized_enrolled_sat'] @@ -1016,7 +998,7 @@ def test_positive_negotiate_manual_with_autonegotiation_disabled( ids=['sessions_enabled', 'sessions_disabled'], ) @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], @@ -1048,7 +1030,6 @@ def test_negative_autonegotiate_with_autonegotiation_disabled( 1. Kerberos ticket can be acquired. 2. Autonegotiation doesn't occur 3. Action is denied and user not created because the user isn't authenticated. - """ with parametrized_enrolled_sat.omit_credentials(): auth_type = request.node.callspec.params['parametrized_enrolled_sat'] diff --git a/tests/foreman/destructive/test_ldapauthsource.py b/tests/foreman/destructive/test_ldapauthsource.py index 0d3fd88b8e4..0cebd782532 100644 --- a/tests/foreman/destructive/test_ldapauthsource.py +++ b/tests/foreman/destructive/test_ldapauthsource.py @@ -4,26 +4,20 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from time import sleep import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings from robottelo.constants import HAMMER_CONFIG - +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.destructive] @@ -35,20 +29,16 @@ def configure_hammer_session(sat, enable=True): sat.execute(f"echo ' :use_sessions: {'true' if enable else 'false'}' >> {HAMMER_CONFIG}") -@pytest.fixture() +@pytest.fixture def rh_sso_hammer_auth_setup(module_target_sat, default_sso_host, request): """rh_sso hammer setup before running the auth login tests""" configure_hammer_session(module_target_sat) client_config = {'publicClient': 'true'} default_sso_host.update_client_configuration(client_config) - - def rh_sso_hammer_auth_cleanup(): - """restore the hammer config backup file and rhsso client settings""" - module_target_sat.execute(f'mv {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') - client_config = {'publicClient': 'false'} - default_sso_host.update_client_configuration(client_config, module_target_sat) - - request.addfinalizer(rh_sso_hammer_auth_cleanup) + yield + module_target_sat.execute(f'mv {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') + client_config = {'publicClient': 'false'} + default_sso_host.update_client_configuration(client_config) def test_rhsso_login_using_hammer( diff --git a/tests/foreman/destructive/test_leapp_satellite.py b/tests/foreman/destructive/test_leapp_satellite.py index e4683e2f067..9022b4fec86 100644 --- 
a/tests/foreman/destructive/test_leapp_satellite.py +++ b/tests/foreman/destructive/test_leapp_satellite.py @@ -2,23 +2,17 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Upgrades :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from broker import Broker +import pytest -from robottelo.hosts import get_sat_rhel_version -from robottelo.hosts import get_sat_version +from robottelo.hosts import get_sat_rhel_version, get_sat_version @pytest.mark.e2e @@ -47,10 +41,12 @@ def test_positive_leapp(target_sat): ) # Recreate the session object within a Satellite object after upgrading target_sat.connect() + # Clean cached properties after the upgrade + target_sat.clean_cached_properties() # Get RHEL version after upgrading - result = target_sat.execute('cat /etc/redhat-release | grep -Po "\\d"') + res_rhel_version = target_sat.os_version.major # Check if RHEL was upgraded - assert result.stdout[0] == str(orig_rhel_ver + 1), 'RHEL was not upgraded' + assert res_rhel_version == orig_rhel_ver + 1, 'RHEL was not upgraded' # Check satellite's health sat_health = target_sat.execute('satellite-maintain health check') assert sat_health.status == 0, 'Satellite health check failed' diff --git a/tests/foreman/destructive/test_packages.py b/tests/foreman/destructive/test_packages.py new file mode 100644 index 00000000000..f052948002f --- /dev/null +++ b/tests/foreman/destructive/test_packages.py @@ -0,0 +1,55 @@ +"""Test class for satellite-maintain packages functionality + +:Requirement: foreman-maintain + +:CaseAutomation: Automated + +:CaseComponent: SatelliteMaintain + +:Team: Platform + +:CaseImportance: Critical + +""" +import re + +import pytest + +pytestmark = pytest.mark.destructive + + +@pytest.mark.include_capsule +def test_positive_all_packages_update(target_sat): + """Verify update and check-update work as expected. + + :id: eb8a5611-b1a8-4a18-b80e-56b045c0d2f6 + + :steps: + 1. Run yum update + 2. Reboot + 3. Run satellite-maintain packages check-update + + :expectedresults: update should update the packages, + and check-update should list no packages at the end. 
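An aside on the assertion logic used further down in this new test: it detects pending-update rows in `check-update` output with a regex. A quick self-contained check of that pattern against invented sample output:

    import re

    # Same pattern as the test below; matches rows like
    # '\n\nwalrus.noarch    5.21-1    custom_repo\n'
    pattern = '(\\n){1,2}(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\n)'

    no_updates = 'Loaded plugins: foreman-protector\n'
    one_update = '\n\nwalrus.noarch    5.21-1    custom_repo\n'

    assert re.search(pattern, no_updates) is None      # clean system, no match
    assert re.search(pattern, one_update) is not None  # pending update row matched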
+ + :BZ: 2218656 + + :customerscenario: true + """ + # Register to CDN for package updates + target_sat.register_to_cdn() + # Update packages with yum + result = target_sat.execute('yum update -y --disableplugin=foreman-protector') + assert result.status == 0 + # Reboot + if target_sat.execute('needs-restarting -r').status == 1: + target_sat.power_control(state='reboot') + # Run check-update again to verify there are no more packages available to update + result = target_sat.cli.Packages.check_update() + # Regex to match if there are packages available to update + # Matches lines like '\n\nwalrus.noarch 5.21-1 custom_repo\n' + pattern = '(\\n){1,2}(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\n)' + matches = re.search(pattern, result.stdout) + assert matches is None # No packages available to update + assert 'FAIL' not in result.stdout + assert result.status == 0 diff --git a/tests/foreman/destructive/test_ping.py b/tests/foreman/destructive/test_ping.py index 67818b6a74e..96ae171c8f7 100644 --- a/tests/foreman/destructive/test_ping.py +++ b/tests/foreman/destructive/test_ping.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hammer :Team: Endeavour -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest @@ -29,7 +24,7 @@ def tomcat_service_teardown(request, module_target_sat): def _finalize(): assert module_target_sat.cli.Service.start(options={'only': 'tomcat.service'}).status == 0 - yield module_target_sat + return module_target_sat def test_negative_cli_ping_fail_status(tomcat_service_teardown): diff --git a/tests/foreman/destructive/test_puppetplugin.py b/tests/foreman/destructive/test_puppetplugin.py index faef6531427..9bd118ac4e7 100644 --- a/tests/foreman/destructive/test_puppetplugin.py +++ b/tests/foreman/destructive/test_puppetplugin.py @@ -4,22 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from robottelo.constants import PUPPET_CAPSULE_INSTALLER -from robottelo.constants import PUPPET_COMMON_INSTALLER_OPTS +from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS from robottelo.hosts import Satellite from robottelo.utils.installer import InstallerCommand diff --git a/tests/foreman/destructive/test_realm.py b/tests/foreman/destructive/test_realm.py index f0dfdbfc3fe..cbddcdc3bc8 100644 --- a/tests/foreman/destructive/test_realm.py +++ b/tests/foreman/destructive/test_realm.py @@ -4,25 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.base import CLIReturnCodeError - +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.destructive] @@ -40,7 +34,7 @@ def test_positive_delete_by_name( :expectedresults: Realm is deleted """ - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( {'realm-proxy-id': module_fake_proxy.id, 'realm-type': 'Active Directory'} ) module_target_sat.cli.Realm.delete({'name': realm['name']}) @@ -61,7 +55,7 @@ def test_positive_delete_by_id( :expectedresults: Realm is deleted """ - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( {'realm-proxy-id': module_fake_proxy.id, 
'realm-type': 'Active Directory'} ) module_target_sat.cli.Realm.delete({'id': realm['id']}) @@ -83,7 +77,7 @@ def test_positive_realm_info_name( :expectedresults: Realm information obtained by name is correct """ - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': module_fake_proxy.id, @@ -93,7 +87,7 @@ def test_positive_realm_info_name( ) request.addfinalizer(lambda: module_target_sat.cli.Realm.delete({'id': realm['id']})) info = module_target_sat.cli.Realm.info({'name': realm['name']}) - for key in info.keys(): + for key in info: assert info[key] == realm[key] @@ -111,7 +105,7 @@ def test_positive_realm_info_id( :expectedresults: Realm information obtained by ID is correct """ - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': module_fake_proxy.id, @@ -121,7 +115,7 @@ def test_positive_realm_info_id( ) request.addfinalizer(lambda: module_target_sat.cli.Realm.delete({'id': realm['id']})) info = module_target_sat.cli.Realm.info({'id': realm['id']}) - for key in info.keys(): + for key in info: assert info[key] == realm[key] assert info == module_target_sat.cli.Realm.info({'id': realm['id']}) @@ -143,7 +137,7 @@ def test_positive_realm_update_name( """ realm_name = gen_string('alpha', random.randint(1, 30)) new_realm_name = gen_string('alpha', random.randint(1, 30)) - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( { 'name': realm_name, 'realm-proxy-id': module_fake_proxy.id, @@ -175,7 +169,7 @@ def test_negative_realm_update_invalid_type( """ realm_type = 'Red Hat Identity Management' new_realm_type = gen_string('alpha') - realm = module_target_sat.cli_factory.make_realm( + realm = module_target_sat.cli_factory.realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': module_fake_proxy.id, diff --git a/tests/foreman/destructive/test_registration.py b/tests/foreman/destructive/test_registration.py new file mode 100644 index 00000000000..9c8dbfa4929 --- /dev/null +++ b/tests/foreman/destructive/test_registration.py @@ -0,0 +1,92 @@ +"""Tests for registration. 
+ +:Requirement: Registration + +:CaseComponent: Registration + +:CaseAutomation: Automated + +:CaseImportance: High + +:Team: Rocket +""" +import pytest + +from robottelo.config import settings + +pytestmark = pytest.mark.destructive + + +@pytest.mark.tier3 +@pytest.mark.no_containers +@pytest.mark.rhel_ver_match('[^6]') +def test_host_registration_rex_pull_mode( + module_org, + module_satellite_mqtt, + module_location, + module_ak_with_cv, + module_capsule_configured_mqtt, + rhel_contenthost, +): + """Verify content host registration with Satellite/Capsule as MQTT broker + + :id: a082f599-fbf7-4779-aa18-5139e2bce779 + + :expectedresults: Host registered successfully with MQTT broker + + :parametrized: yes + """ + org = module_org + client_repo = settings.repos.SATCLIENT_REPO[f'rhel{rhel_contenthost.os_version.major}'] + # register host to satellite with pull provider rex + command = module_satellite_mqtt.api.RegistrationCommand( + organization=org, + location=module_location, + activation_keys=[module_ak_with_cv.name], + setup_remote_execution_pull=True, + insecure=True, + repo=client_repo, + ).create() + result = rhel_contenthost.execute(command) + assert result.status == 0, f'Failed to register host: {result.stderr}' + + # check mqtt client is running + result = rhel_contenthost.execute('systemctl status yggdrasild') + assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' + assert rhel_contenthost.execute('yggdrasil status').status == 0 + mqtt_url = f'mqtts://{module_satellite_mqtt.hostname}:1883' + assert rhel_contenthost.execute(f'cat /etc/yggdrasil/config.toml | grep {mqtt_url}').status == 0 + + # Update module_capsule_configured_mqtt to include module_org/module_location + nc = module_capsule_configured_mqtt.nailgun_smart_proxy + module_satellite_mqtt.api.SmartProxy(id=nc.id, organization=[org]).update(['organization']) + module_satellite_mqtt.api.SmartProxy(id=nc.id, location=[module_location]).update(['location']) + + # register host to capsule with pull provider rex + command = module_satellite_mqtt.api.RegistrationCommand( + smart_proxy=nc, + organization=org, + location=module_location, + activation_keys=[module_ak_with_cv.name], + setup_remote_execution_pull=True, + repo=client_repo, + insecure=True, + force=True, + ).create() + result = rhel_contenthost.execute(command) + assert result.status == 0, f'Failed to register host: {result.stderr}' + + # check mqtt client is running + result = rhel_contenthost.execute('systemctl status yggdrasild') + assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' + assert rhel_contenthost.execute('yggdrasil status').status == 0 + new_mqtt_url = f'mqtts://{module_capsule_configured_mqtt.hostname}:1883' + assert ( + rhel_contenthost.execute(f'cat /etc/yggdrasil/config.toml | grep {new_mqtt_url}').status + == 0 + ) + # After force register existing config.toml is saved as backup + assert ( + rhel_contenthost.execute(f'cat /etc/yggdrasil/config.toml.bak | grep {mqtt_url}').status + == 0 + ) diff --git a/tests/foreman/destructive/test_remoteexecution.py b/tests/foreman/destructive/test_remoteexecution.py index 2be17e31d58..c9c6bdb8b22 100644 --- a/tests/foreman/destructive/test_remoteexecution.py +++ b/tests/foreman/destructive/test_remoteexecution.py @@ -4,22 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from 
nailgun import client from nailgun.entity_mixins import TaskFailedError +import pytest from robottelo.config import get_credentials from robottelo.hosts import get_sat_version @@ -121,7 +116,7 @@ def test_positive_use_alternate_directory( assert result.status == 0 command = f'echo {gen_string("alpha")}' - invocation_command = target_sat.cli_factory.make_job_invocation( + invocation_command = target_sat.cli_factory.job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -131,14 +126,14 @@ def test_positive_use_alternate_directory( result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) try: assert result['success'] == '1' - except AssertionError: + except AssertionError as err: output = ' '.join( target_sat.cli.JobInvocation.get_output( {'id': invocation_command['id'], 'host': client.hostname} ) ) result = f'host output: {output}' - raise AssertionError(result) + raise AssertionError(result) from err task = target_sat.cli.Task.list_tasks({'search': command})[0] search = target_sat.cli.Task.list_tasks({'search': f'id={task["id"]}'}) diff --git a/tests/foreman/destructive/test_rename.py b/tests/foreman/destructive/test_rename.py index 2204e3df5b6..03fd9ff0d6f 100644 --- a/tests/foreman/destructive/test_rename.py +++ b/tests/foreman/destructive/test_rename.py @@ -4,21 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: System - -:CaseComponent: satellite-change-hostname +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No - """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.cli import hammer from robottelo.config import settings diff --git a/tests/foreman/destructive/test_repository.py b/tests/foreman/destructive/test_repository.py index f2ef0ddd4cf..026134f2017 100644 --- a/tests/foreman/destructive/test_repository.py +++ b/tests/foreman/destructive/test_repository.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from nailgun.entity_mixins import TaskFailedError +import pytest from robottelo import constants diff --git a/tests/foreman/endtoend/test_api_endtoend.py b/tests/foreman/endtoend/test_api_endtoend.py index e8a166d3206..dd1cb4fbd56 100644 --- a/tests/foreman/endtoend/test_api_endtoend.py +++ b/tests/foreman/endtoend/test_api_endtoend.py @@ -4,39 +4,32 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: API :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import http -import random from collections import defaultdict +import http from pprint import pformat -import pytest from deepdiff import DeepDiff from fauxfactory import gen_string -from nailgun import client -from nailgun import entities +from nailgun import client, entities +import pytest from robottelo import constants -from robottelo.config import get_credentials -from robottelo.config import get_url -from robottelo.config import setting_is_set -from robottelo.config import settings -from robottelo.config import user_nailgun_config +from robottelo.config import ( + get_credentials, + get_url, + setting_is_set, + settings, + user_nailgun_config, +) from robottelo.constants.repos import CUSTOM_RPM_REPO from robottelo.utils.issue_handlers import is_open -from robottelo.utils.manifest import clone - API_PATHS = { # flake8:noqa (line-too-long) @@ -431,6 +424,8 @@ 
'/api/hostgroups/:id/play_roles', '/api/hostgroups/multiple_play_roles', '/api/hostgroups/:id/ansible_roles', + '/api/hostgroups/:id/ansible_roles/:ansible_role_id', + '/api/hostgroups/:id/ansible_roles/:ansible_role_id', '/api/hostgroups/:id/assign_ansible_roles', ), 'hosts': ( @@ -453,6 +448,8 @@ '/api/hosts/:id/play_roles', '/api/hosts/multiple_play_roles', '/api/hosts/:id/ansible_roles', + '/api/hosts/:id/ansible_roles/:ansible_role_id', + '/api/hosts/:id/ansible_roles/:ansible_role_id', '/api/hosts/:id/assign_ansible_roles', '/api/hosts/:host_id/host_collections', '/api/hosts/:id/policies_enc', @@ -516,6 +513,7 @@ '/api/organizations/:organization_id/rh_cloud/report', '/api/organizations/:organization_id/rh_cloud/report', '/api/organizations/:organization_id/rh_cloud/inventory_sync', + '/api/organizations/:organization_id/rh_cloud/missing_hosts', '/api/rh_cloud/enable_connector', ), 'interfaces': ( @@ -634,8 +632,6 @@ ), 'oval_reports': ('/api/compliance/oval_reports/:cname/:oval_policy_id/:date',), 'package_groups': ( - '/katello/api/package_group', - '/katello/api/package_group', '/katello/api/package_groups/:id', '/katello/api/package_groups/compare', ), @@ -728,6 +724,7 @@ '/api/remote_execution_features', '/api/remote_execution_features/:id', '/api/remote_execution_features/:id', + '/api/api/hosts/:id/available_remote_execution_features', ), 'scap_content_profiles': ('/api/compliance/scap_content_profiles',), 'simple_content_access': ( @@ -847,7 +844,7 @@ '/katello/api/sync_plans', '/katello/api/sync_plans/:id/sync', ), - 'sync': ('/katello/api/organizations/:organization_id/products/:product_id/sync',), + 'sync': ('/katello/api/repositories/:repository_id/sync',), 'tailoring_files': ( '/api/compliance/tailoring_files', '/api/compliance/tailoring_files', @@ -910,6 +907,7 @@ '/api/webhooks', '/api/webhooks/:id', '/api/webhooks/:id', + '/api/webhooks/:id/test', '/api/webhooks/events', ), 'webhook_templates': ( @@ -951,7 +949,6 @@ def api_url(self): """We want to delay referencing get_url() until test execution""" return f'{get_url()}/api/v2' - @pytest.mark.build_sanity def test_positive_get_status_code(self, api_url): """GET ``api/v2`` and examine the response. @@ -1013,7 +1010,6 @@ class TestEndToEnd: def fake_manifest_is_set(self): return setting_is_set('fake_manifest') - @pytest.mark.build_sanity def test_positive_find_default_org(self): """Check if 'Default Organization' is present @@ -1027,7 +1023,6 @@ def test_positive_find_default_org(self): assert len(results) == 1 assert results[0].name == constants.DEFAULT_ORG - @pytest.mark.build_sanity def test_positive_find_default_loc(self): """Check if 'Default Location' is present @@ -1053,12 +1048,14 @@ def test_positive_find_admin_user(self): @pytest.mark.skip_if_not_set('libvirt') @pytest.mark.tier4 + @pytest.mark.no_containers + @pytest.mark.rhel_ver_match('7') @pytest.mark.e2e @pytest.mark.upgrade @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_contenthost): + def test_positive_end_to_end(self, function_entitlement_manifest, target_sat, rhel_contenthost): """Perform end to end smoke tests using RH and custom repos. 1. Create a new user with admin permissions @@ -1087,6 +1084,8 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte :expectedresults: All tests should succeed and Content should be successfully fetched by client. 
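One aside before the body of this hunk: the reworked assertions below walk the newer `content_view_environments` facet rather than the old flat `content_view_id`/`lifecycle_environment_id` fields. A rough sketch of the assumed response shape, with invented ids and names:

    # Assumed shape only -- one entry per assigned content view environment.
    content_facet_attributes = {
        'content_view_environments': [
            {
                'content_view': {'id': 42, 'name': 'my-cv'},
                'lifecycle_environment': {'id': 7, 'name': 'Library'},
            },
        ],
    }

    cve = content_facet_attributes['content_view_environments'][0]
    assert cve['content_view']['id'] == 42
    assert cve['lifecycle_environment']['id'] == 7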
+ :BZ: 2216461 + :parametrized: yes """ # step 1: Create a new user with admin permissions @@ -1097,11 +1096,10 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte # step 2.1: Create a new organization user_cfg = user_nailgun_config(login, password) org = target_sat.api.Organization(server_config=user_cfg).create() + org.sca_disable() - # step 2.2: Clone and upload manifest - if fake_manifest_is_set: - with clone() as manifest: - target_sat.upload_manifest(org.id, manifest.content) + # step 2.2: Upload manifest + target_sat.upload_manifest(org.id, function_entitlement_manifest.content) # step 2.3: Create a new lifecycle environment le1 = target_sat.api.LifecycleEnvironment(server_config=user_cfg, organization=org).create() @@ -1117,17 +1115,16 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte repositories.append(custom_repo) # step 2.6: Enable a Red Hat repository - if fake_manifest_is_set: - rhel_repo = target_sat.api.Repository( - id=target_sat.api_factory.enable_rhrepo_and_fetchid( - basearch='x86_64', - org_id=org.id, - product=constants.PRDS['rhel'], - repo=constants.REPOS['rhst7']['name'], - reposet=constants.REPOSET['rhst7'], - ) + rhel_repo = target_sat.api.Repository( + id=target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch='x86_64', + org_id=org.id, + product=constants.PRDS['rhel'], + repo=constants.REPOS['rhst7']['name'], + reposet=constants.REPOSET['rhst7'], ) - repositories.append(rhel_repo) + ) + repositories.append(rhel_repo) # step 2.7: Synchronize these two repositories for repo in repositories: @@ -1166,14 +1163,13 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte activation_key.add_subscriptions(data={'quantity': 1, 'subscription_id': sub.id}) break # step 2.13.1: Enable product content - if fake_manifest_is_set: - activation_key.content_override( - data={ - 'content_overrides': [ - {'content_label': constants.REPOS['rhst7']['id'], 'value': '1'} - ] - } - ) + activation_key.content_override( + data={ + 'content_overrides': [ + {'content_label': constants.REPOS['rhst7']['id'], 'value': '1'} + ] + } + ) # BONUS: Create a content host and associate it with promoted # content view and last lifecycle where it exists @@ -1185,9 +1181,19 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte organization=org, ).create() # check that content view matches what we passed - assert content_host.content_facet_attributes['content_view_id'] == content_view.id + assert ( + content_host.content_facet_attributes['content_view_environments'][0]['content_view'][ + 'id' + ] + == content_view.id + ) # check that lifecycle environment matches - assert content_host.content_facet_attributes['lifecycle_environment_id'] == le1.id + assert ( + content_host.content_facet_attributes['content_view_environments'][0][ + 'lifecycle_environment' + ]['id'] + == le1.id + ) # step 2.14: Create a new libvirt compute resource target_sat.api.LibvirtComputeResource( @@ -1207,14 +1213,14 @@ def test_positive_end_to_end(self, fake_manifest_is_set, target_sat, rhel7_conte # step 2.18: Provision a client # TODO this isn't provisioning through satellite as intended # Note it wasn't well before the change that added this todo - rhel7_contenthost.install_katello_ca(target_sat) + rhel_contenthost.install_katello_ca(target_sat) # Register client with foreman server using act keys - rhel7_contenthost.register_contenthost(org.label, activation_key_name) - assert
rhel7_contenthost.subscribed + rhel_contenthost.register_contenthost(org.label, activation_key_name) + assert rhel_contenthost.subscribed # Install rpm on client package_name = 'katello-agent' - result = rhel7_contenthost.execute(f'yum install -y {package_name}') + result = rhel_contenthost.execute(f'yum install -y {package_name}') assert result.status == 0 # Verify that the package is installed by querying it - result = rhel7_contenthost.run(f'rpm -q {package_name}') + result = rhel_contenthost.run(f'rpm -q {package_name}') assert result.status == 0 diff --git a/tests/foreman/endtoend/test_cli_endtoend.py b/tests/foreman/endtoend/test_cli_endtoend.py index 0baab0e65ab..e14085f39a7 100644 --- a/tests/foreman/endtoend/test_cli_endtoend.py +++ b/tests/foreman/endtoend/test_cli_endtoend.py @@ -4,43 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hammer :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from fauxfactory import gen_alphanumeric, gen_ipaddr import pytest -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_ipaddr from robottelo import constants -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.contentview import ContentView -from robottelo.cli.domain import Domain -from robottelo.cli.factory import make_user -from robottelo.cli.host import Host -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.location import Location -from robottelo.cli.org import Org -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.subnet import Subnet -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User -from robottelo.config import setting_is_set -from robottelo.config import settings +from robottelo.config import setting_is_set, settings from robottelo.constants.repos import CUSTOM_RPM_REPO -from robottelo.utils.manifest import clone @pytest.fixture(scope='module') @@ -50,50 +26,51 @@ def fake_manifest_is_set(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_default_org(): +def test_positive_cli_find_default_org(module_target_sat): """Check if 'Default Organization' is present :id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a :expectedresults: 'Default Organization' is found """ - result = Org.info({'name': constants.DEFAULT_ORG}) + result = module_target_sat.cli.Org.info({'name': constants.DEFAULT_ORG}) assert result['name'] == constants.DEFAULT_ORG @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_default_loc(): +def test_positive_cli_find_default_loc(module_target_sat): """Check if 'Default Location' is present :id: 11cf0d06-78ff-47e8-9d50-407a2ea31988 :expectedresults: 'Default Location' is found """ - result = Location.info({'name': constants.DEFAULT_LOC}) + result = module_target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) assert result['name'] == constants.DEFAULT_LOC @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_admin_user(): +def test_positive_cli_find_admin_user(module_target_sat): """Check if Admin User is present :id: f6755189-05a6-4d2f-a3b8-98be0cfacaee :expectedresults: Admin User is found and has Admin role """ - result = User.info({'login': 'admin'}) + result = module_target_sat.cli.User.info({'login': 'admin'}) assert 
result['login'] == 'admin' assert result['admin'] == 'yes' -@pytest.mark.skip_if_not_set('libvirt') +@pytest.mark.no_containers +@pytest.mark.rhel_ver_match('7') @pytest.mark.tier4 @pytest.mark.e2e @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_contenthost): +def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel_contenthost): """Perform end to end smoke tests using RH and custom repos. 1. Create a new user with admin permissions @@ -122,37 +99,42 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content :expectedresults: All tests should succeed and Content should be successfully fetched by client. + :BZ: 2216461 + :parametrized: yes """ # step 1: Create a new user with admin permissions password = gen_alphanumeric() - user = make_user({'admin': 'true', 'password': password}) + user = target_sat.cli_factory.user({'admin': 'true', 'password': password}) user['password'] = password # step 2.1: Create a new organization - org = _create(user, Org, {'name': gen_alphanumeric()}) + org = _create(user, target_sat.cli.Org, {'name': gen_alphanumeric()}) + target_sat.cli.SimpleContentAccess.disable({'organization-id': org['id']}) # step 2.2: Clone and upload manifest - if fake_manifest_is_set: - with clone() as manifest: - target_sat.put(manifest, manifest.filename) - Subscription.upload({'file': manifest.filename, 'organization-id': org['id']}) + target_sat.put(f'{function_entitlement_manifest.path}', f'{function_entitlement_manifest.name}') + target_sat.cli.Subscription.upload( + {'file': f'{function_entitlement_manifest.name}', 'organization-id': org['id']} + ) # step 2.3: Create a new lifecycle environment lifecycle_environment = _create( user, - LifecycleEnvironment, + target_sat.cli.LifecycleEnvironment, {'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'}, ) # step 2.4: Create a custom product - product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']}) + product = _create( + user, target_sat.cli.Product, {'name': gen_alphanumeric(), 'organization-id': org['id']} + ) repositories = [] # step 2.5: Create custom YUM repository custom_repo = _create( user, - Repository, + target_sat.cli.Repository, { 'content-type': 'yum', 'name': gen_alphanumeric(),
user['password']).synchronize({'id': repo['id']}) + target_sat.cli.Repository.with_user(user['login'], user['password']).synchronize( + {'id': repo['id']} + ) # step 2.8: Create content view content_view = _create( - user, ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']} + user, target_sat.cli.ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']} ) # step 2.9: Associate the YUM and Red Hat repositories to new content view for repo in repositories: - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': org['id'], @@ -203,26 +186,28 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content ) # step 2.10: Publish content view - ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']}) + target_sat.cli.ContentView.with_user(user['login'], user['password']).publish( + {'id': content_view['id']} + ) # step 2.11: Promote content view to the lifecycle environment - content_view = ContentView.with_user(user['login'], user['password']).info( + content_view = target_sat.cli.ContentView.with_user(user['login'], user['password']).info( {'id': content_view['id']} ) assert len(content_view['versions']) == 1 - cv_version = ContentView.with_user(user['login'], user['password']).version_info( + cv_version = target_sat.cli.ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 1 - ContentView.with_user(user['login'], user['password']).version_promote( + target_sat.cli.ContentView.with_user(user['login'], user['password']).version_promote( {'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']} ) # check that content view exists in lifecycle - content_view = ContentView.with_user(user['login'], user['password']).info( + content_view = target_sat.cli.ContentView.with_user(user['login'], user['password']).info( {'id': content_view['id']} ) assert len(content_view['versions']) == 1 - cv_version = ContentView.with_user(user['login'], user['password']).version_info( + cv_version = target_sat.cli.ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 2 @@ -231,7 +216,7 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content # step 2.12: Create a new activation key activation_key = _create( user, - ActivationKey, + target_sat.cli.ActivationKey, { 'content-view-id': content_view['id'], 'lifecycle-environment-id': lifecycle_environment['id'], @@ -241,12 +226,14 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content ) # step 2.13: Add the products to the activation key - subscription_list = Subscription.with_user(user['login'], user['password']).list( + subscription_list = target_sat.cli.Subscription.with_user(user['login'], user['password']).list( {'organization-id': org['id']}, per_page=False ) for subscription in subscription_list: if subscription['name'] == constants.DEFAULT_SUBSCRIPTION_NAME: - ActivationKey.with_user(user['login'], user['password']).add_subscription( + target_sat.cli.ActivationKey.with_user( + user['login'], user['password'] + ).add_subscription( { 'id': activation_key['id'], 'quantity': 1, @@ -255,20 +242,21 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content ) # step 2.13.1: Enable product content - 
if fake_manifest_is_set: - ActivationKey.with_user(user['login'], user['password']).content_override( - { - 'content-label': constants.REPOS['rhst7']['id'], - 'id': activation_key['id'], - 'organization-id': org['id'], - 'value': '1', - } - ) + target_sat.cli.ActivationKey.with_user(user['login'], user['password']).content_override( + { + 'content-label': constants.REPOS['rhst7']['id'], + 'id': activation_key['id'], + 'organization-id': org['id'], + 'value': '1', + } + ) # BONUS: Create a content host and associate it with promoted # content view and last lifecycle where it exists content_host_name = gen_alphanumeric() - content_host = Host.with_user(user['login'], user['password']).subscription_register( + content_host = target_sat.cli.Host.with_user( + user['login'], user['password'] + ).subscription_register( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': lifecycle_environment['id'], @@ -277,20 +265,30 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content } ) - content_host = Host.with_user(user['login'], user['password']).info({'id': content_host['id']}) + content_host = target_sat.cli.Host.with_user(user['login'], user['password']).info( + {'id': content_host['id']}, output_format='json' + ) + # check that content view matches what we passed - assert content_host['content-information']['content-view']['name'] == content_view['name'] + assert ( + content_host['content-information']['content-view-environments']['1']['content-view'][ + 'name' + ] + == content_view['name'] + ) # check that lifecycle environment matches assert ( - content_host['content-information']['lifecycle-environment']['name'] + content_host['content-information']['content-view-environments']['1'][ + 'lifecycle-environment' + ]['name'] == lifecycle_environment['name'] ) # step 2.14: Create a new libvirt compute resource _create( user, - ComputeResource, + target_sat.cli.ComputeResource, { 'name': gen_alphanumeric(), 'provider': 'Libvirt', @@ -301,7 +299,7 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content # step 2.15: Create a new subnet subnet = _create( user, - Subnet, + target_sat.cli.Subnet, { 'name': gen_alphanumeric(), 'network': gen_ipaddr(ip3=True), @@ -310,15 +308,15 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content ) # step 2.16: Create a new domain - domain = _create(user, Domain, {'name': gen_alphanumeric()}) + domain = _create(user, target_sat.cli.Domain, {'name': gen_alphanumeric()}) # step 2.17: Create a new hostgroup and associate previous entities to it host_group = _create( user, - HostGroup, + target_sat.cli.HostGroup, {'domain-id': domain['id'], 'name': gen_alphanumeric(), 'subnet-id': subnet['id']}, ) - HostGroup.with_user(user['login'], user['password']).update( + target_sat.cli.HostGroup.with_user(user['login'], user['password']).update( { 'id': host_group['id'], 'organization-ids': org['id'], @@ -330,16 +328,16 @@ def test_positive_cli_end_to_end(fake_manifest_is_set, target_sat, rhel7_content # step 2.18: Provision a client # TODO this isn't provisioning through satellite as intended # Note it wasn't well before the change that added this todo - rhel7_contenthost.install_katello_ca(target_sat) + rhel_contenthost.install_katello_ca(target_sat) # Register client with foreman server using act keys - rhel7_contenthost.register_contenthost(org['label'], activation_key['name']) - assert rhel7_contenthost.subscribed + rhel_contenthost.register_contenthost(org['label'], 
activation_key['name']) + assert rhel_contenthost.subscribed # Install rpm on client package_name = 'katello-agent' - result = rhel7_contenthost.execute(f'yum install -y {package_name}') + result = rhel_contenthost.execute(f'yum install -y {package_name}') assert result.status == 0 # Verify that the package is installed by querying it - result = rhel7_contenthost.run(f'rpm -q {package_name}') + result = rhel_contenthost.run(f'rpm -q {package_name}') assert result.status == 0 diff --git a/tests/foreman/installer/test_infoblox.py b/tests/foreman/installer/test_infoblox.py index 0bdafe25d82..5f839d4e850 100644 --- a/tests/foreman/installer/test_infoblox.py +++ b/tests/foreman/installer/test_infoblox.py @@ -2,161 +2,18 @@ :Requirement: Infoblox, Installer -:CaseLevel: System +:CaseAutomation: Automated :CaseComponent: DHCPDNS :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -@pytest.mark.stubbed -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_set_dns_provider(): - """Check Infoblox DNS plugin is set as provider - - :id: 23f76fa8-79bb-11e6-a3d4-68f72889dc7f - - :Steps: Set infoblox as dns provider with options - --foreman-proxy-dns=true - --foreman-proxy-plugin-provider=infoblox - --enable-foreman-proxy-plugin-dns-infoblox - --foreman-proxy-plugin-dns-infoblox-dns-server= - --foreman-proxy-plugin-dns-infoblox-username= - --foreman-proxy-plugin-dns-infoblox-password= - - :expectedresults: Check inflobox is set as DNS provider - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_set_dhcp_provider(): - """Check Infoblox DHCP plugin is set as provider - - :id: 40783976-7e68-11e6-b728-68f72889dc7f - - :Steps: Set infoblox as dhcp provider with options - --foreman-proxy-dhcp=true - --foreman-proxy-plugin-dhcp-provider=infoblox - --enable-foreman-proxy-plugin-dhcp-infoblox - --foreman-proxy-plugin-dhcp-infoblox-dhcp-server= - --foreman-proxy-plugin-dhcp-infoblox-username= - --foreman-proxy-plugin-dhcp-infoblox-password= - - :expectedresults: Check inflobox is set as DHCP provider - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_update_dns_appliance_credentials(): - """Check infoblox appliance credentials are updated - - :id: 2e84a8b4-79b6-11e6-8bf8-68f72889dc7f - - :Steps: Pass appliance credentials via installer options - --foreman-proxy-plugin-dns-infoblox-username= - --foreman-proxy-plugin-dns-infoblox-password= - - :expectedresults: config/dns_infoblox.yml should be updated with - infoblox_hostname, username & password - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -@pytest.mark.upgrade -def test_enable_dns_plugin(): - """Check Infoblox DNS plugin can be enabled on server - - :id: f8be8c34-79b2-11e6-8992-68f72889dc7f - - :Steps: Enable Infoblox plugin via installer options - --enable-foreman-proxy-plugin-dns-infoblox - - :CaseLevel: System - - :expectedresults: Check DNS plugin is enabled on host - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_disable_dns_plugin(): - """Check Infoblox DNS plugin can be disabled on host - - :id: c5f563c6-79b3-11e6-8cb6-68f72889dc7f - - :Steps: Disable Infoblox plugin via installer - - :expectedresults: Check DNS plugin is disabled on host - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 
-@pytest.mark.upgrade -def test_enable_dhcp_plugin(): - """Check Infoblox DHCP plugin can be enabled on host - - :id: 75650c06-79b6-11e6-ad91-68f72889dc7f - - :Steps: Enable Infoblox plugin via installer option - --enable-foreman-proxy-plugin-dhcp-infoblox - - :expectedresults: Check DHCP plugin is enabled on host - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_disable_dhcp_plugin(): - """Check Infoblox DHCP plugin can be disabled on host - - :id: ea347f34-79b7-11e6-bb03-68f72889dc7f - - :Steps: Disable Infoblox plugin via installer - - :expectedresults: Check DHCP plugin is disabled on host - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - @pytest.mark.stubbed @pytest.mark.tier3 @pytest.mark.upgrade @@ -165,13 +22,11 @@ def test_dhcp_ip_range(): :id: ba957e82-79bb-11e6-94c5-68f72889dc7f - :Steps: Provision a host with infoblox as dhcp provider + :steps: Provision a host with infoblox as dhcp provider :expectedresults: Check host ip is on infoblox range configured by option --foreman-proxy-plugin-dhcp-infoblox-use-ranges=true - :CaseLevel: System - :CaseAutomation: NotAutomated """ @@ -184,14 +39,12 @@ def test_dns_records(): :id: 007ad06e-79bc-11e6-885f-68f72889dc7f - :Steps: + :steps: 1. Provision a host with infoblox as dns provider 2. Update a DNS record on infoblox :expectedresults: Check host dns is updated accordingly to infoblox - :CaseLevel: System - :CaseAutomation: NotAutomated """ diff --git a/tests/foreman/installer/test_installer.py b/tests/foreman/installer/test_installer.py index 5e67a993ddd..451c3898e9d 100644 --- a/tests/foreman/installer/test_installer.py +++ b/tests/foreman/installer/test_installer.py @@ -1,29 +1,25 @@ """Smoke tests to check installation health -:Requirement: Installer +:Requirement: Installation :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Installer +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest +import requests from robottelo import ssh from robottelo.config import settings -from robottelo.constants import DEFAULT_ORG +from robottelo.constants import DEFAULT_ORG, FOREMAN_SETTINGS_YML, PRDS, REPOS, REPOSET from robottelo.hosts import setup_capsule from robottelo.utils.installer import InstallerCommand - +from robottelo.utils.issue_handlers import is_open PREVIOUS_INSTALLER_OPTIONS = { '-', @@ -222,6 +218,7 @@ '--foreman-proxy-content-pulpcore-cache-expires-ttl', '--foreman-proxy-content-pulpcore-content-service-worker-timeout', '--foreman-proxy-content-pulpcore-django-secret-key', + '--foreman-proxy-content-pulpcore-hide-guarded-distributions', '--foreman-proxy-content-pulpcore-manage-postgresql', '--foreman-proxy-content-pulpcore-mirror', '--foreman-proxy-content-pulpcore-postgresql-db-name', @@ -496,9 +493,10 @@ '--puppet-agent-default-schedules', '--puppet-agent-noop', '--puppet-agent-restart-command', + '--puppet-agent-server-hostname', + '--puppet-agent-server-port', '--puppet-allow-any-crl-auth', '--puppet-auth-allowed', - '--puppet-auth-template', '--puppet-autosign', '--puppet-autosign-content', '--puppet-autosign-entries', @@ -530,11 +528,9 @@ '--puppet-package-source', '--puppet-pluginfactsource', '--puppet-pluginsource', - '--puppet-port', '--puppet-postrun-command', '--puppet-prerun-command', '--puppet-puppetconf-mode', - '--puppet-puppetmaster', '--puppet-report', '--puppet-run-hour', '--puppet-run-minute', @@ -648,7 +644,6 @@ 
'--puppet-server-storeconfigs', '--puppet-server-strict-variables', '--puppet-server-trusted-external-command', - '--puppet-server-use-legacy-auth-conf', '--puppet-server-user', '--puppet-server-version', '--puppet-server-versioned-code-content', @@ -807,6 +802,7 @@ '--reset-foreman-proxy-content-pulpcore-cache-expires-ttl', '--reset-foreman-proxy-content-pulpcore-content-service-worker-timeout', '--reset-foreman-proxy-content-pulpcore-django-secret-key', + '--reset-foreman-proxy-content-pulpcore-hide-guarded-distributions', '--reset-foreman-proxy-content-pulpcore-manage-postgresql', '--reset-foreman-proxy-content-pulpcore-mirror', '--reset-foreman-proxy-content-pulpcore-postgresql-db-name', @@ -1072,9 +1068,10 @@ '--reset-puppet-agent-default-schedules', '--reset-puppet-agent-noop', '--reset-puppet-agent-restart-command', + '--reset-puppet-agent-server-hostname', + '--reset-puppet-agent-server-port', '--reset-puppet-allow-any-crl-auth', '--reset-puppet-auth-allowed', - '--reset-puppet-auth-template', '--reset-puppet-autosign', '--reset-puppet-autosign-content', '--reset-puppet-autosign-entries', @@ -1106,11 +1103,9 @@ '--reset-puppet-package-source', '--reset-puppet-pluginfactsource', '--reset-puppet-pluginsource', - '--reset-puppet-port', '--reset-puppet-postrun-command', '--reset-puppet-prerun-command', '--reset-puppet-puppetconf-mode', - '--reset-puppet-puppetmaster', '--reset-puppet-report', '--reset-puppet-run-hour', '--reset-puppet-run-minute', @@ -1224,7 +1219,6 @@ '--reset-puppet-server-storeconfigs', '--reset-puppet-server-strict-variables', '--reset-puppet-server-trusted-external-command', - '--reset-puppet-server-use-legacy-auth-conf', '--reset-puppet-server-user', '--reset-puppet-server-version', '--reset-puppet-server-versioned-code-content', @@ -1323,6 +1317,228 @@ def extract_help(filter='params'): yield token.replace(',', '') +def common_sat_install_assertions(satellite): + sat_version = 'stream' if satellite.is_stream else satellite.version + assert settings.server.version.release == sat_version + + # no errors/failures in journald + result = satellite.execute( + r'journalctl --quiet --no-pager --boot --priority err -u "dynflow-sidekiq*" -u "foreman-proxy" -u "foreman" -u "httpd" -u "postgresql" -u "pulpcore-api" -u "pulpcore-content" -u "pulpcore-worker*" -u "redis" -u "tomcat"' + ) + assert len(result.stdout) == 0 + # no errors in /var/log/foreman/production.log + result = satellite.execute(r'grep --context=100 -E "\[E\|" /var/log/foreman/production.log') + if not is_open('BZ:2247484'): + assert len(result.stdout) == 0 + # no errors/failures in /var/log/foreman-installer/satellite.log + result = satellite.execute( + r'grep "\[ERROR" --context=100 /var/log/foreman-installer/satellite.log' + ) + assert len(result.stdout) == 0 + # no errors/failures in /var/log/httpd/* + result = satellite.execute(r'grep -iR "error" /var/log/httpd/*') + assert len(result.stdout) == 0 + # no errors/failures in /var/log/candlepin/* + result = satellite.execute(r'grep -iR "error" /var/log/candlepin/*') + assert len(result.stdout) == 0 + + result = satellite.cli.Health.check() + assert 'FAIL' not in result.stdout + + +def install_satellite(satellite, installer_args): + # Register for RHEL8 repos, get Ohsnap repofile, and enable and download satellite + satellite.register_to_cdn() + satellite.download_repofile( + product='satellite', + release=settings.server.version.release, + snap=settings.server.version.snap, + ) + satellite.execute('dnf -y module enable satellite:el8 && dnf -y install 
satellite') + # Configure Satellite firewall to open communication + satellite.execute( + 'firewall-cmd --permanent --add-service RH-Satellite-6 && firewall-cmd --reload' + ) + # Install Satellite + satellite.execute( + InstallerCommand(installer_args=installer_args).get_command(), + timeout='30m', + ) + + +@pytest.fixture(scope='module') +def sat_default_install(module_sat_ready_rhels): + """Install Satellite with default options""" + installer_args = [ + 'scenario satellite', + f'foreman-initial-admin-password {settings.server.admin_password}', + ] + install_satellite(module_sat_ready_rhels[0], installer_args) + return module_sat_ready_rhels[0] + + +@pytest.fixture(scope='module') +def sat_non_default_install(module_sat_ready_rhels): + """Install Satellite with various options""" + installer_args = [ + 'scenario satellite', + f'foreman-initial-admin-password {settings.server.admin_password}', + 'foreman-rails-cache-store type:redis', + 'foreman-proxy-content-pulpcore-hide-guarded-distributions false', + ] + install_satellite(module_sat_ready_rhels[1], installer_args) + return module_sat_ready_rhels[1] + + +@pytest.mark.e2e +@pytest.mark.tier1 +@pytest.mark.pit_server +def test_capsule_installation(sat_default_install, cap_ready_rhel, default_org): + """Run a basic Capsule installation + + :id: 64fa85b6-96e6-4fea-bea4-a30539d59e65 + + :steps: + 1. Get a Satellite + 2. Configure capsule repos + 3. Enable capsule module + 4. Install and set up capsule + + :expectedresults: + 1. Capsule is installed and set up correctly + 2. no unexpected errors in logs + 3. health check runs successfully + + :CaseImportance: Critical + """ + # Get Capsule repofile, and enable and download satellite-capsule + cap_ready_rhel.register_to_cdn() + cap_ready_rhel.download_repofile( + product='capsule', + release=settings.server.version.release, + snap=settings.server.version.snap, + ) + cap_ready_rhel.execute( + 'dnf -y module enable satellite-capsule:el8 && dnf -y install satellite-capsule' + ) + # Setup Capsule + org = sat_default_install.api.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[ + 0 + ] + setup_capsule(sat_default_install, cap_ready_rhel, org) + assert sat_default_install.api.Capsule().search( + query={'search': f'name={cap_ready_rhel.hostname}'} + )[0] + + # no errors/failures in journald + result = cap_ready_rhel.execute( + r'journalctl --quiet --no-pager --boot --priority err -u foreman-proxy -u httpd -u postgresql -u pulpcore-api -u pulpcore-content -u pulpcore-worker* -u redis' + ) + assert len(result.stdout) == 0 + # no errors/failures in /var/log/foreman-installer/satellite.log + result = cap_ready_rhel.execute( + r'grep "\[ERROR" --context=100 /var/log/foreman-installer/satellite.log' + ) + assert len(result.stdout) == 0 + # no errors/failures in /var/log/foreman-installer/capsule.log + result = cap_ready_rhel.execute( + r'grep "\[ERROR" --context=100 /var/log/foreman-installer/capsule.log' + ) + assert len(result.stdout) == 0 + # no errors/failures in /var/log/httpd/* + result = cap_ready_rhel.execute(r'grep -iR "error" /var/log/httpd/*') + assert len(result.stdout) == 0 + # no errors/failures in /var/log/foreman-proxy/* + result = cap_ready_rhel.execute(r'grep -iR "error" /var/log/foreman-proxy/*') + assert len(result.stdout) == 0 + + result = cap_ready_rhel.cli.Health.check() + assert 'FAIL' not in result.stdout + + +@pytest.mark.e2e +@pytest.mark.tier1 +def test_foreman_rails_cache_store(sat_non_default_install): + """Test foreman-rails-cache-store option + + :id:
379a2fe8-1085-4a7f-8ac3-24c421412f12 + + :steps: + 1. Install Satellite. + 2. Verify that foreman-redis package is installed. + 3. Check /etc/foreman/settings.yaml + + :CaseImportance: Medium + + :customerscenario: true + + :BZ: 2063717, 2165092 + """ + # Verify foreman-rails-cache-store option works + assert sat_non_default_install.execute('rpm -q foreman-redis').status == 0 + settings_file = sat_non_default_install.load_remote_yaml_file(FOREMAN_SETTINGS_YML) + assert settings_file.rails_cache_store.type == 'redis' + + +@pytest.mark.e2e +@pytest.mark.tier1 +def test_content_guarded_distributions_option( + sat_default_install, sat_non_default_install, module_sca_manifest +): + """Verify foreman-proxy-content-pulpcore-hide-guarded-distributions option works + + :id: a9ceefbc-fc2d-415e-9461-1811fabc63dc + + :steps: + 1. Install Satellite. + 2. Verify that no content is listed on https://sat-fqdn/pulp/content/ + with default Satellite installation. + 3. Verify that content is not downloadable when content guard setting is disabled. + + :expectedresults: + 1. no content is listed on https://sat-fqdn/pulp/content/ + + :CaseImportance: Medium + + :customerscenario: true + + :BZ: 2063717, 2088559 + """ + # Verify that no content is listed on https://sat-fqdn/pulp/content/. + result = requests.get(f'https://{sat_default_install.hostname}/pulp/content/', verify=False) + assert 'Default_Organization' not in result.text + # Verify that content is not downloadable when content guard setting is disabled. + org = sat_non_default_install.api.Organization().create() + sat_non_default_install.upload_manifest(org.id, module_sca_manifest.content) + # sync ansible repo + product = sat_non_default_install.api.Product(name=PRDS['rhae'], organization=org.id).search()[ + 0 + ] + r_set = sat_non_default_install.api.RepositorySet( + name=REPOSET['rhae2.9_el8'], product=product + ).search()[0] + r_set.enable( + data={ + 'basearch': 'x86_64', + 'name': REPOSET['rhae2.9_el8'], + 'organization-id': org.id, + 'product_id': product.id, + 'releasever': '8', + } + ) + rh_repo = sat_non_default_install.api.Repository(name=REPOS['rhae2.9_el8']['name']).search( + query={'organization_id': org.id} + )[0] + rh_repo.sync() + assert ( + "403: [('PEM routines', 'get_name', 'no start line')]" + in sat_non_default_install.execute( + f'curl https://{sat_non_default_install.hostname}/pulp/content/{org.label}' + f'/Library/content/dist/layered/rhel8/x86_64/ansible/2.9/os/' + ).stdout + ) + + @pytest.mark.upgrade @pytest.mark.tier1 def test_positive_selinux_foreman_module(target_sat): @@ -1334,8 +1550,6 @@ def test_positive_selinux_foreman_module(target_sat): 1. Check "foreman-selinux" package availability on satellite. 2. Check SELinux foreman module on satellite. 
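For context on the guarded-distributions check above: the probe is simply an anonymous HTTP GET against the pulp content listing, asserting that guarded content stays hidden. A minimal standalone sketch of that pattern, assuming only the requests library and a placeholder satellite_fqdn (the real test derives the hostname from its fixtures):

import requests

def pulp_content_hides_org(satellite_fqdn, org_label='Default_Organization'):
    # verify=False mirrors the test above, which talks to a self-signed Satellite cert
    listing = requests.get(f'https://{satellite_fqdn}/pulp/content/', verify=False)
    # with hide-guarded-distributions at its default (true), guarded content
    # must not show up in the anonymous listing
    return org_label not in listing.text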
- :CaseLevel: System - :expectedresults: Foreman RPM and SELinux module are both present on the satellite """ rpm_result = target_sat.execute('rpm -q foreman-selinux') @@ -1380,8 +1594,6 @@ def test_positive_check_installer_hammer_ping(target_sat): :customerscenario: true :expectedresults: All services are active (running) - - :CaseLevel: System """ # check status reported by hammer ping command result = target_sat.execute('hammer ping') @@ -1431,80 +1643,6 @@ def test_installer_options_and_sections(filter): assert previous == current, msg -@pytest.mark.e2e -@pytest.mark.tier1 -@pytest.mark.parametrize("sat_ready_rhel", [settings.server.version.rhel_version], indirect=True) -def test_satellite_and_capsule_installation(sat_ready_rhel, cap_ready_rhel): - """Run a basic Satellite installation - - :id: bbab30a6-6861-494f-96dd-23b883c2c906 - - :steps: - 1. Get 2 RHEL hosts - 2. Configure satellite repos - 3. Enable satellite module - 4. Install satellite - 5. Run satellite-installer - 6. Configure capsule repos - 7. Enable capsule module - 8. Install and setup capsule - - :expectedresults: - 1. Correct satellite packaged is installed - 2. satellite-installer runs successfully - 3. satellite-maintain health check runs successfully - 4. Capsule is installed and setup correctly - - :CaseImportance: High - """ - sat_version = settings.server.version.release - # Register for RHEL8 repos, get Ohsnap repofile, and enable and download satellite - sat_ready_rhel.register_to_cdn() - sat_ready_rhel.download_repofile(product='satellite', release=settings.server.version.release) - sat_ready_rhel.execute('dnf -y module enable satellite:el8 && dnf -y install satellite') - installed_version = sat_ready_rhel.execute('rpm --query satellite').stdout - assert sat_version in installed_version - # Install Satellite - sat_ready_rhel.execute( - InstallerCommand( - installer_args=[ - 'scenario satellite', - f'foreman-initial-admin-password {settings.server.admin_password}', - ] - ).get_command(), - timeout='30m', - ) - result = sat_ready_rhel.execute( - r'grep "\[ERROR" --after-context=100 /var/log/foreman-installer/satellite.log' - ) - assert len(result.stdout) == 0 - result = sat_ready_rhel.cli.Health.check() - assert 'FAIL' not in result.stdout - # Get Capsule repofile, and enable and download satellite-capsule - cap_ready_rhel.register_to_cdn() - cap_ready_rhel.download_repofile(product='capsule', release=settings.server.version.release) - cap_ready_rhel.execute( - 'dnf -y module enable satellite-capsule:el8 && dnf -y install satellite-capsule' - ) - # Configure Satellite firewall to open communication - sat_ready_rhel.execute( - 'firewall-cmd --permanent --add-service RH-Satellite-6 && ' - 'firewall-cmd --add-service RH-Satellite-6' - ) - # Setup Capsule - org = sat_ready_rhel.api.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[0] - setup_capsule(sat_ready_rhel, cap_ready_rhel, org) - assert sat_ready_rhel.api.Capsule().search(query={'search': f'name={cap_ready_rhel.hostname}'})[ - 0 - ] - result = cap_ready_rhel.execute( - r'grep "\[ERROR" --after-context=100 /var/log/foreman-installer/satellite.log' - ) - assert len(result.stdout) == 0 - result = cap_ready_rhel.cli.Health.check() - assert 'FAIL' not in result.stdout - - @pytest.mark.stubbed @pytest.mark.tier3 def test_satellite_installation_on_ipv6(): @@ -1523,8 +1661,6 @@ def test_satellite_installation_on_ipv6(): 4: Satellite service restart should work. 5: After system reboot all the services comes to up state. 
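Both the deleted end-to-end test in this hunk and its replacements assert installer health the same way: grep the installer log for ERROR markers and require empty output. A minimal sketch of that idiom, assuming a robottelo-style host object whose execute() returns a result exposing stdout:

def assert_no_installer_errors(host, log='/var/log/foreman-installer/satellite.log'):
    # grep prints matching lines plus context; empty stdout means the
    # installer logged no '[ERROR' entries
    result = host.execute(f'grep "\\[ERROR" --context=100 {log}')
    assert len(result.stdout) == 0, f'installer errors found: {result.stdout}'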
- :CaseLevel: System - :CaseAutomation: NotAutomated """ @@ -1546,8 +1682,6 @@ def test_capsule_installation_on_ipv6(): 3. Satellite service restart should work. 4. After system reboot all the services come to up state. - :CaseLevel: System - :CaseAutomation: NotAutomated """ @@ -1570,199 +1704,6 @@ def test_installer_check_on_ipv6(): 1. Tuning parameter set successfully for medium size. 2. custom-hiera.yaml related changes should be successfully applied. - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier1 -def test_installer_verbose_stdout(): - """Look for Satellite installer verbose STDOUT - - :id: 5d0fb30a-4a63-41b3-bc6f-c4057942ce3c - - :steps: - 1. Install satellite package. - 2. Run Satellite installer - 3. Observe installer STDOUT. - - :expectedresults: - 1. Installer STDOUTs following groups hooks completion. - pre_migrations, boot, init, pre_values, pre_validations, pre_commit, pre, post - 2. Installer STDOUTs system configuration completion. - 3. Finally, Installer informs running satellite url, credentials, - external capsule installation pre-requisite, upgrade capsule instruction, - running internal capsule url, log file. - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier1 -def test_installer_answers_file(): - """Answers file to configure plugins and hooks - - :id: 5cb40e4b-1acb-49f9-a085-a7dead1664b5 - - :steps: - 1. Install satellte package - 2. Modify `/etc/foreman-installer/scenarios.d/satellite-answers.yaml` file to - configure hook/plugin on satellite - 3. Run Satellite installer - - :expectedresults: Installer configures plugins and hooks in answers file. - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier1 -def test_capsule_installer_verbose_stdout(): - """Look for Capsule installer verbose STDOUT - - :id: 323e85e3-2ad1-4018-aa35-1d51f1e7f5a2 - - :steps: - 1. Install capsule package. - 2. Run Satellite installer --scenario capsule - 3. Observe installer STDOUT. - - :expectedresults: - 1. Installer STDOUTs following groups hooks completion. - pre_migrations, boot, init, pre_values, pre_validations, pre_commit, pre, post - 2. Installer STDOUTs system configuration completion. - 3. Finally, Installer informs running capsule url, log file. - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_installer_timestamp_logs(): - """Look for Satellite installer timestamp based logs - - :id: 9b4d32f6-d471-4bdb-8a79-9bb20ecb86aa - - :steps: - 1. Install satellite package. - 2. Run Satellite installer - 3. Observe installer log file `/var/log/foreman-installer/satellite.log`. - - :expectedresults: - 1. Installer logs satellite installation with timestamps in following format - YYYY-MM-DD HH:MM:SS - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_capsule_installer_and_register(): - """Verify the capsule installation and their registration with the satellite. - - :id: efd03442-5a08-445d-b257-e4d346084379 - - :steps: - 1. Install the satellite. - 2. Add all the required cdn and custom repositories in satellite to - install the capsule. - 3. Create life-cycle environment,content view and activation key. - 4. Subscribe the capsule with created activation key. - 5. Run 'yum update -y' on capsule. - 6. Run 'yum install -y satellite-capsule' on capsule. - 7. 
Create a certificate on satellite for new installed capsule. - 8. Copy capsule certificate from satellite to capsule. - 9. Run the satellite-installer(copy the satellite-installer command from step7'th - generated output) command on capsule to integrate the capsule with satellite. - 10. Check the newly added capsule is reflected in the satellite or not. - 11. Check the capsule sync. - - :expectedresults: - - 1. Capsule integrate successfully with satellite. - 2. Capsule sync should be worked properly. - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_satellite_installer_logfile_check(): - """Verify the no ERROR or FATAL messages appears in the log file during the satellite - installation - - :id: c2f10f43-c52e-4f32-b3e9-7bc4b07e3b00 - - :steps: - 1. Configure all the repositories(custom and cdn) for satellite installation. - 2. Run yum update -y - 3. Run satellite-installer -y - 4. Check all the relevant log-files for ERROR/FATAL - - :expectedresults: No Unexpected ERROR/FATAL message should appear in the following log - files during the satellite-installation. - - 1. /var/log/messages, - 2. /var/log/foreman/production.log - 3. /var/log/foreman-installer/satellite.log - 4. /var/log/httpd, - 5. /var/log/candlepin - - :CaseLevel: System - - :CaseAutomation: NotAutomated - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_capsule_installer_logfile_check(): - """Verify the no ERROR or FATAL messages appears in the log file during the capsule - installation - - :id: cd505a5e-141e-47eb-98d8-a05acd74c3b3 - - :steps: - 1. Install the satellite. - 2. Add all the required cdn and custom repositories in satellite to install - the capsule. - 3. Create life-cycle environment,content view and activation key. - 4. Subscribe the capsule with created activation key. - 5. Run 'yum update -y' on capsule. - 6. Run 'yum install -y satellite-capsule' on capsule. - 7. Create a certificate on satellite for new installed capsule. - 8. Copy capsule certificate from satellite to capsule. - 9. Run the satellite-installer(copy the satellite-installer command from step-7'th - generated output) command on capsule to integrate the capsule with satellite. - 10. Check all the relevant log-files for ERROR/FATAL - - :expectedresults: No Unexpected ERROR/FATAL message should appear in the following log - files during the capsule-installation. - - 1. /var/log/messages - 2. /var/log/foreman-installer/capsule.log - 3. /var/log/httpd - 4. /var/log/foreman-proxy - - :CaseLevel: System - :CaseAutomation: NotAutomated """ @@ -1788,8 +1729,6 @@ def test_installer_cap_pub_directory_accessibility(capsule_configured): :CaseImportance: High - :CaseLevel: System - :BZ: 1860519 :customerscenario: true @@ -1802,7 +1741,10 @@ def test_installer_cap_pub_directory_accessibility(capsule_configured): https_curl_command = f'curl -i {capsule_configured.url}/pub/ -k' for command in [http_curl_command, https_curl_command]: accessibility_check = capsule_configured.execute(command) - assert 'HTTP/1.1 200 OK' or 'HTTP/2 200 ' in accessibility_check.stdout.split('\r\n') + assert ( + 'HTTP/1.1 200 OK' in accessibility_check.stdout + or 'HTTP/2 200' in accessibility_check.stdout + ) capsule_configured.get( local_path='custom-hiera-capsule.yaml', remote_path=f'{custom_hiera_location}', @@ -1812,10 +1754,38 @@ def test_installer_cap_pub_directory_accessibility(capsule_configured): assert 'Success!' 
in command_output.stdout for command in [http_curl_command, https_curl_command]: accessibility_check = capsule_configured.execute(command) - assert 'HTTP/1.1 200 OK' or 'HTTP/2 200 ' not in accessibility_check.stdout.split('\r\n') + assert 'HTTP/1.1 200 OK' not in accessibility_check.stdout + assert 'HTTP/2 200' not in accessibility_check.stdout capsule_configured.put( local_path='custom-hiera-capsule.yaml', remote_path=f'{custom_hiera_location}', ) command_output = capsule_configured.execute('satellite-installer', timeout='20m') assert 'Success!' in command_output.stdout + + +@pytest.mark.tier1 +@pytest.mark.build_sanity +@pytest.mark.first_sanity +@pytest.mark.pit_server +def test_satellite_installation(installer_satellite): + """Run a basic Satellite installation + + :id: 661206f3-2eec-403c-af26-3c5cadcd5766 + + :steps: + 1. Get a RHEL host + 2. Configure satellite repos + 3. Enable satellite module + 4. Install satellite + 5. Run satellite-installer + + :expectedresults: + 1. Correct satellite package is installed + 2. satellite-installer runs successfully + 3. no unexpected errors in logs + 4. satellite-maintain health check runs successfully + + :CaseImportance: Critical + """ + common_sat_install_assertions(installer_satellite) diff --git a/tests/foreman/longrun/test_inc_updates.py b/tests/foreman/longrun/test_inc_updates.py index fc3c7bf8c90..8ffb84fb36c 100644 --- a/tests/foreman/longrun/test_inc_updates.py +++ b/tests/foreman/longrun/test_inc_updates.py @@ -4,109 +4,109 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts-Content :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta -import pytest from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import ENVIRONMENT -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE -from robottelo.constants import PRDS -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + DEFAULT_SUBSCRIPTION_NAME, + ENVIRONMENT, + FAKE_4_CUSTOM_PACKAGE, + PRDS, + REPOS, + REPOSET, +) pytestmark = [pytest.mark.run_in_one_thread] @pytest.fixture(scope='module') -def module_lce_library(module_manifest_org): +def module_lce_library(module_entitlement_manifest_org): """Returns the Library lifecycle environment from chosen organization""" return ( entities.LifecycleEnvironment() .search( - query={'search': f'name={ENVIRONMENT} and organization_id={module_manifest_org.id}'} + query={ + 'search': f'name={ENVIRONMENT} and ' + f'organization_id={module_entitlement_manifest_org.id}' + } )[0] .read() ) @pytest.fixture(scope='module') -def dev_lce(module_manifest_org): - return entities.LifecycleEnvironment(name='DEV', organization=module_manifest_org).create() +def dev_lce(module_entitlement_manifest_org): + return entities.LifecycleEnvironment( + name='DEV', organization=module_entitlement_manifest_org + ).create() @pytest.fixture(scope='module') -def qe_lce(module_manifest_org, dev_lce): - qe_lce = entities.LifecycleEnvironment( - name='QE', prior=dev_lce,
organization=module_entitlement_manifest_org ).create() - return qe_lce @pytest.fixture(scope='module') -def rhel7_sat6tools_repo(module_manifest_org, module_target_sat): +def sat_client_repo(module_entitlement_manifest_org, module_target_sat): """Enable Sat tools repository""" - rhel7_sat6tools_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + sat_client_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch=DEFAULT_ARCHITECTURE, - org_id=module_manifest_org.id, + org_id=module_entitlement_manifest_org.id, product=PRDS['rhel'], - repo=REPOS['rhst7']['name'], - reposet=REPOSET['rhst7'], + repo=REPOS['rhsclient7']['name'], + reposet=REPOSET['rhsclient7'], releasever=None, ) - rhel7_sat6tools_repo = entities.Repository(id=rhel7_sat6tools_repo_id).read() - assert rhel7_sat6tools_repo.sync()['result'] == 'success' - return rhel7_sat6tools_repo + sat_client_repo = module_target_sat.api.Repository(id=sat_client_repo_id).read() + assert sat_client_repo.sync()['result'] == 'success' + return sat_client_repo @pytest.fixture(scope='module') -def custom_repo(module_manifest_org): +def custom_repo(module_entitlement_manifest_org): """Enable custom errata repository""" custom_repo = entities.Repository( url=settings.repos.yum_9.url, - product=entities.Product(organization=module_manifest_org).create(), + product=entities.Product(organization=module_entitlement_manifest_org).create(), ).create() assert custom_repo.sync()['result'] == 'success' return custom_repo @pytest.fixture(scope='module') -def module_cv(module_manifest_org, rhel7_sat6tools_repo, custom_repo): +def module_cv(module_entitlement_manifest_org, sat_client_repo, custom_repo): """Publish both repos into module CV""" module_cv = entities.ContentView( - organization=module_manifest_org, repository=[rhel7_sat6tools_repo.id, custom_repo.id] + organization=module_entitlement_manifest_org, + repository=[sat_client_repo.id, custom_repo.id], ).create() module_cv.publish() - module_cv = module_cv.read() - return module_cv + return module_cv.read() @pytest.fixture(scope='module') -def module_ak(module_manifest_org, module_cv, custom_repo, module_lce_library): +def module_ak(module_entitlement_manifest_org, module_cv, custom_repo, module_lce_library): """Create a module AK in Library LCE""" ak = entities.ActivationKey( content_view=module_cv, environment=module_lce_library, - organization=module_manifest_org, + organization=module_entitlement_manifest_org, ).create() # Fetch available subscriptions - subs = entities.Subscription(organization=module_manifest_org).search() + subs = entities.Subscription(organization=module_entitlement_manifest_org).search() assert len(subs) > 0 # Add default subscription to activation key sub_found = False @@ -117,21 +117,29 @@ def module_ak(module_manifest_org, module_cv, custom_repo, module_lce_library): assert sub_found # Enable RHEL product content in activation key ak.content_override( - data={'content_overrides': [{'content_label': REPOS['rhst7']['id'], 'value': '1'}]} + data={'content_overrides': [{'content_label': REPOS['rhsclient7']['id'], 'value': '1'}]} ) # Add custom subscription to activation key prod = custom_repo.product.read() - custom_sub = entities.Subscription(organization=module_manifest_org).search( + custom_sub = entities.Subscription(organization=module_entitlement_manifest_org).search( query={'search': f'name={prod.name}'} ) ak.add_subscriptions(data={'subscription_id': custom_sub[0].id}) + # Enable custom repo in activation key + all_content = 
ak.product_content(data={'content_access_mode_all': '1'})['results'] + for content in all_content: + if content['name'] == custom_repo.name: + content_label = content['label'] + ak.content_override( + data={'content_overrides': [{'content_label': content_label, 'value': '1'}]} + ) return ak @pytest.fixture(scope='module') def host( rhel7_contenthost_module, - module_manifest_org, + module_entitlement_manifest_org, dev_lce, qe_lce, custom_repo, @@ -140,16 +148,16 @@ def host( module_target_sat, ): # Create client machine and register it to satellite with rhel_7_partial_ak - rhel7_contenthost_module.install_katello_ca(module_target_sat) # Register, enable tools repo and install katello-host-tools. - rhel7_contenthost_module.register_contenthost(module_manifest_org.label, module_ak.name) - rhel7_contenthost_module.enable_repo(REPOS['rhst7']['id']) - rhel7_contenthost_module.install_katello_host_tools() + rhel7_contenthost_module.register( + module_entitlement_manifest_org, None, module_ak.name, module_target_sat + ) + rhel7_contenthost_module.enable_repo(REPOS['rhsclient7']['id']) # make a note of time for later wait_for_tasks, and include 4 mins margin of safety. timestamp = (datetime.utcnow() - timedelta(minutes=4)).strftime('%Y-%m-%d %H:%M') # AK added custom repo for errata package, just install it. rhel7_contenthost_module.execute(f'yum install -y {FAKE_4_CUSTOM_PACKAGE}') - rhel7_contenthost_module.execute('katello-package-upload') + rhel7_contenthost_module.execute('subscription-manager repos') # Wait for applicability update event (in case Satellite system slow) module_target_sat.wait_for_tasks( search_query='label = Actions::Katello::Applicability::Hosts::BulkGenerate' @@ -176,7 +184,9 @@ def get_applicable_errata(repo): @pytest.mark.tier4 @pytest.mark.upgrade -def test_positive_noapply_api(module_manifest_org, module_cv, custom_repo, host, dev_lce): +def test_positive_noapply_api( + module_entitlement_manifest_org, module_cv, custom_repo, host, dev_lce +): """Check if api incremental update can be done without actually applying it @@ -189,35 +199,30 @@ def test_positive_noapply_api(module_manifest_org, module_cv, custom_repo, host, Content view has a newer version :parametrized: yes - - :CaseLevel: System """ # Promote CV to new LCE versions = sorted(module_cv.read().version, key=lambda ver: ver.id) cvv = versions[-1].read() cvv.promote(data={'environment_ids': dev_lce.id}) - # Read CV to pick up LCE ID and next_version - module_cv = module_cv.read() - # Get the content view versions and use the recent one. 
API always - # returns the versions in ascending order (last in the list is most recent) - cv_versions = module_cv.version # Get the applicable errata errata_list = get_applicable_errata(custom_repo) assert len(errata_list) > 0 # Apply incremental update using the first applicable errata - entities.ContentViewVersion().incremental_update( + outval = entities.ContentViewVersion().incremental_update( data={ 'content_view_version_environments': [ { - 'content_view_version_id': cv_versions[-1].id, + 'content_view_version_id': cvv.id, 'environment_ids': [dev_lce.id], } ], 'add_content': {'errata_ids': [errata_list[0].id]}, } ) - # Re-read the content view to get the latest versions - module_cv = module_cv.read() - assert len(module_cv.version) > len(cv_versions) + assert outval['result'] == 'success' + assert ( + outval['action'] + == 'Incremental Update of 1 Content View Version(s) with 1 Package(s), and 1 Errata' + ) diff --git a/tests/foreman/longrun/test_n_1_upgrade.py b/tests/foreman/longrun/test_n_1_upgrade.py deleted file mode 100644 index 8251763de78..00000000000 --- a/tests/foreman/longrun/test_n_1_upgrade.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Test class for N-1 upgrade feature - -:Requirement: Satellite one version ahead from Capsule - -:CaseAutomation: ManualOnly - -:CaseLevel: System - -:CaseComponent: Capsule - -:Team: Endeavour - -:TestType: Functional - -:CaseImportance: Critical - -:Upstream: No -""" -import pytest - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_n_1_setup(): - """ - Prepare the environment to test the N-1 Capsule sync scenarios. - - :id: 62c86858-4803-417a-80c7-0070df228355 - - :steps: - 1. Login Satellite with admin rights. - 2. Add the Red Hat and Custom Repository. - 3. Create the lifecycle environment. - 4. Create the Content view and select all the repository. - 5. Publish the content view and promote the content view with created lifecycle - environment. - 6. Create the activation key and add the content view. - 7. Add the subscription, if it is missing in the activation key. - - :expectedresults: Initial setup should complete successfully - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_capsule_optimize_sync(): - """ - Check the N-1 Capsule sync operation when the synchronization type is Optimize. - - :id: 482da3ab-a29f-46cd-91cc-d3bd7937bd11 - - :steps: - 1. Go to the infrastructure --> Capsules - 2. Add the content view in the capsules - 3. Click on the synchronization and select the optimize sync. - 4. Optimize sync starts successfully - - :expectedresults: - 1. Optimize sync should be start and complete successfully. - 2. All the contents properly populate on the capsule side. - 3. Event of the optimize sync populated in the Monitors--> task section. - 4. Check the events in the log section. - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_capsule_complete_sync(): - """ - Check the N-1 Capsule sync operation when the synchronization type is Complete. - - :id: b8f7ecbf-e359-495e-acd9-3e0ba44d8c12 - - :steps: - 1. Go to the infrastructure --> Capsules - 2. Add the content view in the capsules - 3. Click on the synchronization and select the complete sync. - 4. Complete Sync starts successfully - - :expectedresults: - 1. Optimize sync should be start and complete successfully. - 2. All the contents properly populate on the capsule side. - 3. Event of the optimize sync populated in the Monitors--> task section. - 4. Check the events in log section. 
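A note on the incremental-update hunk earlier in this diff: test_positive_noapply_api now asserts on the summary returned by the API call itself. A minimal sketch of that call shape via nailgun; cvv_id, lce_id, and erratum_id are placeholder IDs supplied by the caller:

from nailgun import entities

def incremental_update_one_erratum(cvv_id, lce_id, erratum_id):
    # adds a single erratum to an existing content view version in the given
    # lifecycle environment and returns the task summary to assert on
    return entities.ContentViewVersion().incremental_update(
        data={
            'content_view_version_environments': [
                {'content_view_version_id': cvv_id, 'environment_ids': [lce_id]}
            ],
            'add_content': {'errata_ids': [erratum_id]},
        }
    )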
- """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_content_update_on_capsule_registered_host(): - """ - Check the packages update on the capsule registered content host. - - :id: d331c36f-f69b-4504-89d7-f87c808b1c16 - - :steps: - 1. Go to the hosts --> Content-hosts - 2. Add the content view in the capsules - 3. Click on "Register Content host" - 4. Select the N-1 capsule and copy the command mentioned like - a. curl --insecure --output katello-ca-consumer-latest.noarch.rpm - https://capsule.com/pub/katello-ca-consumer-latest.noarch.rpm - b. yum localinstall katello-ca-consumer-latest.noarch.rpm - 5. Download the katello-ca-consumer certificate and install it on the content host. - 6. Check the Custom repository's contents. - 7. Check the Red Hat repositories contents. - 8. Install the katello-agent from the custom repo. - 9. Install few packages from Red Hat repository. - - :expectedresults: - 1. Content host should be registered successfully from N-1 Capsule. - 2. All the contents should be properly reflected on the registered host side.. - 3. Katello-agent package should be successfully installed via custom repo. - 4. Few Red Hat Packages should be successfully installed via Red Hat repo. - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_provisioning_from_n_1_capsule(): - """ - Check the RHEL7/8 provisioning from N-1 Capsule. - - :id: c65e4812-c461-41b7-975a-6ac34f398232 - - :steps: - 1. Create the activation key of all the required RHEL8 contents. - 2. Create the DNS name, subnet, and hostgroup. - 3. Provision the RHEL8 host from the N-1 Capsule. - - :expectedresults: - 1. Host provisioned successfully. - 2. katello-agent and puppet agent packages should be installed after provisioning - 3. Provisioned host should be registered. - 4. Remote job should be executed on that provisioned host successfully. - 5. Host counts should be updated for the puppet environment. - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_puppet_environment_import(): - """ - Check the puppet environment import from satellite and N-1 capsule - - :id: 59facd73-60be-4eb0-b389-4d2ae6886c35 - - :steps: - 1. import the puppet environment from satellite. - 2. import the puppet environment from N-1 capsule. - - :expectedresults: - 1. Puppet environment should be imported successfully from satellite - as well as capsule. - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_puppet_fact_update(): - """ - Verify the facts successfully fetched from the N-1 Capsule's provisioned host - - :id: dc15da39-c75d-4f1b-8590-3df36c6531de - - :steps: - 1. Register host with N-1 Capsule content source. - 2. Add the activation key with tool report. - 3. Install the puppte agent on the content host. - 4. Update some puppet facts in the smart class - - :expectedresults: - 1. Changed facts should be reflected on the content host - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_ansible_role_import(): - """ - Check the Ansible import operation from N-1 capsule - - :id: 8be51495-2398-45eb-a192-24a4ea09a1d7 - - :steps: - 1. Import ansible roles from N-1 capsule. - - :expectedresults: - 1. Roles import should work from N-1 Capsule. - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def rex_job_execution_from_n_1_capsule(): - """ - Check the updated ansible templates should work as expected from content-host - (part of n-1 capsule). - - :id: d5f4ab23-109f-43f4-934a-cc8f948211f1 - - :steps: - 1. Update the ansible template. - 2. 
Run the ansible roles on N-1 registered content host - - :expectedresults: - 1. Ansible job should be completed successfully. - """ diff --git a/tests/foreman/longrun/test_oscap.py b/tests/foreman/longrun/test_oscap.py index 1a4feef9288..ecf4da2857c 100644 --- a/tests/foreman/longrun/test_oscap.py +++ b/tests/foreman/longrun/test_oscap.py @@ -4,43 +4,28 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SCAPPlugin -:Team: Rocket - -:TestType: Functional +:Team: Endeavour :CaseImportance: High -:Upstream: No """ -import pytest from broker import Broker from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.cli.ansible import Ansible -from robottelo.cli.arfreport import Arfreport -from robottelo.cli.factory import make_hostgroup -from robottelo.cli.factory import make_scap_policy -from robottelo.cli.factory import setup_org_for_a_custom_repo -from robottelo.cli.host import Host -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.proxy import Proxy -from robottelo.cli.scapcontent import Scapcontent from robottelo.config import settings -from robottelo.constants import OSCAP_DEFAULT_CONTENT -from robottelo.constants import OSCAP_PERIOD -from robottelo.constants import OSCAP_PROFILE -from robottelo.constants import OSCAP_TARGET_CORES -from robottelo.constants import OSCAP_TARGET_MEMORY -from robottelo.constants import OSCAP_WEEKDAY +from robottelo.constants import ( + OSCAP_DEFAULT_CONTENT, + OSCAP_PERIOD, + OSCAP_PROFILE, + OSCAP_WEEKDAY, +) from robottelo.exceptions import ProxyError from robottelo.hosts import ContentHost - rhel6_content = OSCAP_DEFAULT_CONTENT['rhel6_content'] rhel7_content = OSCAP_DEFAULT_CONTENT['rhel7_content'] rhel8_content = OSCAP_DEFAULT_CONTENT['rhel8_content'] @@ -51,7 +36,7 @@ } -def fetch_scap_and_profile_id(scap_name, scap_profile): +def fetch_scap_and_profile_id(sat, scap_name, scap_profile): """Extracts the scap ID and scap profile id :param scap_name: Scap title @@ -60,7 +45,7 @@ def fetch_scap_and_profile_id(scap_name, scap_profile): :returns: scap_id and scap_profile_id """ - default_content = Scapcontent.info({'title': scap_name}, output_format='json') + default_content = sat.cli.Scapcontent.info({'title': scap_name}, output_format='json') scap_id = default_content['id'] scap_profile_ids = [ profile['id'] @@ -73,7 +58,7 @@ def fetch_scap_and_profile_id(scap_name, scap_profile): @pytest.fixture(scope='module') def default_proxy(module_target_sat): """Returns default capsule/proxy id""" - proxy = Proxy.list({'search': module_target_sat.hostname})[0] + proxy = module_target_sat.cli.Proxy.list({'search': module_target_sat.hostname})[0] p_features = set(proxy.get('features').split(', ')) if {'Ansible', 'Openscap'}.issubset(p_features): proxy_id = proxy.get('id') @@ -85,10 +70,7 @@ def default_proxy(module_target_sat): @pytest.fixture(scope='module') def lifecycle_env(module_org): """Create lifecycle environment""" - lce_env = entities.LifecycleEnvironment( - organization=module_org, name=gen_string('alpha') - ).create() - return lce_env + return entities.LifecycleEnvironment(organization=module_org, name=gen_string('alpha')).create() @pytest.fixture(scope='module') @@ -98,7 +80,7 @@ def content_view(module_org): @pytest.fixture(scope='module', autouse=True) -def activation_key(module_org, lifecycle_env, content_view): +def activation_key(module_target_sat, module_org, lifecycle_env, content_view): """Create activation keys""" repo_values = [ {'repo': 
settings.repos.satclient_repo.rhel8, 'akname': ak_name['rhel8']}, @@ -111,7 +93,7 @@ def activation_key(module_org, lifecycle_env, content_view): name=repo.get('akname'), environment=lifecycle_env, organization=module_org ).create() # Setup org for a custom repo for RHEL6, RHEL7 and RHEL8. - setup_org_for_a_custom_repo( + module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': repo.get('repo'), 'organization-id': module_org.id, @@ -123,19 +105,21 @@ def activation_key(module_org, lifecycle_env, content_view): @pytest.fixture(scope='module', autouse=True) -def update_scap_content(module_org): +def update_scap_content(module_org, module_target_sat): """Update default scap contents""" for content in rhel8_content, rhel7_content, rhel6_content: - content = Scapcontent.info({'title': content}, output_format='json') + content = module_target_sat.cli.Scapcontent.info({'title': content}, output_format='json') organization_ids = [content_org['id'] for content_org in content.get('organizations', [])] organization_ids.append(module_org.id) - Scapcontent.update({'title': content['title'], 'organization-ids': organization_ids}) + module_target_sat.cli.Scapcontent.update( + {'title': content['title'], 'organization-ids': organization_ids} + ) @pytest.mark.e2e @pytest.mark.upgrade @pytest.mark.tier4 -@pytest.mark.parametrize('distro', ['rhel6', 'rhel7', 'rhel8']) +@pytest.mark.parametrize('distro', ['rhel7', 'rhel8']) def test_positive_oscap_run_via_ansible( module_org, default_proxy, content_view, lifecycle_env, distro, target_sat ): @@ -152,7 +136,7 @@ def test_positive_oscap_run_via_ansible( 1. Create a valid scap content 2. Import Ansible role theforeman.foreman_scap_client 3. Import Ansible Variables needed for the role - 4. Create a scap policy with anisble as deploy option + 4. Create a scap policy with ansible as deploy option 5. Associate the policy with a hostgroup 6. Provision a host using the hostgroup 7. Configure REX and associate the Ansible role to created host @@ -164,10 +148,7 @@ def test_positive_oscap_run_via_ansible( :CaseImportance: Critical """ - if distro == 'rhel6': - rhel_repo = settings.repos.rhel6_os - profile = OSCAP_PROFILE['dsrhel6'] - elif distro == 'rhel7': + if distro == 'rhel7': rhel_repo = settings.repos.rhel7_os profile = OSCAP_PROFILE['security7'] else: @@ -177,7 +158,7 @@ def test_positive_oscap_run_via_ansible( hgrp_name = gen_string('alpha') policy_name = gen_string('alpha') # Creates host_group for rhel7 - make_hostgroup( + target_sat.cli_factory.hostgroup( { 'content-source-id': default_proxy, 'name': hgrp_name, @@ -185,11 +166,11 @@ def test_positive_oscap_run_via_ansible( } ) # Creates oscap_policy. 
- scap_id, scap_profile_id = fetch_scap_and_profile_id(content, profile) - Ansible.roles_import({'proxy-id': default_proxy}) - Ansible.variables_import({'proxy-id': default_proxy}) - role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') - make_scap_policy( + scap_id, scap_profile_id = fetch_scap_and_profile_id(target_sat, content, profile) + target_sat.cli.Ansible.roles_import({'proxy-id': default_proxy}) + target_sat.cli.Ansible.variables_import({'proxy-id': default_proxy}) + role_id = target_sat.cli.Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') + target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_id, 'hostgroups': hgrp_name, @@ -201,30 +182,14 @@ def test_positive_oscap_run_via_ansible( 'organizations': module_org.name, } ) - with Broker( - nick=distro, - host_class=ContentHost, - target_cores=OSCAP_TARGET_CORES, - target_memory=OSCAP_TARGET_MEMORY, - ) as vm: - host_name, _, host_domain = vm.hostname.partition('.') - vm.install_katello_ca(target_sat) - vm.register_contenthost(module_org.name, ak_name[distro]) - assert vm.subscribed - Host.set_parameter( - { - 'host': vm.hostname.lower(), - 'name': 'remote_execution_connect_by_ip', - 'value': 'True', - 'parameter-type': 'boolean', - } - ) - if distro not in ('rhel6', 'rhel7'): + with Broker(nick=distro, host_class=ContentHost, deploy_flavor=settings.flavors.default) as vm: + result = vm.register(module_org, None, ak_name[distro], target_sat) + assert result.status == 0, f'Failed to register host: {result.stderr}' + if distro != 'rhel7': vm.create_custom_repos(**rhel_repo) else: vm.create_custom_repos(**{distro: rhel_repo}) - vm.add_rex_key(satellite=target_sat) - Host.update( + target_sat.cli.Host.update( { 'name': vm.hostname.lower(), 'lifecycle-environment': lifecycle_env.name, @@ -235,17 +200,19 @@ 'ansible-role-ids': role_id, } ) - job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') + job_id = target_sat.cli.Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') target_sat.wait_for_tasks( f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"' ) try: - result = JobInvocation.info({'id': job_id})['success'] + result = target_sat.cli.JobInvocation.info({'id': job_id})['success'] assert result == '1' - except AssertionError: - output = ' '.join(JobInvocation.get_output({'id': job_id, 'host': vm.hostname})) + except AssertionError as err: + output = ' '.join( + target_sat.cli.JobInvocation.get_output({'id': job_id, 'host': vm.hostname}) + ) result = f'host output: {output}' - raise AssertionError(result) + raise AssertionError(result) from err result = vm.run('cat /etc/foreman_scap_client/config.yaml | grep profile') assert result.status == 0 # Runs the actual oscap scan on the vm/clients and @@ -253,7 +220,7 @@ vm.execute_foreman_scap_client() # Assert whether oscap reports are uploaded to # Satellite6. - result = Arfreport.list({'search': f'host={vm.hostname.lower()}'}) + result = target_sat.cli.Arfreport.list({'search': f'host={vm.hostname.lower()}'}) assert result is not None @@ -276,7 +243,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( 1. Create a valid scap content 2. Import Ansible role theforeman.foreman_scap_client 3. Import Ansible Variables needed for the role - 4. Create a scap policy with anisble as deploy option + 4. Create a scap policy with ansible as deploy option 5.
Associate the policy with a hostgroup 6. Provision a host using the hostgroup 7. Harden the host by remediating it with DISA STIG security policy @@ -290,7 +257,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( hgrp_name = gen_string('alpha') policy_name = gen_string('alpha') # Creates host_group for rhel7 - make_hostgroup( + target_sat.cli_factory.hostgroup( { 'content-source-id': default_proxy, 'name': hgrp_name, @@ -299,12 +266,12 @@ def test_positive_oscap_run_via_ansible_bz_1814988( ) # Creates oscap_policy. scap_id, scap_profile_id = fetch_scap_and_profile_id( - OSCAP_DEFAULT_CONTENT['rhel7_content'], OSCAP_PROFILE['dsrhel7'] + target_sat, OSCAP_DEFAULT_CONTENT['rhel7_content'], OSCAP_PROFILE['dsrhel7'] ) - Ansible.roles_import({'proxy-id': default_proxy}) - Ansible.variables_import({'proxy-id': default_proxy}) - role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') - make_scap_policy( + target_sat.cli.Ansible.roles_import({'proxy-id': default_proxy}) + target_sat.cli.Ansible.variables_import({'proxy-id': default_proxy}) + role_id = target_sat.cli.Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') + target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_id, 'hostgroups': hgrp_name, @@ -316,24 +283,9 @@ def test_positive_oscap_run_via_ansible_bz_1814988( 'organizations': module_org.name, } ) - with Broker( - nick='rhel7', - host_class=ContentHost, - target_cores=OSCAP_TARGET_CORES, - target_memory=OSCAP_TARGET_MEMORY, - ) as vm: - host_name, _, host_domain = vm.hostname.partition('.') - vm.install_katello_ca(target_sat) - vm.register_contenthost(module_org.name, ak_name['rhel7']) - assert vm.subscribed - Host.set_parameter( - { - 'host': vm.hostname.lower(), - 'name': 'remote_execution_connect_by_ip', - 'value': 'True', - 'parameter-type': 'boolean', - } - ) + with Broker(nick='rhel7', host_class=ContentHost, deploy_flavor=settings.flavors.default) as vm: + result = vm.register(module_org, None, ak_name['rhel7'], target_sat) + assert result.status == 0, f'Failed to register host: {result.stderr}' vm.create_custom_repos(rhel7=settings.repos.rhel7_os) # Harden the rhel7 client with DISA STIG security policy vm.run('yum install -y scap-security-guide') @@ -342,8 +294,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( '--fetch-remote-resources --results-arf results.xml ' '/usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml', ) - vm.add_rex_key(satellite=target_sat) - Host.update( + target_sat.cli.Host.update( { 'name': vm.hostname.lower(), 'lifecycle-environment': lifecycle_env.name, @@ -354,17 +305,19 @@ def test_positive_oscap_run_via_ansible_bz_1814988( 'ansible-role-ids': role_id, } ) - job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') + job_id = target_sat.cli.Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') target_sat.wait_for_tasks( f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"' ) try: - result = JobInvocation.info({'id': job_id})['success'] + result = target_sat.cli.JobInvocation.info({'id': job_id})['success'] assert result == '1' - except AssertionError: - output = ' '.join(JobInvocation.get_output({'id': job_id, 'host': vm.hostname})) + except AssertionError as err: + output = ' '.join( + target_sat.cli.JobInvocation.get_output({'id': job_id, 'host': vm.hostname}) + ) result = f'host output: {output}' - raise AssertionError(result) + raise AssertionError(result) from err result = vm.run('cat /etc/foreman_scap_client/config.yaml | grep 
profile') assert result.status == 0 # Runs the actual oscap scan on the vm/clients and @@ -372,7 +325,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( vm.execute_foreman_scap_client() # Assert whether oscap reports are uploaded to # Satellite6. - result = Arfreport.list({'search': f'host={vm.hostname.lower()}'}) + result = target_sat.cli.Arfreport.list({'search': f'host={vm.hostname.lower()}'}) assert result is not None @@ -383,7 +336,7 @@ def test_positive_has_arf_report_summary_page(): :id: 25be7898-50c5-4825-adc7-978c7b4e3488 - :Steps: + :steps: 1. Make sure the oscap report with it's corresponding hostname is visible in the UI. 2. Click on the host name to access the oscap report. @@ -391,8 +344,6 @@ def test_positive_has_arf_report_summary_page(): :expectedresults: Oscap ARF reports should have summary page. :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -403,7 +354,7 @@ def test_positive_view_full_report_button(): :id: 5a41916d-66db-4d2f-8261-b83f833189b9 - :Steps: + :steps: 1. Make sure the oscap report with it's corresponding hostname is visible in the UI. 2. Click on the host name to access the oscap report. @@ -412,8 +363,6 @@ def test_positive_view_full_report_button(): actual HTML report. :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -425,7 +374,7 @@ def test_positive_download_xml_button(): :id: 07a5f495-a702-4ca4-b5a4-579a133f9181 - :Steps: + :steps: 1. Make sure the oscap report with it's corresponding hostname is visible in the UI. 2. Click on the host name to access the oscap report. @@ -434,8 +383,6 @@ def test_positive_download_xml_button(): the xml report. :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -447,15 +394,13 @@ def test_positive_select_oscap_proxy(): :id: d56576c8-6fab-4af6-91c1-6a56d9cca94b - :Steps: Choose the Oscap Proxy/capsule appropriately for the host or + :steps: Choose the Oscap Proxy/capsule appropriately for the host or host-groups. :expectedresults: Should have an Oscap-Proxy select box while filling hosts and host-groups form. :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -466,7 +411,7 @@ def test_positive_delete_multiple_arf_reports(): :id: c1a8ce02-f42f-4c48-893d-8f31432b5520 - :Steps: + :steps: 1. Run Oscap scans are run for multiple Hosts. 2. Make sure the oscap reports with it's corresponding hostnames are visible in the UI. @@ -476,8 +421,6 @@ def test_positive_delete_multiple_arf_reports(): :expectedresults: Multiple Oscap ARF reports can be deleted. :CaseAutomation: NotAutomated - - :CaseLevel: System """ @@ -491,6 +434,130 @@ def test_positive_reporting_emails_of_oscap_reports(): :expectedresults: Whether email reporting of oscap reports is possible. :CaseAutomation: NotAutomated + """ + + +@pytest.mark.parametrize('distro', ['rhel8']) +def test_positive_oscap_run_via_local_files( + module_org, default_proxy, content_view, lifecycle_env, distro, module_target_sat +): + """End-to-End Oscap run via local files deployed with ansible + + :id: 0dde5893-540c-4e03-a206-55fccdb2b9ca + + :parametrized: yes + + :customerscenario: true + + :setup: scap content, scap policy , Remote execution - :CaseLevel: System + :steps: + + 1. Create a valid scap content + 2. Import Ansible role theforeman.foreman_scap_client + 3. Create a scap policy with ansible as deploy option + 4. Associate the policy with a hostgroup + 5. Run the Ansible job and then trigger the Oscap job. + 6. Oscap must Utilize the local files for the client scan. 
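The policy setup in this new local-files test reuses fetch_scap_and_profile_id() from earlier in the file. A short usage sketch, assuming a robottelo Satellite object named sat and the OSCAP_* constants imported above:

content = OSCAP_DEFAULT_CONTENT['rhel8_content']
profile = OSCAP_PROFILE['ospp8']
# resolves the SCAP content ID and the matching profile ID via hammer,
# ready to feed into cli_factory.make_scap_policy()
scap_id, scap_profile_id = fetch_scap_and_profile_id(sat, content, profile)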
+ + :expectedresults: Oscap run should happen using the --localfile argument. + + :BZ: 2081777,2211952 + + :CaseImportance: Critical """ + SELECTED_ROLE = 'theforeman.foreman_scap_client' + file_name = 'security-data-oval-com.redhat.rhsa-RHEL8.xml.bz2' + download_url = 'https://www.redhat.com/security/data/oval/v2/RHEL8/rhel-8.oval.xml.bz2' + profile = OSCAP_PROFILE['ospp8'] + content = OSCAP_DEFAULT_CONTENT[f'{distro}_content'] + hgrp_name = gen_string('alpha') + policy_name = gen_string('alpha') + + module_target_sat.cli_factory.hostgroup( + { + 'content-source-id': default_proxy, + 'name': hgrp_name, + 'organizations': module_org.name, + } + ) + # Creates oscap_policy. + scap_id, scap_profile_id = fetch_scap_and_profile_id(module_target_sat, content, profile) + with Broker( + nick=distro, + host_class=ContentHost, + deploy_flavor=settings.flavors.default, + ) as vm: + vm.create_custom_repos( + **{ + 'baseos': settings.repos.rhel8_os.baseos, + 'appstream': settings.repos.rhel8_os.appstream, + 'sat_client': settings.repos['SATCLIENT_REPO'][distro.upper()], + } + ) + result = vm.register(module_org, None, ak_name[distro], module_target_sat) + assert result.status == 0, f'Failed to register host: {result.stderr}' + proxy_id = module_target_sat.nailgun_smart_proxy.id + target_host = vm.nailgun_host + module_target_sat.api.AnsibleRoles().sync( + data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]} + ) + role_id = ( + module_target_sat.api.AnsibleRoles() + .search(query={'search': f'name={SELECTED_ROLE}'})[0] + .id + ) + module_target_sat.api.Host(id=target_host.id).add_ansible_role( + data={'ansible_role_id': role_id} + ) + host_roles = target_host.list_ansible_roles() + assert host_roles[0]['name'] == SELECTED_ROLE + module_target_sat.cli_factory.make_scap_policy( + { + 'scap-content-id': scap_id, + 'hostgroups': hgrp_name, + 'deploy-by': 'ansible', + 'name': policy_name, + 'period': OSCAP_PERIOD['weekly'].lower(), + 'scap-content-profile-id': scap_profile_id, + 'weekday': OSCAP_WEEKDAY['friday'].lower(), + 'organizations': module_org.name, + } + ) + # The file here needs to be present on the client in order + # to perform the scan from the local-files. 
+ vm.execute(f'curl -o {file_name} {download_url}') + module_target_sat.cli.Host.update( + { + 'name': vm.hostname, + 'lifecycle-environment': lifecycle_env.name, + 'content-view': content_view.name, + 'hostgroup': hgrp_name, + 'openscap-proxy-id': default_proxy, + 'organization': module_org.name, + } + ) + + template_id = ( + module_target_sat.api.JobTemplate() + .search(query={'search': 'name="Ansible Roles - Ansible Default"'})[0] + .id + ) + job = module_target_sat.api.JobInvocation().run( + synchronous=False, + data={ + 'job_template_id': template_id, + 'targeting_type': 'static_query', + 'search_query': f'name = {vm.hostname}', + }, + ) + module_target_sat.wait_for_tasks( + f'resource_type = JobInvocation and resource_id = {job["id"]}', + poll_timeout=1000, + ) + assert module_target_sat.api.JobInvocation(id=job['id']).read().succeeded == 1 + assert vm.run('cat /etc/foreman_scap_client/config.yaml | grep profile').status == 0 + # Runs the actual oscap scan on the vm/clients + # TODO: instead of running it on the client itself we should invoke a job from satellite + result = vm.execute_foreman_scap_client() + assert f"WARNING: Using local file '/root/{file_name}'" in result diff --git a/tests/foreman/longrun/test_provisioning_computeresource.py b/tests/foreman/longrun/test_provisioning_computeresource.py deleted file mode 100644 index f6dade88d5a..00000000000 --- a/tests/foreman/longrun/test_provisioning_computeresource.py +++ /dev/null @@ -1,325 +0,0 @@ -""" -:CaseLevel: Acceptance - -:CaseComponent: ComputeResources - -:Team: Rocket - -:TestType: Functional - -:CaseImportance: Critical - -:Upstream: No -""" -import pytest -from fauxfactory import gen_string -from wrapanapi import VMWareSystem - -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.factory import make_compute_resource -from robottelo.cli.factory import make_host -from robottelo.cli.host import Host -from robottelo.config import settings -from robottelo.constants import FOREMAN_PROVIDERS -from robottelo.constants import VMWARE_CONSTANTS - - -@pytest.fixture(scope="module") -def vmware(): - bridge = settings.vlan_networking.bridge - vmware = type("", (), {})() - vmware.vmware_server = settings.vmware.vcenter - vmware.vmware_password = settings.vmware.password - vmware.vmware_username = settings.vmware.username - vmware.vmware_datacenter = settings.vmware.datacenter - vmware.vmware_img_name = settings.vmware.image_name - vmware.vmware_img_arch = settings.vmware.image_arch - vmware.vmware_img_os = settings.vmware.image_os - vmware.vmware_img_user = settings.vmware.image_username - vmware.vmware_img_pass = settings.vmware.image_password - vmware.vmware_vm_name = settings.vmware.vm_name - vmware.current_interface = VMWARE_CONSTANTS.get('network_interfaces') % bridge - vmware.vmware_api = VMWareSystem( - hostname=vmware.vmware_server, - username=vmware.vmware_username, - password=vmware.vmware_password, - ) - vmware.vmware_net_id = vmware.vmware_api.get_network(vmware.current_interface)._moId - return vmware - - -@pytest.fixture(scope="module") -def provisioning(module_org, module_location, module_target_sat): - os = None - if hasattr(settings, 'rhev') and hasattr(settings.rhev, 'image_os') and settings.rhev.image_os: - os = settings.rhev.image_os - provisioning = type("", (), {})() - provisioning.org_name = module_org.name - provisioning.loc_name = module_location.name - provisioning.config_env = module_target_sat.api_factory.configure_provisioning( - compute=True, org=module_org, 
loc=module_location, os=os - ) - provisioning.os_name = provisioning.config_env['os'] - return provisioning - - -@pytest.fixture(scope="module") -def vmware_cr(provisioning, vmware): - return make_compute_resource( - { - 'name': gen_string('alpha'), - 'organizations': provisioning.org_name, - 'locations': provisioning.loc_name, - 'provider': FOREMAN_PROVIDERS['vmware'], - 'server': vmware.vmware_server, - 'user': vmware.vmware_username, - 'password': vmware.vmware_password, - 'datacenter': vmware.vmware_datacenter, - } - ) - - -@pytest.fixture(scope="function") -def tear_down(provisioning): - """Delete the hosts to free the resources""" - yield - hosts = Host.list({'organization': provisioning.org_name}) - for host in hosts: - Host.delete({'id': host['id']}) - - -@pytest.mark.on_premises_provisioning -@pytest.mark.vlan_networking -@pytest.mark.tier3 -def test_positive_provision_rhev_with_host_group(rhev, provisioning, target_sat, tear_down): - """Provision a host on RHEV compute resource with - the help of hostgroup. - - :Requirement: Computeresource RHV - - :CaseComponent: ComputeResources-RHEV - - :Team: Rocket - - :id: ba78868f-5cff-462f-a55d-f6aa4d11db52 - - :setup: Hostgroup and provisioning setup like domain, subnet etc. - - :steps: - - 1. Create a RHEV compute resource. - 2. Create a host on RHEV compute resource using the Hostgroup - 3. Use compute-attributes parameter to specify key-value parameters - regarding the virtual machine. - 4. Provision the host. - - :expectedresults: The host should be provisioned with host group - - :BZ: 1777992 - - :customerscenario: true - - :CaseAutomation: Automated - """ - name = gen_string('alpha') - rhv_cr = ComputeResource.create( - { - 'name': name, - 'provider': 'Ovirt', - 'user': rhev.rhev_username, - 'password': rhev.rhev_password, - 'datacenter': rhev.rhev_datacenter, - 'url': rhev.rhev_url, - 'ovirt-quota': rhev.quota, - 'organizations': provisioning.org_name, - 'locations': provisioning.loc_name, - } - ) - assert rhv_cr['name'] == name - host_name = gen_string('alpha').lower() - host = make_host( - { - 'name': f'{host_name}', - 'root-password': gen_string('alpha'), - 'organization': provisioning.org_name, - 'location': provisioning.loc_name, - 'pxe-loader': 'PXELinux BIOS', - 'hostgroup': provisioning.config_env['host_group'], - 'compute-resource-id': rhv_cr.get('id'), - 'compute-attributes': "cluster={}," - "cores=1," - "memory=1073741824," - "start=1".format(rhev.cluster_id), - 'ip': None, - 'mac': None, - 'interface': f"compute_name=nic1, compute_network={rhev.network_id}", - 'volume': "size_gb=10," "storage_domain={}," "bootable=True".format(rhev.storage_id), - 'provision-method': 'build', - } - ) - hostname = '{}.{}'.format(host_name, provisioning.config_env['domain']) - assert hostname == host['name'] - host_info = Host.info({'name': hostname}) - host_ip = host_info.get('network').get('ipv4-address') - # Check on RHV, if VM exists - assert rhev.rhv_api.does_vm_exist(hostname) - # Get the information of created VM - rhv_vm = rhev.rhv_api.get_vm(hostname) - # Assert of Satellite mac address for VM and Mac of VM created is same - assert host_info.get('network').get('mac') == rhv_vm.get_nics()[0].mac.address - # Start to run a ping check if network was established on VM - target_sat.ping_host(host=host_ip) - - -@pytest.mark.on_premises_provisioning -@pytest.mark.vlan_networking -@pytest.mark.tier3 -def test_positive_provision_vmware_with_host_group( - vmware, provisioning, tear_down, vmware_cr, target_sat -): - """Provision a host on 
vmware compute resource with - the help of hostgroup. - - :Requirement: Computeresource Vmware - - :CaseComponent: ComputeResources-VMWare - - :Team: Rocket - - :customerscenario: true - - :id: ae4d5949-f0e6-44ca-93b6-c5241a02b64b - - :setup: - - 1. Vaild vmware hostname ,credentials. - 2. Configure provisioning setup. - 3. Configure host group setup. - - :steps: - - 1. Go to "Hosts --> New host". - 2. Assign the host group to the host. - 3. Select the Deploy on as vmware Compute Resource. - 4. Provision the host. - - :expectedresults: The host should be provisioned with host group - - :CaseAutomation: Automated - - :CaseLevel: System - """ - host_name = gen_string('alpha').lower() - host = make_host( - { - 'name': f'{host_name}', - 'root-password': gen_string('alpha'), - 'organization': provisioning.org_name, - 'location': provisioning.loc_name, - 'hostgroup': provisioning.config_env['host_group'], - 'pxe-loader': 'PXELinux BIOS', - 'compute-resource-id': vmware_cr.get('id'), - 'compute-attributes': "cpus=2," - "corespersocket=2," - "memory_mb=4028," - "cluster={}," - "path=/Datacenters/{}/vm/QE," - "guest_id=rhel7_64Guest," - "scsi_controller_type=VirtualLsiLogicController," - "hardware_version=Default," - "start=1".format(VMWARE_CONSTANTS['cluster'], vmware.vmware_datacenter), - 'ip': None, - 'mac': None, - 'interface': "compute_network={}," - "compute_type=VirtualVmxnet3".format(vmware.vmware_net_id), - 'volume': "name=Hard disk," - "size_gb=10," - "thin=true," - "eager_zero=false," - "datastore={}".format(VMWARE_CONSTANTS['datastore'].split()[0]), - 'provision-method': 'build', - } - ) - hostname = '{}.{}'.format(host_name, provisioning.config_env['domain']) - assert hostname == host['name'] - # Check on Vmware, if VM exists - assert vmware.vmware_api.does_vm_exist(hostname) - host_info = Host.info({'name': hostname}) - host_ip = host_info.get('network').get('ipv4-address') - # Start to run a ping check if network was established on VM - target_sat.ping_host(host=host_ip) - - -@pytest.mark.on_premises_provisioning -@pytest.mark.vlan_networking -@pytest.mark.tier3 -def test_positive_provision_vmware_with_host_group_bootdisk( - vmware, provisioning, tear_down, vmware_cr, target_sat -): - """Provision a bootdisk based host on VMWare compute resource. - - :Requirement: Computeresource Vmware - - :CaseComponent: ComputeResources-VMWare - - :id: bc5f457d-c29a-4c62-bbdc-af8f4f813519 - - :bz: 1679225 - - :setup: - - 1. Vaild VMWare hostname, credentials. - 2. Configure provisioning setup. - 3. Configure host group setup. - - :steps: Using Hammer CLI, Provision a VM on VMWare with hostgroup and - provisioning method as `bootdisk`. 
- - :expectedresults: The host should be provisioned with provisioning type bootdisk - - :CaseAutomation: Automated - - :customerscenario: true - - :CaseLevel: System - """ - host_name = gen_string('alpha').lower() - host = make_host( - { - 'name': f'{host_name}', - 'root-password': gen_string('alpha'), - 'organization': provisioning.org_name, - 'location': provisioning.loc_name, - 'hostgroup': provisioning.config_env['host_group'], - 'pxe-loader': 'PXELinux BIOS', - 'compute-resource-id': vmware_cr.get('id'), - 'content-source-id': '1', - 'compute-attributes': "cpus=2," - "corespersocket=2," - "memory_mb=4028," - "cluster={}," - "path=/Datacenters/{}/vm/QE," - "guest_id=rhel7_64Guest," - "scsi_controllers=`type=VirtualLsiLogicController,key=1000'," - "hardware_version=Default," - "start=1".format(VMWARE_CONSTANTS['cluster'], vmware.vmware_datacenter), - "ip": None, - "mac": None, - 'interface': "compute_network={}," - "compute_type=VirtualVmxnet3".format(vmware.vmware_net_id), - 'volume': "name=Hard disk," - "size_gb=10," - "thin=true," - "eager_zero=false," - "datastore={}".format(VMWARE_CONSTANTS['datastore'].split()[0]), - 'provision-method': 'bootdisk', - } - ) - hostname = '{}.{}'.format(host_name, provisioning.config_env['domain']) - assert hostname == host['name'] - # Check on Vmware, if VM exists - assert vmware.vmware_api.does_vm_exist(hostname) - host_info = Host.info({'name': hostname}) - host_ip = host_info.get('network').get('ipv4-address') - # Start to run a ping check if network was established on VM - target_sat.ping_host(host=host_ip) diff --git a/tests/foreman/maintain/test_advanced.py b/tests/foreman/maintain/test_advanced.py index ab027452d48..68b00e5c071 100644 --- a/tests/foreman/maintain/test_advanced.py +++ b/tests/foreman/maintain/test_advanced.py @@ -4,73 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest import yaml -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings -from robottelo.constants import MAINTAIN_HAMMER_YML - -pytestmark = pytest.mark.destructive - - -# Common repositories for Satellite and Capsule -common_repos = ['rhel-8-for-x86_64-baseos-rpms', 'rhel-8-for-x86_64-appstream-rpms'] - -# Satellite repositories -sat_611_repos = [ - 'satellite-6.11-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.11-for-rhel-8-x86_64-rpms', -] + common_repos - -sat_612_repos = [ - 'satellite-6.12-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.12-for-rhel-8-x86_64-rpms', -] + common_repos - -sat_613_repos = [ - 'satellite-6.13-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.13-for-rhel-8-x86_64-rpms', -] + common_repos - -# Capsule repositories -cap_611_repos = [ - 'satellite-capsule-6.11-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.11-for-rhel-8-x86_64-rpms', -] + common_repos - -cap_612_repos = [ - 'satellite-capsule-6.12-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.12-for-rhel-8-x86_64-rpms', -] + common_repos - -cap_613_repos = [ - 'satellite-capsule-6.13-for-rhel-8-x86_64-rpms', - 'satellite-maintenance-6.13-for-rhel-8-x86_64-rpms', -] + common_repos - -sat_repos = { - '6.11': sat_611_repos, - '6.12': sat_612_repos, - '6.13': sat_613_repos, -} -cap_repos = { - '6.11': cap_611_repos, - '6.12': cap_612_repos, - '6.13': cap_613_repos, -} +from robottelo.config import robottelo_tmp_dir, settings +from robottelo.constants import 
MAINTAIN_HAMMER_YML, SAT_NON_GA_VERSIONS +from robottelo.hosts import get_sat_rhel_version, get_sat_version + +sat_x_y_release = f'{get_sat_version().major}.{get_sat_version().minor}' + + +def get_satellite_capsule_repos( + x_y_release=sat_x_y_release, product='satellite', os_major_ver=None +): + if os_major_ver is None: + os_major_ver = get_sat_rhel_version().major + if product == 'capsule': + product = 'satellite-capsule' + return [ + f'{product}-{x_y_release}-for-rhel-{os_major_ver}-x86_64-rpms', + f'satellite-maintenance-{x_y_release}-for-rhel-{os_major_ver}-x86_64-rpms', + f'rhel-{os_major_ver}-for-x86_64-baseos-rpms', + f'rhel-{os_major_ver}-for-x86_64-appstream-rpms', + ] def test_positive_advanced_run_service_restart(sat_maintain): @@ -112,6 +75,22 @@ def test_positive_advanced_run_hammer_setup(request, sat_maintain): :BZ: 1830355 """ + + @request.addfinalizer + def _finalize(): + result = sat_maintain.execute( + f'hammer -u admin -p admin user update --login admin --password {default_admin_pass}' + ) + assert result.status == 0 + # Make default admin creds available in MAINTAIN_HAMMER_YML + assert sat_maintain.cli.Advanced.run_hammer_setup().status == 0 + # Make sure default password available in MAINTAIN_HAMMER_YML + result = sat_maintain.execute( + f"grep -i ':password: {default_admin_pass}' {MAINTAIN_HAMMER_YML}" + ) + assert result.status == 0 + assert default_admin_pass in result.stdout + default_admin_pass = settings.server.admin_password result = sat_maintain.execute( f'hammer -u admin -p {default_admin_pass} user update --login admin --password admin' @@ -137,22 +116,9 @@ def test_positive_advanced_run_hammer_setup(request, sat_maintain): assert result.status == 0 assert 'admin' in result.stdout - @request.addfinalizer - def _finalize(): - result = sat_maintain.execute( - f'hammer -u admin -p admin user update --login admin --password {default_admin_pass}' - ) - assert result.status == 0 - # Make default admin creds available in MAINTAIN_HAMMER_YML - assert sat_maintain.cli.Advanced.run_hammer_setup().status == 0 - # Make sure default password available in MAINTAIN_HAMMER_YML - result = sat_maintain.execute( - f"grep -i ':password: {default_admin_pass}' {MAINTAIN_HAMMER_YML}" - ) - assert result.status == 0 - assert default_admin_pass in result.stdout - +@pytest.mark.e2e +@pytest.mark.upgrade def test_positive_advanced_run_packages(request, sat_maintain): """Packages install/downgrade/check-update/update using advanced procedure run @@ -166,6 +132,12 @@ def test_positive_advanced_run_packages(request, sat_maintain): :expectedresults: packages should install/downgrade/check-update/update. 
""" + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y walrus').status == 0 + sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') + # Setup custom_repo and install walrus package sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url) result = sat_maintain.cli.Advanced.run_packages_install( @@ -195,11 +167,6 @@ def test_positive_advanced_run_packages(request, sat_maintain): assert result.status == 0 assert 'walrus-5.21-1' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') - @pytest.mark.parametrize( 'tasks_state', @@ -236,20 +203,6 @@ def test_positive_advanced_run_foreman_task_resume(sat_maintain): assert 'FAIL' not in result.stdout -def test_positive_advanced_run_foreman_tasks_ui_investigate(sat_maintain): - """Run foreman-tasks-ui-investigate using advanced procedure run - - :id: 3b4f69c6-c0a1-42e3-a099-8a6e26280d17 - - :steps: - 1. Run satellite-maintain advanced procedure run foreman-tasks-ui-investigate - - :expectedresults: procedure foreman-tasks-ui-investigate should work. - """ - result = sat_maintain.cli.Advanced.run_foreman_tasks_ui_investigate(env_var='echo " " | ') - assert result.status == 0 - - @pytest.mark.e2e def test_positive_advanced_run_sync_plan(setup_sync_plan, sat_maintain): """Run sync-plans-enable and sync-plans-disable using advanced procedure run @@ -283,49 +236,6 @@ def test_positive_advanced_run_sync_plan(setup_sync_plan, sat_maintain): assert len(enable_sync_ids) == len(data_yml[':default'][':sync_plans'][':enabled']) -@pytest.mark.include_capsule -def test_positive_advanced_by_tag_check_migrations(sat_maintain): - """Run pre-migrations and post-migrations using advanced procedure by-tag - - :id: 65cacca0-f142-4a63-a644-01f76120f16c - - :parametrized: yes - - :steps: - 1. Run satellite-maintain advanced procedure by-tag pre-migrations - 2. Run satellite-maintain advanced procedure by-tag post-migrations - - :expectedresults: procedures of pre-migrations tag and post-migrations tag should work. - """ - result = sat_maintain.cli.AdvancedByTag.pre_migrations() - assert 'FAIL' not in result.stdout - rules = sat_maintain.execute('nft list tables') - assert 'FOREMAN_MAINTAIN_TABLE' in rules.stdout - - result = sat_maintain.cli.AdvancedByTag.post_migrations() - assert 'FAIL' not in result.stdout - rules = sat_maintain.execute('nft list tables') - assert 'FOREMAN_MAINTAIN_TABLE' not in rules.stdout - - -@pytest.mark.include_capsule -def test_positive_advanced_by_tag_restore_confirmation(sat_maintain): - """Run restore_confirmation using advanced procedure by-tag - - :id: f9e10352-04fb-49ba-8346-5b02e64fd028 - - :parametrized: yes - - :steps: - 1. Run satellite-maintain advanced procedure by-tag restore - - :expectedresults: procedure restore_confirmation should work. - """ - result = sat_maintain.cli.AdvancedByTag.restore(options={'assumeyes': True}) - assert 'FAIL' not in result.stdout - assert result.status == 0 - - def test_positive_sync_plan_with_hammer_defaults(request, sat_maintain, module_org): """Verify that sync plan is disabled and enabled with hammer defaults set. 
@@ -342,7 +252,8 @@ def test_positive_sync_plan_with_hammer_defaults(request, sat_maintain, module_o

     :customerscenario: true
     """
-    sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': 1})
+
+    sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id})

     sync_plans = []
     for name in ['plan1', 'plan2']:
@@ -350,6 +261,16 @@
             sat_maintain.api.SyncPlan(enabled=True, name=name, organization=module_org).create()
         )

+    @request.addfinalizer
+    def _finalize():
+        sat_maintain.cli.Defaults.delete({'param-name': 'organization_id'})
+        sync_plans[1].delete()
+        sync_plan = sat_maintain.api.SyncPlan(organization=module_org.id).search(
+            query={'search': f'name="{sync_plans[0].name}"'}
+        )
+        if sync_plan:
+            sync_plans[0].delete()
+
     result = sat_maintain.cli.Advanced.run_sync_plans_disable()
     assert 'FAIL' not in result.stdout
     assert result.status == 0
@@ -360,11 +281,6 @@
     assert 'FAIL' not in result.stdout
     assert result.status == 0

-    @request.addfinalizer
-    def _finalize():
-        sat_maintain.cli.Defaults.delete({'param-name': 'organization_id'})
-        sync_plans[1].delete()
-

 @pytest.mark.e2e
 def test_positive_satellite_repositories_setup(sat_maintain):
     """Verify that required repositories gets enabled for satellite install/upgrade
@@ -379,36 +295,21 @@

     :expectedresults: Required Satellite repositories for install/upgrade should get enabled
     """
-    supported_versions = ['6.11', '6.12']
-    for ver in supported_versions:
-        result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': ver})
+    sat_version = ".".join(sat_maintain.version.split('.')[0:2])
+    result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': sat_version})
+    if sat_version not in SAT_NON_GA_VERSIONS:
         assert result.status == 0
         assert 'FAIL' not in result.stdout
         result = sat_maintain.execute('yum repolist')
-        for repo in sat_repos[ver]:
+        for repo in get_satellite_capsule_repos(sat_version):
             assert repo in result.stdout

-    # 6.13 till not GA
-    result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': '6.13'})
-    assert result.status == 1
-    assert 'FAIL' in result.stdout
-    for repo in sat_repos['6.13']:
-        assert repo in result.stdout
-
-    # Verify that all required beta repositories gets enabled
-    # maintain beta repo is unavailable for EL8 https://bugzilla.redhat.com/show_bug.cgi?id=2106750
-    sat_beta_repo = ['satellite-6-beta-for-rhel-8-x86_64-rpms'] + common_repos
-    missing_beta_el8_repos = ['satellite-maintenance-6-beta-for-rhel-8-x86_64-rpms']
-    result = sat_maintain.cli.Advanced.run_repositories_setup(
-        options={'version': '6.12'}, env_var='FOREMAN_MAINTAIN_USE_BETA=1'
-    )
-    assert result.status != 0
-    assert 'FAIL' in result.stdout
-    for repo in missing_beta_el8_repos:
-        assert f"Error: '{repo}' does not match a valid repository ID" in result.stdout
-    result = sat_maintain.execute('yum repolist')
-    for repo in sat_beta_repo:
-        assert repo in result.stdout
+    # for non-GA versions
+    else:
+        assert result.status == 1
+        assert 'FAIL' in result.stdout
+        for repo in get_satellite_capsule_repos(sat_version):
+            assert repo in result.stdout


 @pytest.mark.e2e
@@ -427,36 +328,17 @@ def test_positive_capsule_repositories_setup(sat_maintain):

     :expectedresults: Required Capsule repositories should get enabled
     """
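Both repository tests now derive the X.Y release from the running server and branch on SAT_NON_GA_VERSIONS instead of looping over a hard-coded list. The derivation itself is a one-liner; for a hypothetical version string:

# e.g. if sat_maintain.version were '6.14.2' (hypothetical value):
sat_version = ".".join('6.14.2'.split('.')[0:2])
# sat_version == '6.14'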
-    supported_versions = ['6.11', '6.12']
-    for ver in supported_versions:
-        result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': ver})
+    sat_version = ".".join(sat_maintain.version.split('.')[0:2])
+    result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': sat_version})
+    if sat_version not in SAT_NON_GA_VERSIONS:
         assert result.status == 0
         assert 'FAIL' not in result.stdout
         result = sat_maintain.execute('yum repolist')
-        for repo in cap_repos[ver]:
+        for repo in get_satellite_capsule_repos(sat_version, 'capsule'):
+            assert repo in result.stdout
+    # for non-GA versions
+    else:
+        assert result.status == 1
+        assert 'FAIL' in result.stdout
+        for repo in get_satellite_capsule_repos(sat_version, 'capsule'):
             assert repo in result.stdout
-
-    # 6.13 till not GA
-    result = sat_maintain.cli.Advanced.run_repositories_setup(options={'version': '6.13'})
-    assert result.status == 1
-    assert 'FAIL' in result.stdout
-    for repo in cap_repos['6.13']:
-        assert repo in result.stdout
-
-    # Verify that all required beta repositories gets enabled
-    # maintain beta repo is unavailable for EL8 https://bugzilla.redhat.com/show_bug.cgi?id=2106750
-    cap_beta_repo = common_repos
-    missing_beta_el8_repos = [
-        'satellite-capsule-6-beta-for-rhel-8-x86_64-rpms',
-        'satellite-maintenance-6-beta-for-rhel-8-x86_64-rpms',
-    ]
-    result = sat_maintain.cli.Advanced.run_repositories_setup(
-        options={'version': '6.12'}, env_var='FOREMAN_MAINTAIN_USE_BETA=1'
-    )
-    assert result.status != 0
-    assert 'FAIL' in result.stdout
-    for repo in missing_beta_el8_repos:
-        assert f"Error: '{repo}' does not match a valid repository ID" in result.stdout
-    result = sat_maintain.execute('yum repolist')
-    for repo in cap_beta_repo:
-        assert repo in result.stdout
diff --git a/tests/foreman/maintain/test_backup_restore.py b/tests/foreman/maintain/test_backup_restore.py
index 5d0ea7e0e5d..f81b268e844 100644
--- a/tests/foreman/maintain/test_backup_restore.py
+++ b/tests/foreman/maintain/test_backup_restore.py
@@ -4,22 +4,18 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
-:CaseComponent: ForemanMaintain
+:CaseComponent: SatelliteMaintain

 :Team: Platform

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
+
 """
 import re

-import pytest
 from fauxfactory import gen_string
+import pytest

 from robottelo import constants
 from robottelo.config import settings
@@ -33,10 +29,10 @@

 BASIC_FILES = {"config_files.tar.gz", ".config.snar", "metadata.yml"}
+OFFLINE_FILES = {"pgsql_data.tar.gz", ".postgres.snar"} | BASIC_FILES
+ONLINE_SAT_FILES = {"candlepin.dump", "foreman.dump", "pulpcore.dump"} | BASIC_FILES
+ONLINE_CAPS_FILES = {"pulpcore.dump"} | BASIC_FILES
 CONTENT_FILES = {"pulp_data.tar", ".pulp.snar"}
-OFFLINE_FILES = {"pgsql_data.tar.gz", ".postgres.snar"}
-ONLINE_SAT_FILES = {"candlepin.dump", "foreman.dump", "pulpcore.dump"}
-ONLINE_CAPS_FILES = {"pulpcore.dump"}

 NODIR_MSG = "ERROR: parameter 'BACKUP_DIR': no value provided"
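With BASIC_FILES now folded into each of the database-specific sets above, per-test expectations reduce to plain unions. As a worked example derived purely from those definitions, an offline Satellite backup that includes pulp content should contain:

# Derived from the constants above (not new data): OFFLINE_FILES already
# includes BASIC_FILES, so adding CONTENT_FILES gives the full offline set.
assert OFFLINE_FILES | CONTENT_FILES == {
    "pgsql_data.tar.gz", ".postgres.snar",  # offline database tarball
    "config_files.tar.gz", ".config.snar", "metadata.yml",  # BASIC_FILES
    "pulp_data.tar", ".pulp.snar",  # CONTENT_FILES
}

The refactored helper below then just picks the right base set and unions in CONTENT_FILES unless skip_pulp is set:

@@ -46,22 +42,18 @@
 assert_msg = "Some required backup files are missing"


-def get_exp_files(sat_maintain, backup_type):
+def get_exp_files(sat_maintain, backup_type, skip_pulp=False):
     if type(sat_maintain) is Satellite:
-        if sat_maintain.is_remote_db():
-            expected_files = BASIC_FILES | ONLINE_SAT_FILES
-        else:
-            expected_files = (
-                BASIC_FILES | OFFLINE_FILES
-                if backup_type == 'offline'
-                else BASIC_FILES | ONLINE_SAT_FILES
-            )
-    else:
+        # for a remote db you always get an online backup, regardless of the backup type specified
         expected_files = (
-            BASIC_FILES | OFFLINE_FILES
-            if backup_type == 'offline'
-            else BASIC_FILES | ONLINE_CAPS_FILES
+            ONLINE_SAT_FILES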
if backup_type == 'online' or sat_maintain.is_remote_db() + else OFFLINE_FILES ) + else: + expected_files = ONLINE_CAPS_FILES if backup_type == 'online' else OFFLINE_FILES + if not skip_pulp: + expected_files = expected_files | CONTENT_FILES return expected_files @@ -101,7 +93,7 @@ def test_positive_backup_preserve_directory( files = [i for i in files if not re.compile(r'^\.*$').search(i)] expected_files = get_exp_files(sat_maintain, backup_type) - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + assert set(files).issuperset(expected_files), assert_msg @pytest.mark.include_capsule @@ -127,6 +119,10 @@ def test_positive_backup_split_pulp_tar( 1. backup succeeds 2. expected files are present in the backup 3. size of the pulp_data.tar smaller than provided value + + :customerscenario: true + + :BZ: 2164413 """ subdir = f'{BACKUP_DIR}backup-{gen_string("alpha")}' instance = 'satellite' if type(sat_maintain) is Satellite else 'capsule' @@ -145,7 +141,7 @@ def test_positive_backup_split_pulp_tar( files = [i for i in files if not re.compile(r'^\.*$').search(i)] expected_files = get_exp_files(sat_maintain, backup_type) - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + assert set(files).issuperset(expected_files), assert_msg # Check the split works result = sat_maintain.execute(f'du {backup_dir}/pulp_data.tar') @@ -155,7 +151,7 @@ def test_positive_backup_split_pulp_tar( @pytest.mark.include_capsule @pytest.mark.parametrize('backup_type', ['online', 'offline']) -def test_positive_backup_caspule_features( +def test_positive_backup_capsule_features( sat_maintain, setup_backup_tests, module_synced_repos, backup_type ): """Take a backup with capsule features as dns, tftp, dhcp, openscap @@ -189,7 +185,7 @@ def test_positive_backup_caspule_features( files = [i for i in files if not re.compile(r'^\.*$').search(i)] expected_files = get_exp_files(sat_maintain, backup_type) - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + assert set(files).issuperset(expected_files), assert_msg @pytest.mark.include_capsule @@ -272,12 +268,12 @@ def test_positive_backup_offline_logical(sat_maintain, setup_backup_tests, modul if type(sat_maintain) is Satellite: if sat_maintain.is_remote_db(): - expected_files = BASIC_FILES | ONLINE_SAT_FILES + expected_files = ONLINE_SAT_FILES | CONTENT_FILES else: - expected_files = BASIC_FILES | OFFLINE_FILES | ONLINE_SAT_FILES + expected_files = OFFLINE_FILES | ONLINE_SAT_FILES | CONTENT_FILES else: - expected_files = BASIC_FILES | OFFLINE_FILES | ONLINE_CAPS_FILES - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + expected_files = OFFLINE_FILES | ONLINE_CAPS_FILES | CONTENT_FILES + assert set(files).issuperset(expected_files), assert_msg @pytest.mark.include_capsule @@ -364,29 +360,7 @@ def test_negative_backup_maintenance_mode(sat_maintain, setup_backup_tests): @pytest.mark.include_capsule -def test_negative_restore_nodir(sat_maintain, setup_backup_tests): - """Try to run restore with no source dir provided - - :id: dadc4e32-c0b8-427f-a449-4ae66fe09268 - - :parametrized: yes - - :steps: - 1. try to run restore with no path argument provided - - :expectedresults: - 1. 
should fail with appropriate error message - """ - result = sat_maintain.cli.Restore.run( - backup_dir='', - options={'assumeyes': True, 'plaintext': True}, - ) - assert result.status != 0 - assert NODIR_MSG in str(result.stderr) - - -@pytest.mark.include_capsule -def test_negative_restore_baddir(sat_maintain, setup_backup_tests): +def test_negative_restore_baddir_nodir(sat_maintain, setup_backup_tests): """Try to run restore with non-existing source dir provided :id: 65ccc0d0-ca43-4877-9b29-50037e378ca5 @@ -395,6 +369,7 @@ def test_negative_restore_baddir(sat_maintain, setup_backup_tests): :steps: 1. try to run restore with non-existing path provided + 2. try to run restore without a backup dir :expectedresults: 1. should fail with appropriate error message @@ -406,6 +381,12 @@ def test_negative_restore_baddir(sat_maintain, setup_backup_tests): ) assert result.status != 0 assert BADDIR_MSG in str(result.stdout) + result = sat_maintain.cli.Restore.run( + backup_dir='', + options={'assumeyes': True, 'plaintext': True}, + ) + assert result.status != 0 + assert NODIR_MSG in str(result.stderr) @pytest.mark.parametrize('backup_type', ['online', 'offline']) @@ -459,7 +440,7 @@ def test_positive_puppet_backup_restore( files = [i for i in files if not re.compile(r'^\.*$').search(i)] expected_files = get_exp_files(sat_maintain, backup_type) - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + assert set(files).issuperset(expected_files), assert_msg # Run restore sat_maintain.execute('rm -rf /var/lib/pulp/media/artifact') @@ -491,6 +472,7 @@ def test_positive_puppet_backup_restore( @pytest.mark.e2e +@pytest.mark.upgrade @pytest.mark.include_capsule @pytest.mark.parametrize('skip_pulp', [False, True], ids=['include_pulp', 'skip_pulp']) @pytest.mark.parametrize('backup_type', ['online', 'offline']) @@ -523,7 +505,9 @@ def test_positive_backup_restore( 4. system health check succeeds 5. 
content is present after restore - :BZ: 2172540 + :customerscenario: true + + :BZ: 2172540, 1978764, 1979045 """ subdir = f'{BACKUP_DIR}backup-{gen_string("alpha")}' instance = 'satellite' if type(sat_maintain) is Satellite else 'capsule' @@ -531,6 +515,7 @@ def test_positive_backup_restore( backup_dir=subdir, backup_type=backup_type, options={'assumeyes': True, 'plaintext': True, 'skip-pulp-content': skip_pulp}, + timeout='30m', ) assert result.status == 0 assert 'FAIL' not in result.stdout @@ -540,11 +525,8 @@ def test_positive_backup_restore( files = sat_maintain.execute(f'ls -a {backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] - expected_files = get_exp_files(sat_maintain, backup_type) - if not skip_pulp: - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg - else: - assert set(files).issuperset(expected_files), assert_msg + expected_files = get_exp_files(sat_maintain, backup_type, skip_pulp) + assert set(files).issuperset(expected_files), assert_msg # Run restore if not skip_pulp: @@ -562,6 +544,10 @@ def test_positive_backup_restore( ) assert result.status == 0 + result = sat_maintain.cli.Service.status() + assert 'FAIL' not in result.stdout + assert result.status == 0 + # Check that content is present after restore if type(sat_maintain) is Satellite: repo = sat_maintain.api.Repository().search( @@ -642,10 +628,8 @@ def test_positive_backup_restore_incremental( files = sat_maintain.execute(f'ls -a {inc_backup_dir}').stdout.split('\n') files = [i for i in files if not re.compile(r'^\.*$').search(i)] - expected_files = ( - BASIC_FILES | OFFLINE_FILES if backup_type == 'offline' else BASIC_FILES | ONLINE_SAT_FILES - ) - assert set(files).issuperset(expected_files | CONTENT_FILES), assert_msg + expected_files = get_exp_files(sat_maintain, backup_type) + assert set(files).issuperset(expected_files), assert_msg # restore initial backup and check system health result = sat_maintain.cli.Restore.run( @@ -687,30 +671,3 @@ def test_positive_backup_restore_incremental( query={'search': f'name="{secondary_repo.name}"'} )[0] assert repo.id == secondary_repo.id - - -@pytest.mark.stubbed -def test_positive_backup_restore_snapshot(): - """Take the snapshot backup of a server, restore it, check for content - - :id: dcf3b815-97ed-4c2e-9f2d-5eedd8591c98 - - :setup: - 1. satellite installed on an LVM-based storage with sufficient free extents - - :steps: - 1. create the snapshot backup (with/without pulp) - 2. check that appropriate files are created - 3. restore the backup (installer --reset-data is run in this step) - 4. check system health - 5. check the content was restored - - :expectedresults: - 1. backup succeeds - 2. expected files are present in the backup - 3. restore succeeds - 4. system health check succeeds - 5. 
content is present after restore - - :CaseAutomation: NotAutomated - """ diff --git a/tests/foreman/maintain/test_health.py b/tests/foreman/maintain/test_health.py index 72f6656b0f9..e8bc468d410 100644 --- a/tests/foreman/maintain/test_health.py +++ b/tests/foreman/maintain/test_health.py @@ -4,25 +4,20 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import time -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings - +from robottelo.utils.installer import InstallerCommand upstream_url = { 'foreman_repo': 'https://yum.theforeman.org/releases/nightly/el8/x86_64/', @@ -83,6 +78,7 @@ def test_positive_list_health_check_by_tags(sat_maintain): @pytest.mark.e2e +@pytest.mark.upgrade @pytest.mark.include_capsule def test_positive_health_check(sat_maintain): """Verify satellite-maintain health check @@ -95,11 +91,17 @@ def test_positive_health_check(sat_maintain): 1. Run satellite-maintain health check :expectedresults: Health check should pass. + + :BZ: 1956210 + + :customerscenario: true """ result = sat_maintain.cli.Health.check(options={'assumeyes': True}) assert result.status == 0 if 'paused tasks in the system' not in result.stdout: assert 'FAIL' not in result.stdout + result = sat_maintain.execute('tail /var/log/foreman-proxy/proxy.log') + assert 'sslv3 alert bad certificate' not in result.stdout @pytest.mark.include_capsule @@ -118,7 +120,12 @@ def test_positive_health_check_by_tags(sat_maintain): result = sat_maintain.cli.Health.list_tags().stdout output = [i.split("]\x1b[0m")[0] for i in result.split("\x1b[36m[") if i] for tag in output: - assert sat_maintain.cli.Health.check(options={'tags': tag, 'assumeyes': True}).status == 0 + assert ( + sat_maintain.cli.Health.check( + options={'tags': tag, 'assumeyes': True, 'whitelist': 'non-rh-packages'} + ).status + == 0 + ) @pytest.mark.include_capsule @@ -134,7 +141,9 @@ def test_positive_health_check_pre_upgrade(sat_maintain): :expectedresults: Pre-upgrade health checks should pass. """ - result = sat_maintain.cli.Health.check(options={'tags': 'pre-upgrade'}) + result = sat_maintain.cli.Health.check( + options={'tags': 'pre-upgrade', 'whitelist': 'non-rh-packages'} + ) assert result.status == 0 assert 'FAIL' not in result.stdout @@ -157,30 +166,30 @@ def test_positive_health_check_server_ping(sat_maintain): assert 'FAIL' not in result.stdout -def test_negative_health_check_server_ping(sat_maintain, request): - """Verify hammer ping check +@pytest.mark.usefixtures('start_satellite_services') +def test_health_check_server_ping(sat_maintain): + """Verify health check server-ping :id: ecdc5bfb-2adf-49f6-948d-995dae34bcd3 :steps: - 1. Run satellite maintain service stop - 2. Run satellite-maintain health check --label server-ping - 3. Run satellite maintain service start + 1. Run satellite-maintain health check --label server-ping + 2. Run satellite-maintain service stop + 3. 
Run satellite-maintain health check --label server-ping :expectedresults: server-ping health check should pass """ + result = sat_maintain.cli.Health.check(options={'label': 'server-ping', 'assumeyes': True}) + assert result.status == 0 + assert 'FAIL' not in result.stdout assert sat_maintain.cli.Service.stop().status == 0 result = sat_maintain.cli.Health.check(options={'label': 'server-ping', 'assumeyes': True}) assert result.status == 0 assert 'FAIL' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.cli.Service.start().status == 0 - @pytest.mark.include_capsule -def test_positive_health_check_upstream_repository(sat_maintain, request): +def test_negative_health_check_upstream_repository(sat_maintain, request): """Verify upstream repository check :id: 349fcf33-2d25-4628-a6af-cff53e624b25 @@ -190,8 +199,15 @@ def test_positive_health_check_upstream_repository(sat_maintain, request): :steps: 1. Run satellite-maintain health check --label check-upstream-repository - :expectedresults: check-upstream-repository health check should pass. + :expectedresults: check-upstream-repository health check should fail. """ + + @request.addfinalizer + def _finalize(): + for name in upstream_url: + sat_maintain.execute(f'rm -fr /etc/yum.repos.d/{name}.repo') + sat_maintain.execute('dnf clean all') + for name, url in upstream_url.items(): sat_maintain.create_custom_repos(**{name: url}) result = sat_maintain.cli.Health.check( @@ -200,21 +216,14 @@ def test_positive_health_check_upstream_repository(sat_maintain, request): assert result.status == 0 assert 'System has upstream foreman_repo,puppet_repo repositories enabled' in result.stdout assert 'FAIL' in result.stdout - for name in upstream_url.keys(): + for name in upstream_url: result = sat_maintain.execute(f'cat /etc/yum.repos.d/{name}.repo') if name == 'fedorapeople_repo': assert 'enabled=1' in result.stdout elif name in ['foreman_repo', 'puppet_repo']: assert 'enabled=0' in result.stdout - @request.addfinalizer - def _finalize(): - for name, url in upstream_url.items(): - sat_maintain.execute(f'rm -fr /etc/yum.repos.d/{name}.repo') - sat_maintain.execute('dnf clean all') - -@pytest.mark.include_capsule def test_positive_health_check_available_space(sat_maintain): """Verify available-space check @@ -224,24 +233,13 @@ def test_positive_health_check_available_space(sat_maintain): :steps: 1. Run satellite-maintain health check --label available-space + 2. Run satellite-maintain health check --label available-space-cp :expectedresults: available-space health check should pass. """ result = sat_maintain.cli.Health.check(options={'label': 'available-space'}) assert 'FAIL' not in result.stdout assert result.status == 0 - - -def test_positive_health_check_available_space_candlepin(sat_maintain): - """Verify available-space-cp check - - :id: 382a2bf3-a3da-4e46-b370-a443450f93b7 - - :steps: - 1. Run satellite-maintain health check --label available-space-cp - - :expectedresults: available-space-cp health check should pass. 
- """ result = sat_maintain.cli.Health.check(options={'label': 'available-space-cp'}) assert 'FAIL' not in result.stdout assert result.status == 0 @@ -263,15 +261,16 @@ def test_positive_hammer_defaults_set(sat_maintain, request): :customerscenario: true """ - sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': 1}) - result = sat_maintain.cli.Health.check(options={'assumeyes': True}) - assert result.status == 0 - assert 'FAIL' not in result.stdout @request.addfinalizer def _finalize(): sat_maintain.cli.Defaults.delete({'param-name': 'organization_id'}) + sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': 1}) + result = sat_maintain.cli.Health.check(options={'assumeyes': True}) + assert result.status == 0 + assert 'FAIL' not in result.stdout + @pytest.mark.include_capsule def test_positive_health_check_hotfix_installed(sat_maintain, request): @@ -291,6 +290,13 @@ def test_positive_health_check_hotfix_installed(sat_maintain, request): :expectedresults: check-hotfix-installed check should detect modified file and installed hotfix. """ + + @request.addfinalizer + def _finalize(): + sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo') + sat_maintain.execute('dnf remove -y hotfix-package') + assert sat_maintain.execute(f'sed -i "/#modifying_file/d" {fpath.stdout}').status == 0 + # Verify check-hotfix-installed without hotfix package. result = sat_maintain.cli.Health.check(options={'label': 'check-hotfix-installed'}) assert result.status == 0 @@ -310,12 +316,6 @@ def test_positive_health_check_hotfix_installed(sat_maintain, request): assert 'WARNING' in result.stdout assert 'hotfix-package' in result.stdout - @request.addfinalizer - def _finalize(): - sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo') - sat_maintain.execute('dnf remove -y hotfix-package') - assert sat_maintain.execute(f'sed -i "/#modifying_file/d" {fpath.stdout}').status == 0 - @pytest.mark.include_capsule def test_positive_health_check_validate_yum_config(sat_maintain): @@ -352,7 +352,7 @@ def test_positive_health_check_validate_yum_config(sat_maintain): @pytest.mark.include_capsule -def test_positive_health_check_epel_repository(request, sat_maintain): +def test_negative_health_check_epel_repository(request, sat_maintain): """Verify check-non-redhat-repository. :id: ce2d7278-d7b7-4f76-9923-79be831c0368 @@ -366,8 +366,13 @@ def test_positive_health_check_epel_repository(request, sat_maintain): :BZ: 1755755 - :expectedresults: check-non-redhat-repository health check should pass. + :expectedresults: check-non-redhat-repository health check should fail. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y epel-release').status == 0 + epel_repo = 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm' sat_maintain.execute(f'dnf install -y {epel_repo}') result = sat_maintain.cli.Health.check(options={'label': 'check-non-redhat-repository'}) @@ -375,10 +380,6 @@ def test_positive_health_check_epel_repository(request, sat_maintain): assert result.status == 1 assert 'FAIL' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y epel-release').status == 0 - def test_positive_health_check_old_foreman_tasks(sat_maintain): """Verify check-old-foreman-tasks. @@ -399,12 +400,14 @@ def test_positive_health_check_old_foreman_tasks(sat_maintain): :expectedresults: check-old-foreman-tasks health check should pass. 
""" - rake_command = 'foreman-rake console <<< ' - find_task = '\'t = ForemanTasks::Task.where(state: "stopped").first;' - update_task = "t.started_at = t.started_at - 31.day;t.save(:validate => false)'" error_message = 'paused or stopped task(s) older than 30 days' delete_message = 'Deleted old tasks:' - sat_maintain.execute(rake_command + find_task + update_task) + rake_command = ( + 't = ForemanTasks::Task.where(state: "stopped").first;' + 't.started_at = t.started_at - 32.day;' + 't.save(:validate => false)' + ) + sat_maintain.execute(f"foreman-rake console <<< '{rake_command}'") result = sat_maintain.cli.Health.check( options={'label': 'check-old-foreman-tasks', 'assumeyes': True} ) @@ -472,6 +475,14 @@ def test_positive_health_check_tftp_storage(sat_maintain, request): :expectedresults: check-tftp-storage health check should pass. """ + + @request.addfinalizer + def _finalize(): + sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '360'}) + assert ( + sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '360' + ) + sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '2'}) assert sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '2' files_to_delete = [ @@ -505,88 +516,6 @@ def test_positive_health_check_tftp_storage(sat_maintain, request): assert result.status == 0 assert 'FAIL' not in result.stdout - @request.addfinalizer - def _finalize(): - sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '360'}) - assert ( - sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '360' - ) - - -def test_positive_health_check_postgresql_checkpoint_segments(sat_maintain): - """Verify check-postgresql-checkpoint-segments - - :id: 963a5b47-168a-4443-9fdf-bba59c9b0e97 - - :steps: - 1. Have an invalid /etc/foreman-installer/custom-hiera.yaml file - 2. Run satellite-maintain health check --label check-postgresql-checkpoint-segments. - 3. Assert that check-postgresql-checkpoint-segments gives proper - error message saying an invalid yaml file - 4. Make /etc/foreman-installer/custom-hiera.yaml file valid - 5. Add config_entries section in /etc/foreman-installer/custom-hiera.yaml - 6. Run satellite-maintain health check --label check-postgresql-checkpoint-segments. - 7. Assert that check-postgresql-checkpoint-segments fails. - 8. Add checkpoint_segments parameter in /etc/foreman-installer/custom-hiera.yaml - 9. Run satellite-maintain health check --label check-postgresql-checkpoint-segments. - 10. Assert that check-postgresql-checkpoint-segments fails. - 11. Remove config_entries section from /etc/foreman-installer/custom-hiera.yaml - 12. Run satellite-maintain health check --label check-postgresql-checkpoint-segments. - 13. Assert that check-postgresql-checkpoint-segments pass. - - :BZ: 1894149, 1899322 - - :customerscenario: true - - :expectedresults: check-postgresql-checkpoint-segments health check should pass. - - :CaseImportance: High - """ - custom_hiera = '/etc/foreman-installer/custom-hiera.yaml' - # Create invalid yaml file - sat_maintain.execute(f'sed -i "s/---/----/g" {custom_hiera}') - result = sat_maintain.cli.Health.check( - options={'label': 'check-postgresql-checkpoint-segments'} - ) - - assert f'File {custom_hiera} is not a yaml file.' 
in result.stdout
-    assert 'FAIL' in result.stdout
-    assert result.status == 1
-    # Make yaml file valid
-    sat_maintain.execute(f'sed -i "s/----/---/g" {custom_hiera}')
-    # Add config_entries section
-    sat_maintain.execute(f'sed -i "$ a postgresql::server::config_entries:" {custom_hiera}')
-    # Run check-postgresql-checkpoint-segments check.
-    result = sat_maintain.cli.Health.check(
-        options={'label': 'check-postgresql-checkpoint-segments'}
-    )
-
-    assert "ERROR: 'postgresql::server::config_entries' cannot be null." in result.stdout
-    assert 'Please remove it from following file and re-run the command.' in result.stdout
-    assert result.status == 1
-    assert 'FAIL' in result.stdout
-    # Add checkpoint_segments
-    sat_maintain.execute(fr'sed -i "$ a\ checkpoint_segments: 32" {custom_hiera}')
-    # Run check-postgresql-checkpoint-segments check.
-    result = sat_maintain.cli.Health.check(
-        options={'label': 'check-postgresql-checkpoint-segments'}
-    )
-
-    assert "ERROR: Tuning option 'checkpoint_segments' found." in result.stdout
-    assert 'Please remove it from following file and re-run the command.' in result.stdout
-    assert result.status == 1
-    assert 'FAIL' in result.stdout
-    # Remove config_entries section
-    sat_maintain.execute(
-        fr'sed -i "/postgresql::server::config_entries\|checkpoint_segments: 32/d" {custom_hiera}'
-    )
-    # Run check-postgresql-checkpoint-segments check.
-    result = sat_maintain.cli.Health.check(
-        options={'label': 'check-postgresql-checkpoint-segments'}
-    )
-    assert result.status == 0
-    assert 'FAIL' not in result.stdout
-

 @pytest.mark.include_capsule
 def test_positive_health_check_env_proxy(sat_maintain):
@@ -621,8 +550,7 @@ def test_positive_health_check_env_proxy(sat_maintain):
     assert 'FAIL' not in result.stdout


-@pytest.mark.stubbed
-def test_positive_health_check_foreman_proxy_verify_dhcp_config_syntax():
+def test_positive_health_check_foreman_proxy_verify_dhcp_config_syntax(sat_maintain):
     """Verify foreman-proxy-verify-dhcp-config-syntax

     :id: 43ca5cc7-9888-490d-b1ba-f3298e737039

     :steps:

     :CaseImportance: Medium

-    :CaseAutomation: NotAutomated
+    :CaseAutomation: Automated
     """
+    # Set dhcp.yml to `:use_provider: dhcp_isc`
+    sat_maintain.execute(
+        r"sed -i '/:use_provider: dhcp_infoblox/c\:use_provider: dhcp_isc'"
+        " /etc/foreman-proxy/settings.d/dhcp.yml"
+    )
+    result = sat_maintain.execute("cat /etc/foreman-proxy/settings.d/dhcp.yml")
+    assert ':use_provider: dhcp_isc' in result.stdout
+    # Run health list and check, and verify nothing comes back
+    result = sat_maintain.cli.Health.list()
+    assert 'foreman-proxy-verify-dhcp-config-syntax' not in result.stdout
+    result = sat_maintain.cli.Health.check(
+        options={'label': 'foreman-proxy-verify-dhcp-config-syntax'}
+    )
+    assert 'No scenario matching label' in result.stdout
+    assert 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout
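The sat_maintain.install() call just below drives the installer through an InstallerCommand object. As a hedged sketch (assuming the helper renders each bare argument as a '--' flag, which its other uses in this diff suggest), the invocation amounts to:

# Sketch only: the resulting shell line is an educated guess, not captured output.
cmd = InstallerCommand('enable-foreman-proxy-plugin-dhcp-remote-isc', 'foreman-proxy-dhcp true')
# expected to reach the host roughly as:
#   satellite-installer --enable-foreman-proxy-plugin-dhcp-remote-isc --foreman-proxy-dhcp true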
+    # Enable DHCP
+    installer = sat_maintain.install(
+        InstallerCommand('enable-foreman-proxy-plugin-dhcp-remote-isc', 'foreman-proxy-dhcp true')
+    )
+    assert 'Success!' in installer.stdout
+    # Run health list and check, and verify the check is present
+    result = sat_maintain.cli.Health.list()
+    assert 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout
+    result = sat_maintain.cli.Health.check(
+        options={'label': 'foreman-proxy-verify-dhcp-config-syntax'}
+    )
+    assert 'OK' in result.stdout
+    # Set dhcp.yml `:use_provider: dhcp_infoblox`
+    sat_maintain.execute(
+        r"sed -i '/:use_provider: dhcp_isc/c\:use_provider: dhcp_infoblox'"
+        " /etc/foreman-proxy/settings.d/dhcp.yml"
+    )
+    result = sat_maintain.execute("cat /etc/foreman-proxy/settings.d/dhcp.yml")
+    assert ':use_provider: dhcp_infoblox' in result.stdout
+    # Run health list and check, and verify nothing comes back
+    result = sat_maintain.cli.Health.list()
+    assert 'foreman-proxy-verify-dhcp-config-syntax' not in result.stdout
+    result = sat_maintain.cli.Health.check(
+        options={'label': 'foreman-proxy-verify-dhcp-config-syntax'}
+    )
+    assert 'No scenario matching label' in result.stdout
+    assert 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout


 def test_positive_remove_job_file(sat_maintain):
@@ -696,10 +666,20 @@ def test_positive_health_check_corrupted_roles(sat_maintain, request):

     :BZ: 1703041, 1908846
     """
-    # Check the filter created to verify the role, resource type, and permissions assigned.
     role_name = 'test_role'
     resource_type = gen_string("alpha")
     sat_maintain.cli.Role.create(options={'name': role_name})
+
+    @request.addfinalizer
+    def _finalize():
+        resource_type = r"'\''Host'\''"
+        sat_maintain.execute(
+            f'''sudo su - postgres -c "psql -d foreman -c 'UPDATE permissions SET
+            resource_type = {resource_type} WHERE name = {permission_name};'"'''
+        )
+        sat_maintain.cli.Role.delete(options={'name': role_name})
+
+    # Check the filter created to verify the role, resource type, and permissions assigned.
     sat_maintain.cli.Filter.create(
         options={'role': role_name, 'permissions': ['view_hosts', 'console_hosts']}
     )
@@ -721,15 +701,6 @@
     result = sat_maintain.cli.Filter.list(options={'search': role_name}, output_format='yaml')
     assert result.count('Id') == 4

-    @request.addfinalizer
-    def _finalize():
-        resource_type = r"'\''Host'\''"
-        sat_maintain.execute(
-            f'''sudo su - postgres -c "psql -d foreman -c 'UPDATE permissions SET
-            resource_type = {resource_type} WHERE name = {permission_name};'"'''
-        )
-        sat_maintain.cli.Role.delete(options={'name': role_name})
-

 @pytest.mark.include_capsule
 def test_positive_health_check_non_rh_packages(sat_maintain, request):

     :CaseImportance: High
     """
+
+    @request.addfinalizer
+    def _finalize():
+        assert sat_maintain.execute('dnf remove -y walrus').status == 0
+        assert sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo').status == 0
+
     sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url)
     assert (
         sat_maintain.cli.Packages.install(packages='walrus', options={'assumeyes': True}).status
         == 0
     )
     result = sat_maintain.cli.Health.check({'label': 'non-rh-packages'})
-    assert 'Found 1 unexpected non Red Hat Package(s) installed!' in result.stdout
+    assert 'unexpected non Red Hat Package(s) installed!'
in result.stdout assert 'walrus-5.21-1.noarch' in result.stdout assert result.status == 78 assert 'WARNING' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - assert sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo').status == 0 - - -@pytest.mark.stubbed -def test_positive_health_check_available_space_postgresql12(): - """Verify warnings when available space in /var/opt/rh/rh-postgresql12/ - is less than consumed space of /var/lib/pgsql/ - - :id: 283e627d-6afc-49cb-afdb-5b77a91bbd1e - - :parametrized: yes - - :setup: - 1. Have some data under /var/lib/pgsql (upgrade templates have ~565Mib data) - 2. Create dir /var/opt/rh/rh-postgresql12/ and mount a partition of ~300Mib - to this dir (less than /var/lib/pgsql). - - :steps: - 1. satellite-maintain health check --label available-space-for-postgresql12 - 2. Verify Warning or Error is displayed when enough space is not - available under /var/opt/rh/rh-postgresql12/ - - :BZ: 1898108, 1973363 - - :expectedresults: Verify warnings when available space in /var/opt/rh/rh-postgresql12/ - is less than consumed space of /var/lib/pgsql/ - - :CaseImportance: High - - :customerscenario: true - - :CaseAutomation: ManualOnly - """ - def test_positive_health_check_duplicate_permissions(sat_maintain): """Verify duplicate-permissions check diff --git a/tests/foreman/maintain/test_maintenance_mode.py b/tests/foreman/maintain/test_maintenance_mode.py index 8f24e62281a..b8d5024e1b1 100644 --- a/tests/foreman/maintain/test_maintenance_mode.py +++ b/tests/foreman/maintain/test_maintenance_mode.py @@ -4,25 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest import yaml from robottelo.config import robottelo_tmp_dir -pytestmark = pytest.mark.destructive - @pytest.mark.e2e @pytest.mark.tier2 @@ -54,6 +47,11 @@ def test_positive_maintenance_mode(request, sat_maintain, setup_sync_plan): to disable/enable sync-plan, stop/start crond.service and is able to add FOREMAN_MAINTAIN_TABLE rule in nftables. 
""" + + @request.addfinalizer + def _finalize(): + assert sat_maintain.cli.MaintenanceMode.stop().status == 0 + enable_sync_ids = setup_sync_plan data_yml_path = '/var/lib/foreman-maintain/data.yml' local_data_yml_path = f'{robottelo_tmp_dir}/data.yml' @@ -149,7 +147,3 @@ def test_positive_maintenance_mode(request, sat_maintain, setup_sync_plan): assert 'OK' in result.stdout assert result.status == 1 assert 'Maintenance mode is Off' in result.stdout - - @request.addfinalizer - def _finalize(): - assert sat_maintain.cli.MaintenanceMode.stop().status == 0 diff --git a/tests/foreman/maintain/test_offload_DB.py b/tests/foreman/maintain/test_offload_DB.py index e9913918f96..0e1b3ced9d9 100644 --- a/tests/foreman/maintain/test_offload_DB.py +++ b/tests/foreman/maintain/test_offload_DB.py @@ -4,17 +4,12 @@ :CaseAutomation: ManualOnly -:CaseLevel: Integration - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -28,12 +23,10 @@ def test_offload_internal_db_to_external_db_host(): :id: d07235c8-4584-469a-a87d-ace4dadb0a1f - :Steps: Run satellite-installer with foreman, candlepin and pulpcore options + :steps: Run satellite-installer with foreman, candlepin and pulpcore options referring to external DB host :expectedresults: Installed successful, all services running - :CaseLevel: Integration - - :CaseComponent: Installer + :CaseComponent: Installation """ diff --git a/tests/foreman/maintain/test_packages.py b/tests/foreman/maintain/test_packages.py index 74adba1bf33..f0ae22d6d10 100644 --- a/tests/foreman/maintain/test_packages.py +++ b/tests/foreman/maintain/test_packages.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest @@ -141,6 +136,9 @@ def test_positive_lock_package_versions_with_installer(sat_maintain): @pytest.mark.e2e +@pytest.mark.upgrade +@pytest.mark.pit_client +@pytest.mark.pit_server @pytest.mark.include_capsule def test_positive_fm_packages_install(request, sat_maintain): """Verify whether packages install/update work as expected. @@ -162,6 +160,15 @@ def test_positive_fm_packages_install(request, sat_maintain): :expectedresults: Packages get install/update when lock/unlocked. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y zsh').status == 0 + if sat_maintain.__class__.__name__ == 'Satellite': + result = sat_maintain.install(InstallerCommand('lock-package-versions')) + assert result.status == 0 + assert 'Success!' in result.stdout + # Test whether packages are locked or not result = sat_maintain.install(InstallerCommand('lock-package-versions')) assert result.status == 0 @@ -218,14 +225,6 @@ def test_positive_fm_packages_install(request, sat_maintain): assert result.status == 0 assert 'Use foreman-maintain packages install/update ' not in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y zsh').status == 0 - if sat_maintain.__class__.__name__ == 'Satellite': - result = sat_maintain.install(InstallerCommand('lock-package-versions')) - assert result.status == 0 - assert 'Success!' 
in result.stdout - @pytest.mark.include_capsule def test_positive_fm_packages_update(request, sat_maintain): @@ -246,6 +245,12 @@ def test_positive_fm_packages_update(request, sat_maintain): :customerscenario: true """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y walrus').status == 0 + sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') + # Setup custom_repo and packages update sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url) disableplugin = '--disableplugin=foreman-protector' @@ -265,27 +270,3 @@ def test_positive_fm_packages_update(request, sat_maintain): result = sat_maintain.execute('rpm -qa walrus') assert result.status == 0 assert 'walrus-5.21-1' in result.stdout - - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') - - -@pytest.mark.stubbed -def test_positive_fm_packages_sat_installer(sat_maintain): - """Verify satellite-installer is not executed after install/update - of satellite-maintain/rubygem-foreman_maintain package - - :id: d73971a1-68b4-4ab2-a87c-76cc5ff80a39 - - :steps: - 1. satellite-maintain packages install/update satellite-maintain/rubygem-foreman_maintain - - :BZ: 1825841 - - :expectedresults: satellite-installer shouldn't be executed after install/update - of satellite-maintain/rubygem-foreman_maintain package - - :CaseAutomation: ManualOnly - """ diff --git a/tests/foreman/maintain/test_service.py b/tests/foreman/maintain/test_service.py index 248c0d4f3f4..5fece7c6aaf 100644 --- a/tests/foreman/maintain/test_service.py +++ b/tests/foreman/maintain/test_service.py @@ -4,27 +4,47 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest +from wait_for import wait_for from robottelo.config import settings -from robottelo.constants import HAMMER_CONFIG -from robottelo.constants import MAINTAIN_HAMMER_YML -from robottelo.constants import SATELLITE_ANSWER_FILE - -pytestmark = pytest.mark.destructive +from robottelo.constants import ( + HAMMER_CONFIG, + MAINTAIN_HAMMER_YML, + SATELLITE_ANSWER_FILE, +) +from robottelo.hosts import Satellite + +SATELLITE_SERVICES = [ + "dynflow-sidekiq@.service", + "foreman-proxy.service", + "foreman.service", + "httpd.service", + "postgresql.service", + "pulpcore-api.service", + "pulpcore-content.service", + "pulpcore-worker@.service", + "redis.service", + "tomcat.service", +] + +CAPSULE_SERVICES = [ + "foreman-proxy.service", + "httpd.service", + "postgresql.service", + "pulpcore-api.service", + "pulpcore-content.service", + "pulpcore-worker@.service", + "redis.service", +] @pytest.fixture @@ -64,10 +84,13 @@ def test_positive_service_list(sat_maintain): result = sat_maintain.cli.Service.list() assert 'FAIL' not in result.stdout assert result.status == 0 + services = SATELLITE_SERVICES if type(sat_maintain) is Satellite else CAPSULE_SERVICES + for service in services: + assert service in result.stdout -@pytest.mark.e2e @pytest.mark.include_capsule +@pytest.mark.usefixtures('start_satellite_services') def test_positive_service_stop_start(sat_maintain): """Start/Stop services using satellite-maintain service subcommand @@ -97,8 +120,10 @@ def test_positive_service_stop_start(sat_maintain): assert result.status == 0 
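Several of the service tests here now rely on a start_satellite_services fixture that is not part of this diff. A minimal sketch of the contract those tests appear to assume (fixture name real, body hypothetical):

import pytest

@pytest.fixture
def start_satellite_services(sat_maintain):
    # Assumed behavior: ensure every service is up before the test runs,
    # and bring services back up afterwards so later tests start clean.
    sat_maintain.cli.Service.start()
    yield
    sat_maintain.cli.Service.start()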
-@pytest.mark.e2e +@pytest.mark.stream +@pytest.mark.upgrade @pytest.mark.include_capsule +@pytest.mark.usefixtures('start_satellite_services') def test_positive_service_stop_restart(sat_maintain): """Disable services using satellite-maintain service @@ -130,8 +155,8 @@ def test_positive_service_stop_restart(sat_maintain): assert result.status == 0 -@pytest.mark.stream @pytest.mark.include_capsule +@pytest.mark.usefixtures('start_satellite_services') def test_positive_service_enable_disable(sat_maintain): """Enable/Disable services using satellite-maintain service subcommand @@ -145,19 +170,45 @@ def test_positive_service_enable_disable(sat_maintain): :expectedresults: Services should enable/disable - :BZ: 1995783 + :BZ: 1995783, 2055790 :customerscenario: true """ + result = sat_maintain.cli.Service.stop() + assert 'FAIL' not in result.stdout + assert result.status == 0 result = sat_maintain.cli.Service.disable() assert 'FAIL' not in result.stdout assert result.status == 0 result = sat_maintain.cli.Service.enable() assert 'FAIL' not in result.stdout assert result.status == 0 + sat_maintain.power_control(state='reboot') + if isinstance(sat_maintain, Satellite): + result, _ = wait_for( + sat_maintain.cli.Service.status, + func_kwargs={'options': {'brief': True, 'only': 'foreman.service'}}, + fail_condition=lambda res: "FAIL" in res.stdout, + handle_exception=True, + delay=30, + timeout=300, + ) + assert 'FAIL' not in sat_maintain.cli.Base.ping() + else: + result, _ = wait_for( + sat_maintain.cli.Service.status, + func_kwargs={'options': {'brief': True}}, + fail_condition=lambda res: "FAIL" in res.stdout, + handle_exception=True, + delay=30, + timeout=300, + ) + assert 'FAIL' not in result.stdout + assert result.status == 0 -def test_positive_foreman_service(request, sat_maintain): +@pytest.mark.usefixtures('start_satellite_services') +def test_positive_foreman_service(sat_maintain): """Validate httpd service should work as expected even stopping of the foreman service :id: 08a29ea2-2e49-11eb-a22b-d46d6dd3b5b2 @@ -175,13 +226,12 @@ def test_positive_foreman_service(request, sat_maintain): assert 'foreman' in result.stdout result = sat_maintain.cli.Service.status(options={'only': 'httpd'}) assert result.status == 0 - result = sat_maintain.cli.Health.check(options={'assumeyes': True}) + result = sat_maintain.cli.Health.check( + options={'assumeyes': True, 'whitelist': 'check-tftp-storage'} + ) assert result.status == 0 assert 'foreman' in result.stdout - - @request.addfinalizer - def _finalize(): - assert sat_maintain.cli.Service.start(options={'only': 'foreman'}).status == 0 + assert sat_maintain.cli.Service.start(options={'only': 'foreman'}).status == 0 @pytest.mark.include_capsule @@ -193,11 +243,11 @@ def test_positive_status_clocale(sat_maintain): :parametrized: yes :steps: - 1. Run LC_ALL=C satellite-maintain service stop + 1. Run LC_ALL=C.UTF-8 satellite-maintain service status :expectedresults: service status works with C locale """ - assert sat_maintain.cli.Service.status(env_var='LC_ALL=C').status == 0 + assert sat_maintain.cli.Service.status(env_var='LC_ALL=C.UTF-8').status == 0 def test_positive_service_restart_without_hammer_config(missing_hammer_config, sat_maintain): @@ -219,31 +269,6 @@ def test_positive_service_restart_without_hammer_config(missing_hammer_config, s assert result.status == 0 -def test_positive_satellite_maintain_service_list_sidekiq(sat_maintain): - """List sidekiq services with service list - - :id: 5acb68a9-c430-485d-bb45-b499adc90927 - - :steps: - 1. 
Run satellite-maintain service list - 2. Run satellite-maintain service restart - - :expectedresults: Sidekiq services should list and should restart. - - :CaseImportance: Medium - """ - result = sat_maintain.cli.Service.list() - assert 'FAIL' not in result.stdout - assert result.status == 0 - assert 'dynflow-sidekiq@.service' in result.stdout - - result = sat_maintain.cli.Service.restart() - assert 'FAIL' not in result.stdout - assert result.status == 0 - for service in ['orchestrator', 'worker', 'worker-hosts-queue']: - assert f'dynflow-sidekiq@{service}' in result.stdout - - def test_positive_status_rpmsave(request, sat_maintain): """Verify satellite-maintain service status doesn't contain any backup files like .rpmsave, or any file with .yml which don't exist as services. diff --git a/tests/foreman/maintain/test_upgrade.py b/tests/foreman/maintain/test_upgrade.py index ed378096913..1e544ee6751 100644 --- a/tests/foreman/maintain/test_upgrade.py +++ b/tests/foreman/maintain/test_upgrade.py @@ -4,23 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Component - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest from robottelo.config import settings - -pytestmark = pytest.mark.destructive +from robottelo.constants import INSTALLER_CONFIG_FILE, SATELLITE_VERSION def last_y_stream_version(release): @@ -66,6 +60,10 @@ def test_positive_satellite_maintain_upgrade_list(sat_maintain): @pytest.mark.include_capsule +@pytest.mark.skipif( + (settings.server.version.release == 'stream'), + reason='Upgrade path is not available for stream yet', +) def test_positive_repositories_validate(sat_maintain): """Test repositories-validate pre-upgrade check is skipped when system is subscribed using custom activationkey. @@ -100,12 +98,22 @@ def test_positive_repositories_validate(sat_maintain): @pytest.mark.parametrize( 'custom_host', [ - {'deploy_rhel_version': '8', 'deploy_flavor': 'satqe-ssd.disk.xxxl'}, - {'deploy_rhel_version': '8', 'deploy_flavor': 'satqe-ssd.standard.std'}, + { + 'deploy_rhel_version': settings.server.version.rhel_version, + 'deploy_flavor': 'satqe-ssd.disk.xxxl', + }, + { + 'deploy_rhel_version': settings.server.version.rhel_version, + 'deploy_flavor': 'satqe-ssd.standard.std', + }, ], ids=['default', 'medium'], indirect=True, ) +@pytest.mark.skipif( + (settings.server.version.release == 'stream'), + reason='Upgrade path is not available for stream yet', +) def test_negative_pre_upgrade_tuning_profile_check(request, custom_host): """Negative test that verifies a satellite with less than tuning profile hardware requirements fails on pre-upgrade check. @@ -120,20 +128,31 @@ def test_negative_pre_upgrade_tuning_profile_check(request, custom_host): :expectedresults: Pre-upgrade check fails. 
""" profile = request.node.callspec.id + rhel_major = custom_host.os_version.major sat_version = ".".join(settings.server.version.release.split('.')[0:2]) - # Register to CDN for RHEL8 repos, download and enable last y stream's ohsnap repos, + # Register to CDN for RHEL repos, download and enable last y stream's ohsnap repos, # and enable the satellite module and install it on the host custom_host.register_to_cdn() - last_y_stream = last_y_stream_version(settings.server.version.release) + last_y_stream = last_y_stream_version( + SATELLITE_VERSION if sat_version == 'stream' else sat_version + ) custom_host.download_repofile(product='satellite', release=last_y_stream) - custom_host.execute('dnf -y module enable satellite:el8 && dnf -y install satellite') - # Install without system checks to get around installer checks custom_host.execute( - f'satellite-installer --scenario satellite --disable-system-checks --tuning {profile}', + f'dnf -y module enable satellite:el{rhel_major} && dnf -y install satellite' + ) + # Install with development tuning profile to get around installer checks + custom_host.execute( + 'satellite-installer --scenario satellite --tuning development', timeout='30m', ) + # Change to correct tuning profile (default or medium) + custom_host.execute( + f'sed -i "s/tuning: development/tuning: {profile}/g" {INSTALLER_CONFIG_FILE}' + ) # Get current Satellite version's repofile - custom_host.download_repofile(product='satellite', release=sat_version) + custom_host.download_repofile( + product='satellite', release=sat_version, snap=settings.server.version.snap + ) # Run satellite-maintain to have it self update to the newest version, # however, this will not actually execute the command after updating custom_host.execute('satellite-maintain upgrade list-versions') @@ -150,27 +169,31 @@ def test_negative_pre_upgrade_tuning_profile_check(request, custom_host): ) -@pytest.mark.stubbed @pytest.mark.include_capsule -def test_positive_self_update_for_zstream(sat_maintain): - """Test satellite-maintain self-upgrade to update maintain packages from zstream repo. +def test_positive_self_update_maintain_package(sat_maintain): + """satellite-maintain attempts to update itself when a command is run :id: 1c566768-fd73-4fe6-837b-26709a1ebed9 :parametrized: yes :steps: - 1. Run satellite-maintain upgrade check/run command. - 2. Run satellite-maintain upgrade check/run command with disable-self-upgrade option. + 1. Run satellite-maintain upgrade list-versions/check/run command. + 2. Run satellite-maintain upgrade list-versions/check/run command + with disable-self-upgrade option. :expectedresults: - 1. Update satellite-maintain package to latest version and gives message to re-run command. - 2. If disable-self-upgrade option is used then it should skip self-upgrade step for zstream + 1. satellite-maintain tries to update rubygem-foreman_maintain to a newer available version + 2. If disable-self-upgrade option is used then it should skip self-upgrade step :BZ: 1649329 - - :CaseAutomation: ManualOnly """ + result = sat_maintain.cli.Upgrade.list_versions() + assert result.status == 0 + assert 'Checking for new version of satellite-maintain...' in result.stdout + result = sat_maintain.cli.Upgrade.list_versions(options={'disable-self-upgrade': True}) + assert result.status == 0 + assert 'Checking for new version of satellite-maintain...' 
not in result.stdout


@pytest.mark.stubbed
diff --git a/tests/foreman/sanity/test_bvt.py b/tests/foreman/sanity/test_bvt.py
new file mode 100644
index 00000000000..7da1ccc3d8e
--- /dev/null
+++ b/tests/foreman/sanity/test_bvt.py
@@ -0,0 +1,111 @@
+"""Build Validation tests

+:Requirement: Sanity

+:CaseAutomation: Automated

+:CaseComponent: BVT

+:Team: JPL

+:CaseImportance: Critical

+"""
+import re

+import pytest

+from robottelo.config import settings
+from robottelo.utils.ohsnap import ohsnap_snap_rpms

+pytestmark = [pytest.mark.build_sanity]


+def test_installed_packages_with_versions(target_sat):
+    """Compare the packages that are supposed to be installed from the repo with the installed packages
+
+    :id: 08913caa-5084-444f-a37d-63da205acc74
+
+    :expectedresults: All the satellite packages and their installed versions should be
+        identical to Ohsnap, as the source of truth (SOT)
+    """
+    # Raw data from Ohsnap and the Satellite server
+    ohsnap_rpms = ohsnap_snap_rpms(
+        ohsnap=settings.ohsnap,
+        sat_version=settings.server.version.release,
+        snap_version=settings.server.version.snap,
+        os_major=settings.server.version.rhel_version,
+    )
+    # 'rpm -qa' lists the installed packages as name-version-release.arch strings
+    installed_rpms = target_sat.execute('rpm -qa').stdout
+    installed_rpms = installed_rpms.splitlines()
+
+    # Convert both data sets into a comparable format
+
+    # Regex patterns for splitting package names from versions
+    split_by_version = r'-\d+[-._]*\d*'
+    namever_pattern = r'.*-\d+[-._]*\d*'
+
+    # Formatted Ohsnap data
+    ohsnap_rpm_names = [re.split(split_by_version, rpm)[0] for rpm in ohsnap_rpms]
+    ohsnap_rpm_name_vers = [re.findall(namever_pattern, rpm)[0] for rpm in ohsnap_rpms]
+
+    # Compare installed RPMs with the Ohsnap data
+    mismatch_versions = []
+    for rpm in installed_rpms:
+        installed_rpm_name = re.split(split_by_version, rpm)[0]
+        if installed_rpm_name in ohsnap_rpm_names:
+            installed_rpm_name_ver = re.findall(namever_pattern, rpm)[0]
+            if installed_rpm_name_ver not in ohsnap_rpm_name_vers:
+                mismatch_versions.append(rpm)
+
+    if mismatch_versions:
+        pytest.fail(f'Some RPMs were found with mismatched versions: {mismatch_versions}')
+
+
+def test_all_interfaces_are_accessible(target_sat):
+    """API, CLI and UI interfaces are accessible
+
+    :id: 0a212120-8e49-4489-a1a4-4272004e16dc
+
+    :expectedresults: All three satellite interfaces are accessible
+    """
+    errors = {}
+    # API Interface
+    try:
+        api_org = target_sat.api.Organization(id=1).read()
+        assert api_org
+        assert api_org.name == 'Default Organization'
+    except Exception as api_exc:
+        errors['api'] = api_exc
+
+    # CLI Interface
+    try:
+        cli_org = target_sat.cli.Org.info({'id': 1})
+        assert cli_org
+        assert cli_org['name'] == 'Default Organization'
+    except Exception as cli_exc:
+        errors['cli'] = cli_exc
+
+    # UI Interface
+    try:
+        with target_sat.ui_session() as session:
+            ui_org = session.organization.read('Default Organization', widget_names='primary')
+            assert ui_org
+            assert ui_org['primary']['name'] == 'Default Organization'
+    except Exception as ui_exc:
+        errors['ui'] = ui_exc
+
+    # Fail if any interface interaction recorded an error
+    if errors:
+        pytest.fail(
+            '\n'.join(
+                [
+                    f'Interface {interface} interaction failed with error {err}'
+                    for interface, err in errors.items()
+                ]
+            )
+        )
diff --git a/tests/foreman/sys/test_dynflow.py b/tests/foreman/sys/test_dynflow.py
index 5a0acaf0e54..2b758bcb208 100644
--- a/tests/foreman/sys/test_dynflow.py
+++ b/tests/foreman/sys/test_dynflow.py
@@ -2,19 +2,14 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: Component
-
-:CaseComponent: Dynflow
+:CaseComponent: TasksPlugin
 
 :Team: Endeavour
 
-:Requirement: Dynflow
-
-:TestType: Functional
+:Requirement: TasksPlugin
 
 :CaseImportance: High
 
-:Upstream: No
 """
 import pytest
diff --git a/tests/foreman/sys/test_fam.py b/tests/foreman/sys/test_fam.py
index 7a41ffba451..f4500a2b599 100644
--- a/tests/foreman/sys/test_fam.py
+++ b/tests/foreman/sys/test_fam.py
@@ -4,23 +4,24 @@
 
 :CaseAutomation: Automated
 
-:CaseLevel: System
-
-:TestType: Functional
-
 :CaseImportance: High
 
 :CaseComponent: AnsibleCollection
 
 :Team: Platform
 
-:Upstream: No
 """
+from broker import Broker
 import pytest
 
-from robottelo.constants import FAM_MODULE_PATH
-from robottelo.constants import FOREMAN_ANSIBLE_MODULES
-from robottelo.constants import RH_SAT_ROLES
+from robottelo.config import settings
+from robottelo.constants import (
+    FAM_MODULE_PATH,
+    FAM_ROOT_DIR,
+    FAM_TEST_PLAYBOOKS,
+    FOREMAN_ANSIBLE_MODULES,
+    RH_SAT_ROLES,
+)
 
 
 @pytest.fixture
@@ -39,6 +40,42 @@ def sync_roles(target_sat):
     target_sat.cli.Ansible.roles_delete({'id': role_id})
 
 
+@pytest.fixture(scope='module')
+def setup_fam(module_target_sat, module_sca_manifest):
+    # Execute AAP WF for FAM setup
+    Broker().execute(workflow='fam-test-setup', source_vm=module_target_sat.name)
+
+    # Edit Makefile to not try to rebuild the collection when tests run
+    module_target_sat.execute(f"sed -i '/^live/ s/$(MANIFEST)//' {FAM_ROOT_DIR}/Makefile")
+
+    # Upload manifest to test playbooks directory
+    module_target_sat.put(str(module_sca_manifest.path), str(module_sca_manifest.name))
+    module_target_sat.execute(
+        f'mv {module_sca_manifest.name} {FAM_ROOT_DIR}/tests/test_playbooks/data'
+    )
+
+    # Edit config file
+    config_file = f'{FAM_ROOT_DIR}/tests/test_playbooks/vars/server.yml'
+    module_target_sat.execute(
+        f'cp {FAM_ROOT_DIR}/tests/test_playbooks/vars/server.yml.example {config_file}'
+    )
+    module_target_sat.execute(
+        f'sed -i "s/foreman.example.com/{module_target_sat.hostname}/g" {config_file}'
+    )
+    module_target_sat.execute(
+        f'sed -i "s/rhsm_pool_id:.*/rhsm_pool_id: {settings.subscription.rhn_poolid}/g" 
{config_file}' + ) + module_target_sat.execute( + f'''sed -i 's/rhsm_username:.*/rhsm_username: "{settings.subscription.rhn_username}"/g' {config_file}''' + ) + module_target_sat.execute( + f'''sed -i 's|subscription_manifest_path:.*|subscription_manifest_path: "data/{module_sca_manifest.name}"|g' {config_file}''' + ) + module_target_sat.execute( + f'''sed -i 's/rhsm_password:.*/rhsm_password: "{settings.subscription.rhn_password}"/g' {config_file}''' + ) + + @pytest.mark.pit_server @pytest.mark.run_in_one_thread def test_positive_ansible_modules_installation(target_sat): @@ -48,7 +85,6 @@ def test_positive_ansible_modules_installation(target_sat): :expectedresults: ansible-collection-redhat-satellite package is available and supported modules are contained - """ # list installed modules result = target_sat.execute(f'ls {FAM_MODULE_PATH} | grep .py$ | sed "s/.[^.]*$//"') @@ -74,9 +110,25 @@ def test_positive_import_run_roles(sync_roles, target_sat): :id: d3379fd3-b847-43ce-a51f-c02170e7b267 :expectedresults: fam roles import and run successfully - """ roles = sync_roles.get('roles') target_sat.cli.Host.ansible_roles_assign({'ansible-roles': roles, 'name': target_sat.hostname}) play = target_sat.cli.Host.ansible_roles_play({'name': target_sat.hostname}) assert 'Ansible roles are being played' in play[0]['message'] + + +@pytest.mark.e2e +@pytest.mark.parametrize('ansible_module', FAM_TEST_PLAYBOOKS) +def test_positive_run_modules_and_roles(module_target_sat, setup_fam, ansible_module): + """Run all FAM modules and roles on the Satellite + + :id: b595756f-627c-44ea-b738-aa17ff5b1d39 + + :expectedresults: All modules and roles run successfully + """ + # Execute test_playbook + result = module_target_sat.execute( + f'export NO_COLOR=True && . ~/localenv/bin/activate && cd {FAM_ROOT_DIR} && make livetest_{ansible_module}' + ) + assert 'PASSED' in result.stdout + assert result.status == 0 diff --git a/tests/foreman/sys/test_katello_certs_check.py b/tests/foreman/sys/test_katello_certs_check.py index a0c6eabe8cf..640add5a0ae 100644 --- a/tests/foreman/sys/test_katello_certs_check.py +++ b/tests/foreman/sys/test_katello_certs_check.py @@ -4,23 +4,66 @@ :CaseAutomation: Automated -:CaseLevel: System - -:CaseComponent: Certificates +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No - """ import re import pytest +from robottelo.config import settings +from robottelo.utils.installer import InstallerCommand + + +@pytest.mark.e2e +def test_positive_install_sat_with_katello_certs(certs_data, sat_ready_rhel): + """Install Satellite with custom certs. + + :id: 47e3a57f-d7a2-40d2-bbc7-d1bb3d79a7e1 + + :steps: + + 1. Generate the custom certs on RHEL machine + 2. Install satellite with custom certs + 3. Assert output does not report SSL certificate error + 4. Assert all services are running + + + :expectedresults: Satellite should be installed using the custom certs. 
+ + :CaseAutomation: Automated + """ + sat_ready_rhel.download_repofile( + product='satellite', + release=settings.server.version.release, + snap=settings.server.version.snap, + ) + sat_ready_rhel.register_to_cdn() + sat_ready_rhel.execute('dnf -y update') + result = sat_ready_rhel.execute( + 'dnf -y module enable satellite:el8 && dnf -y install satellite' + ) + assert result.status == 0 + command = InstallerCommand( + scenario='satellite', + certs_server_cert=f'/root/{certs_data["cert_file_name"]}', + certs_server_key=f'/root/{certs_data["key_file_name"]}', + certs_server_ca_cert=f'/root/{certs_data["ca_bundle_file_name"]}', + ).get_command() + result = sat_ready_rhel.execute(command, timeout='30m') + assert result.status == 0 + # assert no hammer ping SSL cert error + result = sat_ready_rhel.execute('hammer ping') + assert 'SSL certificate verification failed' not in result.stdout + assert result.stdout.count('Status:') == result.stdout.count(' ok') + # assert all services are running + result = sat_ready_rhel.execute('satellite-maintain health check --label services-up -y') + assert result.status == 0, 'Not all services are running' + @pytest.mark.run_in_one_thread class TestKatelloCertsCheck: @@ -139,7 +182,7 @@ def test_katello_certs_check_output_wildcard_inputs(self, cert_setup_teardown): result = target_sat.execute(command) self.validate_output(result, cert_data) - @pytest.mark.parametrize('error, cert_file, key_file, ca_file', invalid_inputs) + @pytest.mark.parametrize(('error', 'cert_file', 'key_file', 'ca_file'), invalid_inputs) @pytest.mark.tier1 def test_katello_certs_check_output_invalid_input( self, @@ -215,7 +258,7 @@ def test_negative_check_expiration_of_certificate(self, cert_setup_teardown): assert message == check break else: - assert False, f'Failed, Unable to find message "{message}" in result' + pytest.fail(f'Failed, Unable to find message "{message}" in result') target_sat.execute("date -s 'last year'") @pytest.mark.stubbed diff --git a/tests/foreman/sys/test_pulp3_filesystem.py b/tests/foreman/sys/test_pulp3_filesystem.py index ab24ca52e17..ac747ca5f10 100644 --- a/tests/foreman/sys/test_pulp3_filesystem.py +++ b/tests/foreman/sys/test_pulp3_filesystem.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: System - :CaseComponent: Pulp :team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import json from datetime import datetime +import json import pytest @@ -32,15 +27,13 @@ def test_selinux_status(target_sat): :expectedresults: SELinux is enabled and there are no denials - :customerscenario: true - - :BZ: 2131031 + :BZ: 2263294 """ # check SELinux is enabled result = target_sat.execute('getenforce') assert 'Enforcing' in result.stdout # check there are no SELinux denials - if not is_open('BZ:2131031'): + if not is_open('BZ:2263294'): result = target_sat.execute('ausearch --input-logs -m avc -ts today --raw') assert result.status == 1, 'Some SELinux denials were found in journal.' 
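The custom-certs installation in the test_katello_certs_check.py hunk above builds the satellite-installer invocation with InstallerCommand rather than hand-assembling a shell string. A minimal sketch of that usage; the certificate paths are placeholders, and the underscore-to-hyphen option rendering is an assumption inferred from the certs_server_cert keywords the test passes:

    from robottelo.utils.installer import InstallerCommand

    # Keyword arguments are assumed to render as --hyphenated-options,
    # e.g. certs_server_cert -> --certs-server-cert
    command = InstallerCommand(
        scenario='satellite',
        certs_server_cert='/root/server.crt',  # placeholder path
        certs_server_key='/root/server.key',  # placeholder path
        certs_server_ca_cert='/root/ca_bundle.pem',  # placeholder path
    ).get_command()
    # The test then runs the rendered command on the host under test:
    # result = sat_ready_rhel.execute(command, timeout='30m')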
diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py
new file mode 100644
index 00000000000..8874519a63b
--- /dev/null
+++ b/tests/foreman/ui/test_acs.py
@@ -0,0 +1,512 @@
+"""Tests for Alternate Content Sources UI

+:Requirement: AlternateContentSources

+:CaseAutomation: Automated

+:CaseComponent: AlternateContentSources

+:Team: Phoenix-content

+:CaseImportance: High

+"""
+import pytest

+from robottelo import constants
+from robottelo.constants import REPO_TYPE
+from robottelo.constants.repos import CUSTOM_FILE_REPO
+from robottelo.utils.datafactory import gen_string

+ssl_name, product_name, product_label, product_description, repository_name = (
+    gen_string('alpha') for _ in range(5)
+)
+repos_to_enable = ['rhae2.9_el8']


+@pytest.fixture(scope='class')
+def acs_setup(class_target_sat, class_sca_manifest_org):
+    """
+    This fixture creates all the data necessary for the tests to run.
+    It creates content credentials, a product, and repositories in the supplied organization.
+    """
+    class_target_sat.api.ContentCredential(
+        name=ssl_name,
+        content=gen_string('alpha'),
+        organization=class_sca_manifest_org.id,
+        content_type="cert",
+    ).create()
+
+    product = class_target_sat.api.Product(
+        name=product_name, organization=class_sca_manifest_org.id
+    ).create()
+
+    class_target_sat.api.Repository(
+        product=product, content_type=REPO_TYPE['file'], url=CUSTOM_FILE_REPO
+    ).create()
+
+    for repo in repos_to_enable:
+        class_target_sat.cli.RepositorySet.enable(
+            {
+                'organization-id': class_sca_manifest_org.id,
+                'name': constants.REPOS[repo]['reposet'],
+                'product': constants.REPOS[repo]['product'],
+                'releasever': constants.REPOS[repo]['version'],
+                'basearch': constants.DEFAULT_ARCHITECTURE,
+            }
+        )
+
+    return class_target_sat, class_sca_manifest_org


+@pytest.mark.tier2
+def test_acs_subpath_not_required(acs_setup):
+    """
+    This test verifies that the subpath field isn't mandatory for ACS creation.
+
+    :id: 232d944a-a7c1-4387-ab01-7e20b2bbebfa
+
+    :steps:
+        1. Create an ACS of each type where the subpath field is present in the UI
+
+    :expectedresults:
+        The subpath field isn't required for creating any ACS where it's present,
+        so all the ACSes should be created successfully
+
+    :BZ: 2119112
+
+    :customerscenario: True
+    """
+    class_target_sat, class_sca_manifest_org = acs_setup
+
+    with class_target_sat.ui_session() as session:
+        session.organization.select(org_name=class_sca_manifest_org.name)
+
+        # Create an ACS of each configuration that displays the subpath field in the UI
+        session.acs.create_new_acs(
+            custom_type=True,
+            content_type='yum',
+            name=gen_string('alpha'),
+            capsules_to_add=class_target_sat.hostname,
+            use_http_proxies=True,
+            base_url='https://test.com',
+            none_auth=True,
+        )
+
+        session.acs.create_new_acs(
+            custom_type=True,
+            content_type='file',
+            name=gen_string('alpha'),
+            capsules_to_add=class_target_sat.hostname,
+            use_http_proxies=True,
+            base_url='https://test.com',
+            none_auth=True,
+        )
+
+        session.acs.create_new_acs(
+            rhui_type=True,
+            name=gen_string('alpha'),
+            capsules_to_add=class_target_sat.hostname,
+            use_http_proxies=True,
+            base_url='https://rhui-server.example.com/pulp/content',
+            none_auth=True,
+        )


+class TestAllAcsTypes:
+    """
+    Test class ensuring the fixture runs once before
+    test_check_all_acs_types_can_be_created
+    """
+
+    pytestmark = pytest.mark.usefixtures('acs_setup')
+
+    def gen_params():
+        """
+        This function generates parameters that are used in test_check_all_acs_types_can_be_created.
+ """ + + parameters_dict = { + '_common': { + 'use_http_proxies': True, + }, + 'custom': { + '_common': { + 'custom_type': True, + 'base_url': 'https://test.com/', + 'subpaths': ['test/'], + 'capsules_to_add': 'class_target_sat.hostname', + }, + 'yum_manual_auth': { + 'content_type': 'yum', + 'name': 'customYumManualAuth', + 'description': 'customYumManualAuthDesc', + 'manual_auth': True, + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'username': 'test', + 'password': 'test', + }, + 'yum_content_auth': { + 'content_type': 'yum', + 'name': 'customYumContentAuth', + 'description': 'customYumContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + 'verify_ssl': True, + 'ca_cert': ssl_name, + }, + 'yum_none_auth': { + 'content_type': 'yum', + 'name': 'customYumNoneAuth', + 'description': 'customYumNoneAuthDesc', + 'none_auth': True, + }, + 'file_manual_auth': { + 'content_type': 'file', + 'name': 'customFileManualAuth', + 'description': 'customFileManualAuthDesc', + 'manual_auth': True, + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'username': 'test', + 'password': 'test', + }, + 'file_content_auth': { + 'content_type': 'file', + 'name': 'customFileContentAuth', + 'description': 'customFileContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + 'verify_ssl': True, + 'ca_cert': ssl_name, + }, + 'file_none_auth': { + 'content_type': 'file', + 'name': 'customFileNoneAuth', + 'description': 'customFileNoneAuthDesc', + 'none_auth': True, + }, + }, + 'simplified': { + '_common': {'simplified_type': True}, + 'yum': { + 'content_type': 'yum', + 'name': 'simpleYum', + 'description': 'simpleYumDesc', + 'capsules_to_add': 'class_target_sat.hostname', + 'products_to_add': [ + constants.REPOS[repo]['product'] for repo in repos_to_enable + ], + }, + 'file': { + 'content_type': 'file', + 'name': 'simpleFile', + 'description': 'simpleFileDesc', + 'add_all_capsules': True, + 'products_to_add': product_name, + }, + }, + 'rhui': { + '_common': { + 'rhui_type': True, + 'base_url': 'https://test.com/pulp/content', + 'subpaths': ['test/', 'test2/'], + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'capsules_to_add': 'class_target_sat.hostname', + }, + 'yum_none_auth': { + 'name': 'rhuiYumNoneAuth', + 'description': 'rhuiYumNoneAuthDesc', + 'none_auth': True, + }, + 'yum_content_auth': { + 'name': 'rhuiYumContentAuth', + 'description': 'rhuiYumContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + }, + }, + } + + ids = [] + vals = [] + # This code creates a list of scenario IDs and values for each scenario. + # It loops through the keys in the parameters dictionary, and uses the keys to create a scenario ID + # and then it uses the scenario ID to access the scenario values from the parameters dictionary. + # The code then adds the scenario values to the list of scenario values. + for acs in parameters_dict: + if not acs.startswith('_'): + for cnt in parameters_dict[acs]: + if not cnt.startswith('_'): + scenario = ( + parameters_dict[acs][cnt] + | parameters_dict.get('_common', {}) + | parameters_dict[acs].get('_common', {}) + ) + ids.append(f'{acs}_{cnt}') + vals.append(scenario) + return (vals, ids) + + @pytest.mark.parametrize('scenario', gen_params()[0], ids=gen_params()[1]) + def test_check_all_acs_types_can_be_created(session, scenario, acs_setup): + """ + This test creates all possible ACS types. 
+
+        :id: 6bfad272-3ff8-4780-b346-1229d70524b1
+
+        :parametrized: yes
+
+        :steps:
+            1. Select an organization
+            2. Create ACSes
+        :expectedresults:
+            This test should create all Alternate Content Sources
+        """
+
+        class_target_sat, class_sca_manifest_org = acs_setup
+        vals = scenario
+
+        # Replace the placeholder in 'capsules_to_add' with the hostname of the Satellite under test
+        for val in vals:
+            if 'capsules_to_add' in val:
+                vals['capsules_to_add'] = class_target_sat.hostname
+
+        with class_target_sat.ui_session() as session:
+            session.organization.select(org_name=class_sca_manifest_org.name)
+            session.acs.create_new_acs(**vals)


+class TestAcsE2e:
+    """
+    Test class ensuring the fixture runs once before
+    test_acs_positive_end_to_end
+    """
+
+    pytestmark = pytest.mark.usefixtures('acs_setup')
+
+    @pytest.mark.e2e
+    def test_acs_positive_end_to_end(self, session, acs_setup):
+        """
+        Create, update, delete and refresh ACSes.
+
+        :id: 047452cc-5a9f-4473-96b1-d5b6830b7d6b
+
+        :steps:
+            1. Select an organization
+            2. Create ACSes (randomly selected ones)
+            3. Test refresh
+            4. Test renaming and changing description
+            5. Test editing capsules
+            6. Test editing urls and subpaths
+            7. Test editing credentials
+            8. Test editing products
+            9. Create ACS on which deletion is going to be tested
+            10. Test deletion
+        :expectedresults:
+            This test should create some
+            Alternate Content Sources and assert that the actions
+            were performed correctly on them.
+        """
+
+        class_target_sat, class_sca_manifest_org = acs_setup
+
+        with class_target_sat.ui_session() as session:
+            session.organization.select(org_name=class_sca_manifest_org.name)
+
+            # Create ACS using "Simplified" option with content type of "File"
+            session.acs.create_new_acs(
+                simplified_type=True,
+                content_type='file',
+                name='simpleFileTest',
+                description='simpleFileTestDesc',
+                add_all_capsules=True,
+                use_http_proxies=True,
+                products_to_add=product_name,
+            )
+
+            # Create ACS using "Simplified" option with content type of "Yum"
+            session.acs.create_new_acs(
+                simplified_type=True,
+                content_type='yum',
+                name='simpleYumTest',
+                description='simpleYumTestDesc',
+                capsules_to_add=class_target_sat.hostname,
+                use_http_proxies=True,
+                products_to_add=[constants.REPOS[repo]['product'] for repo in repos_to_enable],
+            )
+
+            # Create ACS using "Custom" option with content type of "File"
+            # and using manual authentication
+            session.acs.create_new_acs(
+                custom_type=True,
+                content_type='file',
+                name='customFileManualTestAuth',
+                description='customFileManualTestAuthDesc',
+                capsules_to_add=class_target_sat.hostname,
+                use_http_proxies=True,
+                base_url='https://test.com',
+                subpaths=['test/'],
+                manual_auth=True,
+                username='test',
+                password='test',
+                verify_ssl=True,
+                ca_cert=ssl_name,
+            )
+
+            # Create ACS using "Custom" option with content type of "File"
+            # and using content credentials authentication
+            session.acs.create_new_acs(
+                custom_type=True,
+                content_type='file',
+                name='customFileContentAuthTest',
+                description='customFileContentAuthTestDesc',
+                capsules_to_add=class_target_sat.hostname,
+                use_http_proxies=True,
+                base_url='https://test.com',
+                subpaths=['test/'],
+                content_credentials_auth=True,
+                ssl_client_cert=ssl_name,
+                ssl_client_key=ssl_name,
+                verify_ssl=True,
+                ca_cert=ssl_name,
+            )
+
+            # Create ACS using "Custom" option with content type of "Yum"
+            # and using NO authentication
+            session.acs.create_new_acs(
+                custom_type=True,
+                content_type='yum',
+                name='customYumNoneAuthTest',
+
description='customYumNoneAuthTestDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) + + # Refresh ACS and check that last refresh time is updated + session.acs.refresh_acs(acs_name='simpleFileTest') + simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFileTest') + assert simple_file_refreshed['details']['last_refresh'] in [ + 'less than a minute ago', + '1 minute ago', + ] + + # Rename and change description of ACS and then check that it was changed + simple_file_renamed = session.acs.edit_acs_details( + acs_name_to_edit='simpleFileTest', + new_acs_name='simpleFileTestRenamed', + new_description='simpleFileTestRenamedDesc', + ) + simple_file_renamed = session.acs.get_row_drawer_content( + acs_name='simpleFileTestRenamed' + ) + assert ( + simple_file_renamed['details']['details_stack_content']['name'] + == 'simpleFileTestRenamed' + ) + assert ( + simple_file_renamed['details']['details_stack_content']['description'] + == 'simpleFileTestRenamedDesc' + ) + + # Edit ACS capsules + custom_file_edited_capsules = session.acs.edit_capsules( + acs_name_to_edit='customFileContentAuthTest', + remove_all=True, + use_http_proxies=False, + ) + custom_file_edited_capsules = session.acs.get_row_drawer_content( + acs_name='customFileContentAuthTest' + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content']['capsules_list'] + == [] + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content'][ + 'use_http_proxies' + ] + == 'false' + ) + + # Edit ACS urls and subpaths + custom_yum_edited_url = session.acs.edit_url_subpaths( + acs_name_to_edit='customYumNoneAuthTest', + new_url='https://testNEW.com', + new_subpaths=['test/', 'testNEW/'], + ) + custom_yum_edited_url = session.acs.get_row_drawer_content( + acs_name='customYumNoneAuthTest' + ) + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['url'] + == 'https://testNEW.com' + ) + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content'][ + 'subpaths' + ] + == 'test/,testNEW/' + ) + + # Edit ACS credentials + custom_file_edited_credentials = session.acs.edit_credentials( + acs_name_to_edit='customFileManualTestAuth', + verify_ssl=False, + manual_auth=True, + username='changedUserName', + ) + custom_file_edited_credentials = session.acs.get_row_drawer_content( + acs_name='customFileManualTestAuth' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content'][ + 'verify_ssl' + ] + == 'false' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content'][ + 'username' + ] + == 'changedUserName' + ) + + # Edit ACS products + simple_yum_edited_products = session.acs.edit_products( + acs_name_to_edit='simpleYumTest', + remove_all=True, + ) + simple_yum_edited_products = session.acs.get_row_drawer_content( + acs_name='simpleYumTest' + ) + assert ( + simple_yum_edited_products['products']['products_stack_content']['products_list'] + == [] + ) + + # Create ACS on which deletion is going to be tested + session.acs.create_new_acs( + rhui_type=True, + name='testAcsToBeDeleted', + description='testAcsToBeDeleted', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + none_auth=True, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Delete ACS and check if trying to read it afterwards fails 
+ session.acs.delete_acs(acs_name='testAcsToBeDeleted') + with pytest.raises(ValueError): # noqa: PT011 - TODO determine better exception + session.acs.get_row_drawer_content(acs_name='testAcsToBeDeleted') diff --git a/tests/foreman/ui/test_activationkey.py b/tests/foreman/ui/test_activationkey.py index f6421c8c0b1..5dc53f39c1e 100644 --- a/tests/foreman/ui/test_activationkey.py +++ b/tests/foreman/ui/test_activationkey.py @@ -4,51 +4,40 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ActivationKeys :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random -import pytest -from airgun.session import Session from broker import Broker from fauxfactory import gen_string -from nailgun import entities +import pytest from robottelo import constants -from robottelo.cli.factory import setup_org_for_a_custom_repo from robottelo.config import settings from robottelo.hosts import ContentHost -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_data_list +from robottelo.utils.datafactory import parametrized, valid_data_list @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_end_to_end_crud(session, module_org): +def test_positive_end_to_end_crud(session, module_org, module_target_sat): """Perform end to end testing for activation key component :id: b6b98c45-e41e-4c7a-9be4-997273b7e24d :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') new_name = gen_string('alpha') - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() cv.publish() with session: # Create activation key with content view and LCE assigned @@ -79,7 +68,12 @@ def test_positive_end_to_end_crud(session, module_org): indirect=True, ) def test_positive_end_to_end_register( - session, function_entitlement_manifest_org, repos_collection, rhel7_contenthost, target_sat + session, + function_entitlement_manifest_org, + default_location, + repos_collection, + rhel7_contenthost, + target_sat, ): """Create activation key and use it during content host registering @@ -88,21 +82,22 @@ def test_positive_end_to_end_register( :expectedresults: Content host was registered successfully using activation key, association is reflected on webUI - :CaseLevel: System - :parametrized: yes :CaseImportance: High """ org = function_entitlement_manifest_org - lce = entities.LifecycleEnvironment(organization=org).create() + lce = target_sat.api.LifecycleEnvironment(organization=org).create() repos_collection.setup_content(org.id, lce.id, upload_manifest=False) ak_name = repos_collection.setup_content_data['activation_key']['name'] - repos_collection.setup_virtual_machine(rhel7_contenthost) + repos_collection.setup_virtual_machine(rhel7_contenthost, install_katello_agent=False) with session: session.organization.select(org.name) - chost = session.contenthost.read(rhel7_contenthost.hostname, widget_names='details') + session.location.select(default_location.name) + chost = session.contenthost.read_legacy_ui( + rhel7_contenthost.hostname, widget_names='details' + ) assert chost['details']['registered_by'] == f'Activation Key {ak_name}' ak_values = session.activationkey.read(ak_name, widget_names='content_hosts') assert len(ak_values['content_hosts']['table']) == 1 @@ -120,8 +115,6 @@ def test_positive_create_with_cv(session, module_org, cv_name, 
target_sat): :parametrized: yes :expectedresults: Activation key is created - - :CaseLevel: Integration """ name = gen_string('alpha') env_name = gen_string('alpha') @@ -149,8 +142,6 @@ def test_positive_search_scoped(session, module_org, target_sat): :BZ: 1259374 - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') @@ -178,17 +169,15 @@ def test_positive_search_scoped(session, module_org, target_sat): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_create_with_host_collection(session, module_org): +def test_positive_create_with_host_collection(session, module_org, module_target_sat): """Create Activation key with Host Collection :id: 0e4ad2b4-47a7-4087-828f-2b0535a97b69 :expectedresults: Activation key is created - - :CaseLevel: Integration """ name = gen_string('alpha') - hc = entities.HostCollection(organization=module_org).create() + hc = module_target_sat.api.HostCollection(organization=module_org).create() with session: session.activationkey.create({'name': name, 'lce': {constants.ENVIRONMENT: True}}) assert session.activationkey.search(name)[0]['Name'] == name @@ -205,8 +194,6 @@ def test_positive_create_with_envs(session, module_org, target_sat): :id: f75e994a-6da1-40a3-9685-f8387388b3f0 :expectedresults: Activation key is created - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -235,25 +222,23 @@ def test_positive_add_host_collection_non_admin(module_org, test_name, target_sa listed :BZ: 1473212 - - :CaseLevel: Integration """ ak_name = gen_string('alpha') - hc = entities.HostCollection(organization=module_org).create() + hc = target_sat.api.HostCollection(organization=module_org).create() # Create non-admin user with specified permissions - roles = [entities.Role().create()] + roles = [target_sat.api.Role().create()] user_permissions = { 'Katello::ActivationKey': constants.PERMISSIONS['Katello::ActivationKey'], 'Katello::HostCollection': constants.PERMISSIONS['Katello::HostCollection'], } - viewer_role = entities.Role().search(query={'search': 'name="Viewer"'})[0] + viewer_role = target_sat.api.Role().search(query={'search': 'name="Viewer"'})[0] roles.append(viewer_role) target_sat.api_factory.create_role_permissions(roles[0], user_permissions) password = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( admin=False, role=roles, password=password, organization=[module_org] ).create() - with Session(test_name, user=user.login, password=password) as session: + with target_sat.ui_session(test_name, user=user.login, password=password) as session: session.activationkey.create({'name': ak_name, 'lce': {constants.ENVIRONMENT: True}}) assert session.activationkey.search(ak_name)[0]['Name'] == ak_name session.activationkey.add_host_collection(ak_name, hc.name) @@ -271,25 +256,23 @@ def test_positive_remove_host_collection_non_admin(module_org, test_name, target :expectedresults: Activation key is created, removed host collection is not listed - - :CaseLevel: Integration """ ak_name = gen_string('alpha') - hc = entities.HostCollection(organization=module_org).create() + hc = target_sat.api.HostCollection(organization=module_org).create() # Create non-admin user with specified permissions - roles = [entities.Role().create()] + roles = [target_sat.api.Role().create()] user_permissions = { 'Katello::ActivationKey': constants.PERMISSIONS['Katello::ActivationKey'], 'Katello::HostCollection': constants.PERMISSIONS['Katello::HostCollection'], } - viewer_role = 
entities.Role().search(query={'search': 'name="Viewer"'})[0] + viewer_role = target_sat.api.Role().search(query={'search': 'name="Viewer"'})[0] roles.append(viewer_role) target_sat.api_factory.create_role_permissions(roles[0], user_permissions) password = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( admin=False, role=roles, password=password, organization=[module_org] ).create() - with Session(test_name, user=user.login, password=password) as session: + with target_sat.ui_session(test_name, user=user.login, password=password) as session: session.activationkey.create({'name': ak_name, 'lce': {constants.ENVIRONMENT: True}}) assert session.activationkey.search(ak_name)[0]['Name'] == ak_name session.activationkey.add_host_collection(ak_name, hc.name) @@ -308,8 +291,6 @@ def test_positive_delete_with_env(session, module_org, target_sat): :id: b6019881-3d6e-4b75-89f5-1b62aff3b1ca :expectedresults: Activation key is deleted - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -332,8 +313,6 @@ def test_positive_delete_with_cv(session, module_org, target_sat): :id: 7e40e1ed-8314-406b-9451-05f64806a6e6 :expectedresults: Activation key is deleted - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -358,8 +337,6 @@ def test_positive_update_env(session, module_org, target_sat): :id: 895cda6a-bb1e-4b94-a858-95f0be78a17b :expectedresults: Activation key is updated - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -389,14 +366,12 @@ def test_positive_update_cv(session, module_org, cv2_name, target_sat): :parametrized: yes - :Steps: + :steps: 1. Create Activation key 2. Update the Content view with another Content view which has custom products :expectedresults: Activation key is updated - - :CaseLevel: Integration """ name = gen_string('alpha') env1_name = gen_string('alpha') @@ -422,22 +397,19 @@ def test_positive_update_cv(session, module_org, cv2_name, target_sat): @pytest.mark.run_in_one_thread -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 def test_positive_update_rh_product(function_entitlement_manifest_org, session, target_sat): """Update Content View in an Activation key :id: 9b0ac209-45de-4cc4-97e8-e191f3f37239 - :Steps: + :steps: 1. Create an activation key 2. 
Update the content view with another content view which has RH products :expectedresults: Activation key is updated - - :CaseLevel: Integration """ name = gen_string('alpha') env1_name = gen_string('alpha') @@ -479,7 +451,6 @@ def test_positive_update_rh_product(function_entitlement_manifest_org, session, @pytest.mark.run_in_one_thread -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 def test_positive_add_rh_product(function_entitlement_manifest_org, session, target_sat): """Test that RH product can be associated to Activation Keys @@ -487,8 +458,6 @@ def test_positive_add_rh_product(function_entitlement_manifest_org, session, tar :id: d805341b-6d2f-4e16-8cb4-902de00b9a6c :expectedresults: RH products are successfully associated to Activation key - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -524,8 +493,6 @@ def test_positive_add_custom_product(session, module_org, target_sat): :expectedresults: Custom products are successfully associated to Activation key - - :CaseLevel: Integration """ name = gen_string('alpha') cv_name = gen_string('alpha') @@ -548,7 +515,6 @@ def test_positive_add_custom_product(session, module_org, target_sat): @pytest.mark.run_in_one_thread -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_add_rh_and_custom_products( @@ -558,15 +524,13 @@ def test_positive_add_rh_and_custom_products( :id: 3d8876fa-1412-47ca-a7a4-bce2e8baf3bc - :Steps: + :steps: 1. Create Activation key 2. Associate RH product(s) to Activation Key 3. Associate custom product(s) to Activation Key :expectedresults: RH/Custom product is successfully associated to Activation key - - :CaseLevel: Integration """ name = gen_string('alpha') rh_repo = { @@ -579,8 +543,8 @@ def test_positive_add_rh_and_custom_products( custom_product_name = gen_string('alpha') repo_name = gen_string('alpha') org = function_entitlement_manifest_org - product = entities.Product(name=custom_product_name, organization=org).create() - repo = entities.Repository(name=repo_name, product=product).create() + product = target_sat.api.Product(name=custom_product_name, organization=org).create() + repo = target_sat.api.Repository(name=repo_name, product=product).create() rhel_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch=rh_repo['basearch'], org_id=org.id, @@ -590,7 +554,7 @@ def test_positive_add_rh_and_custom_products( releasever=rh_repo['releasever'], ) for repo_id in [rhel_repo_id, repo.id]: - entities.Repository(id=repo_id).sync() + target_sat.api.Repository(id=repo_id).sync() with session: session.organization.select(org.name) session.activationkey.create( @@ -612,7 +576,6 @@ def test_positive_add_rh_and_custom_products( @pytest.mark.run_in_one_thread -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_fetch_product_content(target_sat, function_entitlement_manifest_org, session): @@ -624,8 +587,6 @@ def test_positive_fetch_product_content(target_sat, function_entitlement_manifes assigned as Activation Key's product content :BZ: 1426386, 1432285 - - :CaseLevel: Integration """ org = function_entitlement_manifest_org rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( @@ -636,19 +597,21 @@ def test_positive_fetch_product_content(target_sat, function_entitlement_manifes reposet=constants.REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() 
rh_repo.sync()
-    custom_product = entities.Product(organization=org).create()
-    custom_repo = entities.Repository(
+    custom_product = target_sat.api.Product(organization=org).create()
+    custom_repo = target_sat.api.Repository(
         name=gen_string('alphanumeric').upper(),  # first letter is always
         # uppercase on product content page, working around it for
         # successful checks
         product=custom_product,
     ).create()
     custom_repo.sync()
-    cv = entities.ContentView(organization=org, repository=[rh_repo_id, custom_repo.id]).create()
+    cv = target_sat.api.ContentView(
+        organization=org, repository=[rh_repo_id, custom_repo.id]
+    ).create()
     cv.publish()
-    ak = entities.ActivationKey(content_view=cv, organization=org).create()
+    ak = target_sat.api.ActivationKey(content_view=cv, organization=org).create()
     with session:
         session.organization.select(org.name)
         for subscription in (constants.DEFAULT_SUBSCRIPTION_NAME, custom_product.name):
@@ -661,7 +624,7 @@ def test_positive_fetch_product_content(target_sat, function_entitlement_manifes
 @pytest.mark.e2e
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_access_non_admin_user(session, test_name):
+def test_positive_access_non_admin_user(session, test_name, target_sat):
     """Access activation key that has specific name and assigned environment by
     user that has filter configured for that specific activation key
@@ -673,29 +636,29 @@
     admin user
 
     :BZ: 1463813
-
-    :CaseLevel: Integration
     """
     ak_name = gen_string('alpha')
     non_searchable_ak_name = gen_string('alpha')
-    org = entities.Organization().create()
+    org = target_sat.api.Organization().create()
     envs_list = ['STAGING', 'DEV', 'IT', 'UAT', 'PROD']
     for name in envs_list:
-        entities.LifecycleEnvironment(name=name, organization=org).create()
+        target_sat.api.LifecycleEnvironment(name=name, organization=org).create()
     env_name = random.choice(envs_list)
-    cv = entities.ContentView(organization=org).create()
+    cv = target_sat.api.ContentView(organization=org).create()
     cv.publish()
     content_view_version = cv.read().version[0]
     content_view_version.promote(
-        data={'environment_ids': [entities.LifecycleEnvironment(name=env_name).search()[0].id]}
+        data={
+            'environment_ids': [target_sat.api.LifecycleEnvironment(name=env_name).search()[0].id]
+        }
     )
     # Create new role
-    role = entities.Role().create()
+    role = target_sat.api.Role().create()
     # Create filter with predefined activation keys search criteria
     envs_condition = ' or '.join(['environment = ' + s for s in envs_list])
-    entities.Filter(
+    target_sat.api.Filter(
         organization=[org],
-        permission=entities.Permission().search(
+        permission=target_sat.api.Permission().search(
             filters={'name': 'view_activation_keys'},
             query={'search': 'resource_type="Katello::ActivationKey"'},
         ),
@@ -704,20 +667,24 @@
     # Add permissions for Organization and Location
-    entities.Filter(
-        permission=entities.Permission().search(query={'search': 'resource_type="Organization"'}),
+    target_sat.api.Filter(
+        permission=target_sat.api.Permission().search(
+            query={'search': 'resource_type="Organization"'}
+        ),
         role=role,
     ).create()
-    entities.Filter(
-        permission=entities.Permission().search(query={'search': 'resource_type="Location"'}),
+    target_sat.api.Filter(
+        permission=target_sat.api.Permission().search(query={'search': 'resource_type="Location"'}),
        role=role,
     ).create()
     # Create new user with a configured role
-    default_loc = 
entities.Location().search(query={'search': f'name="{constants.DEFAULT_LOC}"'})[0] + default_loc = target_sat.api.Location().search( + query={'search': f'name="{constants.DEFAULT_LOC}"'} + )[0] user_login = gen_string('alpha') user_password = gen_string('alpha') - entities.User( + target_sat.api.User( role=[role], admin=False, login=user_login, @@ -738,7 +705,7 @@ def test_positive_access_non_admin_user(session, test_name): env_name ][env_name] - with Session(test_name, user=user_login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user_login, password=user_password) as session: session.organization.select(org.name) session.location.select(constants.DEFAULT_LOC) assert session.activationkey.search(ak_name)[0]['Name'] == ak_name @@ -749,7 +716,7 @@ def test_positive_access_non_admin_user(session, test_name): @pytest.mark.tier2 -def test_positive_remove_user(session, module_org, test_name): +def test_positive_remove_user(session, module_org, test_name, module_target_sat): """Delete any user who has previously created an activation key and check that activation key still exists @@ -762,9 +729,11 @@ def test_positive_remove_user(session, module_org, test_name): ak_name = gen_string('alpha') # Create user password = gen_string('alpha') - user = entities.User(admin=True, default_organization=module_org, password=password).create() + user = module_target_sat.api.User( + admin=True, default_organization=module_org, password=password + ).create() # Create Activation Key using new user credentials - with Session(test_name, user.login, password) as non_admin_session: + with module_target_sat.ui_session(test_name, user.login, password) as non_admin_session: non_admin_session.activationkey.create( {'name': ak_name, 'lce': {constants.ENVIRONMENT: True}} ) @@ -776,7 +745,7 @@ def test_positive_remove_user(session, module_org, test_name): @pytest.mark.tier2 -def test_positive_add_docker_repo_cv(session, module_org): +def test_positive_add_docker_repo_cv(session, module_org, module_target_sat): """Add docker repository to a non-composite content view and publish it. Then create an activation key and associate it with the Docker content view. @@ -785,16 +754,14 @@ def test_positive_add_docker_repo_cv(session, module_org): :expectedresults: Content view with docker repo can be added to activation key - - :CaseLevel: Integration """ - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = entities.Repository( + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = module_target_sat.api.Repository( content_type=constants.REPO_TYPE['docker'], - product=entities.Product(organization=module_org).create(), + product=module_target_sat.api.Product(organization=module_org).create(), url=constants.CONTAINER_REGISTRY_HUB, ).create() - content_view = entities.ContentView( + content_view = module_target_sat.api.ContentView( composite=False, organization=module_org, repository=[repo] ).create() content_view.publish() @@ -811,7 +778,7 @@ def test_positive_add_docker_repo_cv(session, module_org): @pytest.mark.tier2 -def test_positive_add_docker_repo_ccv(session, module_org): +def test_positive_add_docker_repo_ccv(session, module_org, module_target_sat): """Add docker repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the composite Docker content @@ -821,22 +788,20 @@ def test_positive_add_docker_repo_ccv(session, module_org): :expectedresults: Docker-based content view can be added to activation key - - :CaseLevel: Integration """ - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = entities.Repository( + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = module_target_sat.api.Repository( content_type=constants.REPO_TYPE['docker'], - product=entities.Product(organization=module_org).create(), + product=module_target_sat.api.Product(organization=module_org).create(), url=constants.CONTAINER_REGISTRY_HUB, ).create() - content_view = entities.ContentView( + content_view = module_target_sat.api.ContentView( composite=False, organization=module_org, repository=[repo] ).create() content_view.publish() cvv = content_view.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( component=[cvv], composite=True, organization=module_org ).create() composite_cv.publish() @@ -859,7 +824,7 @@ def test_positive_add_host(session, module_org, rhel6_contenthost, target_sat): :id: 886e9ea5-d917-40e0-a3b1-41254c4bf5bf - :Steps: + :steps: 1. Create Activation key 2. Create different hosts 3. Associate the hosts to Activation key @@ -867,11 +832,9 @@ def test_positive_add_host(session, module_org, rhel6_contenthost, target_sat): :expectedresults: Hosts are successfully associated to Activation key :parametrized: yes - - :CaseLevel: System """ - ak = entities.ActivationKey( - environment=entities.LifecycleEnvironment( + ak = target_sat.api.ActivationKey( + environment=target_sat.api.LifecycleEnvironment( name=constants.ENVIRONMENT, organization=module_org ).search()[0], organization=module_org, @@ -895,7 +858,7 @@ def test_positive_delete_with_system(session, rhel6_contenthost, target_sat): :id: 86cd070e-cf46-4bb1-b555-e7cb42e4dc9f - :Steps: + :steps: 1. Create an Activation key 2. Register systems to it 3. Delete the Activation key @@ -903,14 +866,12 @@ def test_positive_delete_with_system(session, rhel6_contenthost, target_sat): :expectedresults: Activation key is deleted :parametrized: yes - - :CaseLevel: System """ name = gen_string('alpha') cv_name = gen_string('alpha') env_name = gen_string('alpha') product_name = gen_string('alpha') - org = entities.Organization().create() + org = target_sat.api.Organization().create() # Helper function to create and promote CV to next environment repo_id = target_sat.api_factory.create_sync_custom_repo( product_name=product_name, org_id=org.id @@ -937,7 +898,7 @@ def test_negative_usage_limit(session, module_org, target_sat): :id: 9fe2d661-66f8-46a4-ae3f-0a9329494bdd - :Steps: + :steps: 1. Create Activation key 2. Update Usage Limit to a finite number 3. Register Systems to match the Usage Limit @@ -945,8 +906,6 @@ def test_negative_usage_limit(session, module_org, target_sat): Limit :expectedresults: System Registration fails. 
Appropriate error shown - - :CaseLevel: System """ name = gen_string('alpha') hosts_limit = '1' @@ -980,8 +939,6 @@ def test_positive_add_multiple_aks_to_system(session, module_org, rhel6_contenth :expectedresults: Multiple Activation keys are attached to a system :parametrized: yes - - :CaseLevel: System """ key_1_name = gen_string('alpha') key_2_name = gen_string('alpha') @@ -1044,15 +1001,13 @@ def test_positive_host_associations(session, target_sat): :customerscenario: true :BZ: 1344033, 1372826, 1394388 - - :CaseLevel: System """ - org = entities.Organization().create() - org_entities = setup_org_for_a_custom_repo( + org = target_sat.api.Organization().create() + org_entities = target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_1.url, 'organization-id': org.id} ) - ak1 = entities.ActivationKey(id=org_entities['activationkey-id']).read() - ak2 = entities.ActivationKey( + ak1 = target_sat.api.ActivationKey(id=org_entities['activationkey-id']).read() + ak2 = target_sat.api.ActivationKey( content_view=org_entities['content-view-id'], environment=org_entities['lifecycle-environment-id'], organization=org.id, @@ -1076,7 +1031,7 @@ def test_positive_host_associations(session, target_sat): assert ak2['content_hosts']['table'][0]['Name'] == vm2.hostname -@pytest.mark.skip_if_not_set('clients', 'fake_manifest') +@pytest.mark.skip_if_not_set('clients') @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url') def test_positive_service_level_subscription_with_custom_product( @@ -1108,17 +1063,15 @@ def test_positive_service_level_subscription_with_custom_product( :BZ: 1394357 :parametrized: yes - - :CaseLevel: System """ org = function_entitlement_manifest_org - entities_ids = setup_org_for_a_custom_repo( + entities_ids = target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_1.url, 'organization-id': org.id} ) - product = entities.Product(id=entities_ids['product-id']).read() - activation_key = entities.ActivationKey(id=entities_ids['activationkey-id']).read() + product = target_sat.api.Product(id=entities_ids['product-id']).read() + activation_key = target_sat.api.ActivationKey(id=entities_ids['activationkey-id']).read() # add the default RH subscription - subscription = entities.Subscription(organization=org).search( + subscription = target_sat.api.Subscription(organization=org).search( query={'search': f'name="{constants.DEFAULT_SUBSCRIPTION_NAME}"'} )[0] activation_key.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) @@ -1147,28 +1100,25 @@ def test_positive_service_level_subscription_with_custom_product( @pytest.mark.run_in_one_thread -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 -def test_positive_delete_manifest(session, function_entitlement_manifest_org): +def test_positive_delete_manifest(session, function_entitlement_manifest_org, target_sat): """Check if deleting a manifest removes it from Activation key :id: 512d8e41-b937-451e-a9c6-840457d3d7d4 - :Steps: + :steps: 1. Create Activation key 2. Associate a manifest to the Activation Key 3. 
Delete the manifest :expectedresults: Deleting a manifest removes it from the Activation key - - :CaseLevel: Integration """ org = function_entitlement_manifest_org # Create activation key - activation_key = entities.ActivationKey(organization=org).create() + activation_key = target_sat.api.ActivationKey(organization=org).create() # Associate a manifest to the activation key - subscription = entities.Subscription(organization=org).search( + subscription = target_sat.api.Subscription(organization=org).search( query={'search': f'name="{constants.DEFAULT_SUBSCRIPTION_NAME}"'} )[0] activation_key.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) @@ -1191,7 +1141,7 @@ def test_positive_delete_manifest(session, function_entitlement_manifest_org): assert not ak['subscriptions']['resources']['assigned'] -@pytest.mark.skip_if_not_set('clients', 'fake_manifest') +@pytest.mark.skip_if_not_set('clients') @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url') def test_positive_ak_with_custom_product_on_rhel6(session, rhel6_contenthost, target_sat): @@ -1201,7 +1151,7 @@ def test_positive_ak_with_custom_product_on_rhel6(session, rhel6_contenthost, ta :customerscenario: true - :Steps: + :steps: 1. Create a custom repo 2. Create ak and add custom repo to ak 3. Add subscriptions to the ak @@ -1210,8 +1160,6 @@ def test_positive_ak_with_custom_product_on_rhel6(session, rhel6_contenthost, ta :expectedresults: Host is registered successfully :bz: 2038388 - - :CaseLevel: Integration """ org = target_sat.api.Organization().create() entities_ids = target_sat.cli_factory.setup_org_for_a_custom_repo( @@ -1227,3 +1175,38 @@ def test_positive_ak_with_custom_product_on_rhel6(session, rhel6_contenthost, ta ak = session.activationkey.read(ak.name, widget_names='content_hosts') assert len(ak['content_hosts']['table']) == 1 assert ak['content_hosts']['table'][0]['Name'] == rhel6_contenthost.hostname + + +def test_positive_custom_products_disabled_by_default_ak( + session, + default_location, + setup_content, +): + """Verify that repositories can be filtered by type and that repositories + are Disabled by default on activation keys + + :id: 3e6793ff-3501-47f6-8cd6-e24a60bdcb73 + + :steps: + 1. Create custom product and upload repository + 2. Attach to activation key + 3. Filter Repositories by "Custom" type + 4. Assert that custom product is visible and disabled by default + 5. Filter Repositories by "Red Hat" type + 6. Assert custom product is not visible + + :expectedresults: Custom products should be filtered and Disabled by default. 
+ + :BZ: 1265120 + + :customerscenario: true + """ + ak, org, custom_repo = setup_content + with session: + session.organization.select(org.name) + session.location.select(default_location.name) + repo1 = session.activationkey.get_repos(ak.name, repo_type="Custom") + assert repo1[0]['Repository Name'] == custom_repo.name + assert repo1[0]['Status'] == 'Disabled' + repo2 = session.activationkey.get_repos(ak.name, repo_type="Red Hat") + assert repo2[0]['Repository Name'] != custom_repo.name diff --git a/tests/foreman/ui/test_ansible.py b/tests/foreman/ui/test_ansible.py index 591a5b7480a..22a1d36f945 100644 --- a/tests/foreman/ui/test_ansible.py +++ b/tests/foreman/ui/test_ansible.py @@ -4,25 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Ansible +:CaseComponent: Ansible-ConfigurationManagement :Team: Rocket -:TestType: Functional - -:CaseImportance: High +:CaseImportance: Critical -:Upstream: No """ +from fauxfactory import gen_string import pytest import yaml -from fauxfactory import gen_string from robottelo import constants -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings +from robottelo.config import robottelo_tmp_dir, settings def test_positive_create_and_delete_variable(target_sat): """Create an Ansible variable with all values and delete it. :id: 7006d7c7-788a-4447-a564-d6b03ec06aaf - :Steps: + :steps: 1. Import Ansible roles if none have been imported yet. 2. Create an Ansible variable with only a name and an assigned Ansible role. 3. Verify that the Ansible variable has been created. 4. Delete the Ansible variable. 5. Verify that the Ansible Variable has been deleted. :expectedresults: The variable is successfully created and deleted. """ key = gen_string('alpha') - role = 'redhat.satellite.activation_keys' + + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) with target_sat.ui_session() as session: - if not session.ansibleroles.imported_roles_count: - session.ansibleroles.import_all_roles() session.ansiblevariables.create( { 'key': key, - 'ansible_role': role, + 'ansible_role': SELECTED_ROLE, } ) assert session.ansiblevariables.search(key)[0]['Name'] == key @@ -61,7 +56,7 @@ def test_positive_create_variable_with_overrides(target_sat): :id: 90acea37-4c2f-42e5-92a6-0c88148f4fb6 - :Steps: + :steps: 1. Import Ansible roles if none have been imported yet. 2. Create an Ansible variable, populating all fields on the creation form. 3. Verify that the Ansible variable was created successfully. :expectedresults: The variable is successfully created.
""" key = gen_string('alpha') - role = 'redhat.satellite.activation_keys' + + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) with target_sat.ui_session() as session: - if not session.ansibleroles.imported_roles_count: - session.ansibleroles.import_all_roles() session.ansiblevariables.create_with_overrides( { 'key': key, 'description': 'this is a description', - 'ansible_role': role, + 'ansible_role': SELECTED_ROLE, 'parameter_type': 'integer', 'default_value': '11', 'validator_type': 'list', @@ -95,6 +91,7 @@ def test_positive_create_variable_with_overrides(target_sat): assert session.ansiblevariables.search(key)[0]['Name'] == key +@pytest.mark.pit_server @pytest.mark.no_containers @pytest.mark.rhel_ver_match('[^6]') def test_positive_config_report_ansible(session, target_sat, module_org, rhel_contenthost): @@ -114,7 +111,7 @@ def test_positive_config_report_ansible(session, target_sat, module_org, rhel_co 1. Host should be assigned the proper role. 2. Job report should be created. - :CaseImportance: Critical + :CaseComponent: Ansible-RemoteExecution """ SELECTED_ROLE = 'RedHatInsights.insights-client' if rhel_contenthost.os_version.major <= 7: @@ -166,7 +163,7 @@ def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_cont :customerscenario: true - :Steps: + :steps: 1. Register a content host with satellite 2. Create a custom role and import into satellite 3. Assign that role to a host @@ -179,8 +176,15 @@ def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_cont :BZ: 2155392 - :CaseAutomation: Automated + :CaseComponent: Ansible-RemoteExecution """ + + @request.addfinalizer + def _finalize(): + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] + target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') + SELECTED_ROLE = 'custom_role' playbook = f'{robottelo_tmp_dir}/playbook.yml' data = { @@ -234,12 +238,6 @@ def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_cont session.configreport.delete(rhel_contenthost.hostname) assert len(session.configreport.read()['table']) == 0 - @request.addfinalizer - def _finalize(): - result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) - assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] - target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') - @pytest.mark.tier2 def test_positive_host_role_information(target_sat, function_host): @@ -248,9 +246,7 @@ def test_positive_host_role_information(target_sat, function_host): :id: 7da913ef-3b43-4bfa-9a45-d895431c8b56 - :CaseLevel: System - - :Steps: + :steps: 1. Register a RHEL host to Satellite. 2. Import all roles available by default. 3. Assign one role to the RHEL host. @@ -258,7 +254,6 @@ def test_positive_host_role_information(target_sat, function_host): 5. Select the 'Ansible' tab, then the 'Inventory' sub-tab. :expectedresults: Roles assigned directly to the Host are visible on the subtab. 
- """ SELECTED_ROLE = 'RedHatInsights.insights-client' @@ -280,6 +275,80 @@ def test_positive_host_role_information(target_sat, function_host): assert all_assigned_roles_table[0]["Name"] == SELECTED_ROLE +@pytest.mark.rhel_ver_match('8') +def test_positive_assign_ansible_role_variable_on_host( + target_sat, rhel_contenthost, module_activation_key, module_org, module_location, request +): + """Verify ansible variable is added to the role and attached to the host. + + :id: 7ec4fe19-5a08-4b10-bb4e-7327dd68699a + + :BZ: 2170727 + + :customerscenario: true + + :steps: + 1. Create an Ansible variable with array type and set the default value. + 2. Enable both 'Merge Overrides' and 'Merge Default'. + 3. Add the variable to a role and attach the role to the host. + 4. Verify that ansible role and variable is added to the host. + + :expectedresults: The role and variable is successfully added to the host. + """ + + @request.addfinalizer + def _finalize(): + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] + + key = gen_string('alpha') + SELECTED_ROLE = 'redhat.satellite.activation_keys' + proxy_id = target_sat.nailgun_smart_proxy.id + target_sat.api.AnsibleRoles().sync(data={'proxy_id': proxy_id, 'role_names': [SELECTED_ROLE]}) + command = target_sat.api.RegistrationCommand( + organization=module_org, + location=module_location, + activation_keys=[module_activation_key.name], + ).create() + result = rhel_contenthost.execute(command) + assert result.status == 0, f'Failed to register host: {result.stderr}' + target_host = rhel_contenthost.nailgun_host + default_value = '[\"test\"]' + parameter_type = 'array' + with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) + session.ansiblevariables.create_with_overrides( + { + 'key': key, + 'ansible_role': SELECTED_ROLE, + 'override': 'true', + 'parameter_type': parameter_type, + 'default_value': default_value, + 'validator_type': None, + 'attribute_order': 'domain \n fqdn \n hostgroup \n os', + 'merge_default': 'true', + 'merge_overrides': 'true', + 'matcher_section.params': [ + { + 'attribute_type': {'matcher_key': 'os', 'matcher_value': 'fedora'}, + 'value': '[\'13\']', + } + ], + } + ) + result = target_sat.cli.Host.ansible_roles_assign( + {'id': target_host.id, 'ansible-roles': SELECTED_ROLE} + ) + assert 'Ansible roles were assigned' in result[0]['message'] + values = session.host_new.get_details(rhel_contenthost.hostname, 'ansible')['ansible'][ + 'variables' + ]['table'] + assert (key, SELECTED_ROLE, default_value, parameter_type) in [ + (var['Name'], var['Ansible role'], var['Value'], var['Type']) for var in values + ] + + @pytest.mark.stubbed @pytest.mark.tier2 def test_positive_role_variable_information(self): @@ -288,9 +357,7 @@ def test_positive_role_variable_information(self): :id: 4ab2813a-6b83-4907-b104-0473465814f5 - :CaseLevel: System - - :Steps: + :steps: 1. Register a RHEL host to Satellite. 2. Import all roles available by default. 3. Create a host group and assign one of the Ansible roles to the host group. @@ -302,7 +369,6 @@ def test_positive_role_variable_information(self): 9. Select the 'Ansible' tab, then the 'Variables' sub-tab. :expectedresults: The variables information for the given Host is visible. 
- """ @@ -313,9 +379,7 @@ def test_positive_assign_role_in_new_ui(self): :id: 044f38b4-cff2-4ddc-b93c-7e9f2826d00d - :CaseLevel: System - - :Steps: + :steps: 1. Register a RHEL host to Satellite. 2. Import all roles available by default. 3. Navigate to the new UI for the given Host. @@ -324,7 +388,6 @@ def test_positive_assign_role_in_new_ui(self): 6. Using the popup, assign a role to the Host. :expectedresults: The Role is successfully assigned to the Host, and shows up on the UI - """ @@ -335,9 +398,7 @@ def test_positive_remove_role_in_new_ui(self): :id: d6de5130-45f6-4349-b490-fbde2aed082c - :CaseLevel: System - - :Steps: + :steps: 1. Register a RHEL host to Satellite. 2. Import all roles available by default. 3. Assign a role to the host. @@ -348,5 +409,4 @@ def test_positive_remove_role_in_new_ui(self): :expectedresults: The Role is successfully removed from the Host, and no longer shows up on the UI - """ diff --git a/tests/foreman/ui/test_architecture.py b/tests/foreman/ui/test_architecture.py index ac7067449ba..045d7f06cc6 100644 --- a/tests/foreman/ui/test_architecture.py +++ b/tests/foreman/ui/test_architecture.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: Low -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest @pytest.mark.tier2 @@ -30,8 +25,6 @@ def test_positive_end_to_end(session): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_audit.py b/tests/foreman/ui/test_audit.py index c9262320ab5..5f6b87780d5 100644 --- a/tests/foreman/ui/test_audit.py +++ b/tests/foreman/ui/test_audit.py @@ -1,24 +1,18 @@ """Test class for Audit UI -:Requirement: Audit +:Requirement: AuditLog :CaseAutomation: Automated -:CaseLevel: Medium - :CaseComponent: AuditLog :Team: Endeavour -:TestType: Functional - -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest -from robottelo.constants import ANY_CONTEXT from robottelo.constants import ENVIRONMENT @@ -41,8 +35,6 @@ def test_positive_create_event(session, module_org, module_location): :CaseAutomation: Automated - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1730360 @@ -88,8 +80,6 @@ def test_positive_audit_comment(session, module_org): :CaseAutomation: Automated - :CaseLevel: Component - :CaseImportance: Low """ name = gen_string('alpha') @@ -123,9 +113,9 @@ def test_positive_update_event(session, module_org): :CaseAutomation: Automated - :CaseLevel: Integration - :CaseImportance: Medium + + :bz: 2222890 """ name = gen_string('alpha') new_name = gen_string('alpha') @@ -155,8 +145,6 @@ def test_positive_delete_event(session, module_org): :CaseAutomation: Automated - :CaseLevel: Component - :CaseImportance: Medium """ architecture = entities.Architecture().create() @@ -182,8 +170,6 @@ def test_positive_add_event(session, module_org): :CaseAutomation: Automated - :CaseLevel: Integration - :CaseImportance: Medium """ cv = entities.ContentView(organization=module_org).create() @@ -199,47 +185,3 @@ def test_positive_add_event(session, module_org): assert values['action_summary'][0]['column0'] == 'Added {}/{} to {}'.format( ENVIRONMENT, cv.name, cv.name ) - - -@pytest.mark.skip_if_open("BZ:1701118") -@pytest.mark.skip_if_open("BZ:1701132") -@pytest.mark.tier2 -def 
test_positive_create_role_filter(session, module_org, target_sat): - """Update a role with new filter and check that corresponding event - appeared in the audit log - - :id: 74679c0d-7ef1-4ab1-8282-9377c6cabb9f - - :customerscenario: true - - :expectedresults: audit log has an entry for a new filter that was - added to the role - - :BZ: 1425977, 1701118, 1701132 - - :CaseAutomation: Automated - - :CaseLevel: Integration - - :CaseImportance: Medium - """ - role = entities.Role(organization=[module_org]).create() - with session: - session.organization.select(org_name=ANY_CONTEXT['org']) - values = session.audit.search(f'type=role and organization={module_org.name}') - assert values['action_type'] == 'create' - assert values['resource_type'] == 'ROLE' - assert values['resource_name'] == role.name - target_sat.api_factory.create_role_permissions( - role, {'Architecture': ['view_architectures', 'edit_architectures']} - ) - values = session.audit.search('type=filter') - assert values['action_type'] == 'added' - assert values['resource_type'] == 'Filter' - assert values['resource_name'] == '{} and {} / {}'.format( - 'view_architectures', 'edit_architectures', role.name - ) - assert len(values['action_summary']) == 1 - assert values['action_summary'][0]['column0'] == 'Added {} and {} to {}'.format( - 'view_architectures', 'edit_architectures', role.name - ) diff --git a/tests/foreman/ui/test_bookmarks.py b/tests/foreman/ui/test_bookmarks.py index f33483a0c42..096f8fdf7cd 100644 --- a/tests/foreman/ui/test_bookmarks.py +++ b/tests/foreman/ui/test_bookmarks.py @@ -4,54 +4,46 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Search :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from airgun.exceptions import NoSuchElementException -from airgun.session import Session +from airgun.exceptions import DisabledWidgetError, NoSuchElementException from fauxfactory import gen_string -from nailgun import entities +import pytest from robottelo.config import user_nailgun_config -from robottelo.constants import BOOKMARK_ENTITIES -from robottelo.utils.issue_handlers import is_open +from robottelo.constants import BOOKMARK_ENTITIES_SELECTION @pytest.fixture( - scope='module', params=BOOKMARK_ENTITIES, ids=(i['name'] for i in BOOKMARK_ENTITIES) + scope='module', + params=BOOKMARK_ENTITIES_SELECTION, + ids=(i['name'] for i in BOOKMARK_ENTITIES_SELECTION), ) def ui_entity(module_org, module_location, request): """Collects the list of all applicable UI entities for testing and does all required preconditions. """ entity = request.param + entity_name, entity_setup = entity['name'], entity.get('setup') + # Skip the entities, which can't be tested ATM (not implemented in + # airgun) + skip = entity.get('skip_for_ui') + if skip: + pytest.skip(f'{entity_name} not implemented in airgun') # Some pages require at least 1 existing entity for search bar to # appear. 
Creating 1 entity for such pages - entity_name, entity_setup = entity['name'], entity.get('setup') if entity_setup: - # Skip the entities, which can't be tested ATM (not implemented in - # airgun or have open BZs) - skip = entity.get('skip_for_ui') - if isinstance(skip, (tuple, list)): - open_issues = {issue for issue in skip if is_open(issue)} - pytest.skip(f'There is/are an open issue(s) {open_issues} with entity {entity_name}') # entities with 1 organization and location if entity_name in ('Host',): entity_setup(organization=module_org, location=module_location).create() # entities with no organizations and locations elif entity_name in ( 'ComputeProfile', - 'GlobalParameter', 'HardwareModel', 'UserGroup', ): @@ -71,8 +63,6 @@ def test_positive_end_to_end(session, ui_entity): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') @@ -98,14 +88,16 @@ def test_positive_end_to_end(session, ui_entity): @pytest.mark.tier2 -def test_positive_create_bookmark_public(session, ui_entity, default_viewer_role, test_name): +def test_positive_create_bookmark_public( + session, ui_entity, default_viewer_role, test_name, module_target_sat +): """Create and check visibility of the (non)public bookmarks :id: 93139529-7690-429b-83fe-3dcbac4f91dc :Setup: Create a non-admin user with 'viewer' role - :Steps: + :steps: 1. Navigate to the entity page 2. Choose "bookmark this search" from the search drop-down menu @@ -120,19 +112,19 @@ def test_positive_create_bookmark_public(session, ui_entity, default_viewer_role :expectedresults: No errors, public bookmarks is displayed for all users, non-public bookmark is displayed for creator but not for different user - - :CaseLevel: Integration """ public_name = gen_string('alphanumeric') nonpublic_name = gen_string('alphanumeric') with session: - ui_lib = getattr(session, ui_entity['name'].lower()) + ui_lib = getattr(session, ui_entity['session_name']) for name in (public_name, nonpublic_name): ui_lib.create_bookmark( {'name': name, 'query': gen_string('alphanumeric'), 'public': name == public_name} ) assert any(d['Name'] == name for d in session.bookmark.search(name)) - with Session(test_name, default_viewer_role.login, default_viewer_role.password) as session: + with module_target_sat.ui_session( + test_name, default_viewer_role.login, default_viewer_role.password + ) as session: assert any(d['Name'] == public_name for d in session.bookmark.search(public_name)) assert not session.bookmark.search(nonpublic_name) @@ -151,7 +143,7 @@ def test_positive_update_bookmark_public( public and one private 2. Create a non-admin user with 'viewer' role - :Steps: + :steps: 1. 
Login to Satellite server (establish a UI session) as the pre-created user @@ -177,8 +169,6 @@ def test_positive_update_bookmark_public( :expectedresults: New public bookmark is listed, and the private one is hidden - :CaseLevel: Integration - :BZ: 2141187 :customerscenario: true @@ -193,7 +183,7 @@ def test_positive_update_bookmark_public( controller=ui_entity['controller'], public=name == public_name, ).create() - with Session( + with target_sat.ui_session( test_name, default_viewer_role.login, default_viewer_role.password ) as non_admin_session: assert any(d['Name'] == public_name for d in non_admin_session.bookmark.search(public_name)) @@ -201,7 +191,7 @@ def test_positive_update_bookmark_public( with session: session.bookmark.update(public_name, {'public': False}) session.bookmark.update(nonpublic_name, {'public': True}) - with Session( + with target_sat.ui_session( test_name, default_viewer_role.login, default_viewer_role.password ) as non_admin_session: assert any( @@ -211,7 +201,7 @@ def test_positive_update_bookmark_public( @pytest.mark.tier2 -def test_negative_delete_bookmark(ui_entity, default_viewer_role, test_name): +def test_negative_delete_bookmark(ui_entity, default_viewer_role, test_name, module_target_sat): """Simple removal of a bookmark query without permissions :id: 1a94bf2b-bcc6-4663-b70d-e13244a0783b @@ -222,18 +212,18 @@ def test_negative_delete_bookmark(ui_entity, default_viewer_role, test_name): 2. Create a non-admin user without destroy_bookmark role (e.g. viewer) - :Steps: + :steps: 1. Login to Satellite server (establish a UI session) as a non-admin user 2. List the bookmarks (Navigate to Administer -> Bookmarks) :expectedresults: The delete buttons are not displayed - - :CaseLevel: Integration """ - bookmark = entities.Bookmark(controller=ui_entity['controller'], public=True).create() - with Session( + bookmark = module_target_sat.api.Bookmark( + controller=ui_entity['controller'], public=True + ).create() + with module_target_sat.ui_session( test_name, default_viewer_role.login, default_viewer_role.password ) as non_admin_session: assert non_admin_session.bookmark.search(bookmark.name)[0]['Name'] == bookmark.name @@ -243,7 +233,7 @@ def test_negative_delete_bookmark(ui_entity, default_viewer_role, test_name): @pytest.mark.tier2 -def test_negative_create_with_duplicate_name(session, ui_entity): +def test_negative_create_with_duplicate_name(session, ui_entity, module_target_sat): """Create bookmark with duplicate name :id: 18168c9c-bdd1-4839-a506-cf9b06c4ab44 @@ -252,24 +242,30 @@ def test_negative_create_with_duplicate_name(session, ui_entity): 1. Create a bookmark of a random name with random query. - :Steps: + :steps: 1. Create new bookmark with duplicate name. 
:expectedresults: Bookmark can't be created, submit button is disabled :BZ: 1920566, 1992652 - - :CaseLevel: Integration """ query = gen_string('alphanumeric') - bookmark = entities.Bookmark(controller=ui_entity['controller'], public=True).create() + bookmark = module_target_sat.api.Bookmark( + controller=ui_entity['controller'], public=True + ).create() with session: existing_bookmark = session.bookmark.search(bookmark.name)[0] assert existing_bookmark['Name'] == bookmark.name ui_lib = getattr(session, ui_entity['name'].lower()) - # this fails but does not raise UI error, BZ#1992652 closed wontfix - ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) + # In the old-style dialog this fails silently (BZ#1992652, closed WONTFIX); + # the new-style dialog raises an error, so both situations are handled below + old_ui = ui_entity.get('old_ui') + if old_ui: + ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) + else: + with pytest.raises((DisabledWidgetError, NoSuchElementException)): + ui_lib.create_bookmark({'name': bookmark.name, 'query': query, 'public': True}) # assert there are no duplicate bookmarks new_search = session.bookmark.search(bookmark.name) assert len(new_search) == 1 diff --git a/tests/foreman/ui/test_branding.py b/tests/foreman/ui/test_branding.py index e6d481ca3f3..8fe30b1c2ef 100644 --- a/tests/foreman/ui/test_branding.py +++ b/tests/foreman/ui/test_branding.py @@ -4,20 +4,14 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Branding :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from airgun.session import Session @pytest.mark.e2e @@ -27,18 +21,16 @@ def test_verify_satellite_login_screen_info(target_sat): :id: f48110ad-29b4-49b1-972a-a70075a05732 - :Steps: Get the info from the login screen + :steps: Get the info from the login screen :expectedresults: 1. Correct Satellite version 2.
If 'Beta' is present, should fail until a GA snap is received - :CaseLevel: System - :BZ: 1315849, 1367495, 1372436, 1502098, 1540710, 1582476, 1724738, 1959135, 2076979, 1687250, 1686540, 1742872, 1805642, 2105949 """ - with Session(login=False) as session: + with target_sat.ui_session(login=False) as session: version = session.login.read_sat_version() assert f'Version {target_sat.version}' == version['login_text'] assert 'Beta' not in version['login_text'], '"Beta" should not be there' diff --git a/tests/foreman/ui/test_computeprofiles.py b/tests/foreman/ui/test_computeprofiles.py index efa011af4a8..31075e7c1fb 100644 --- a/tests/foreman/ui/test_computeprofiles.py +++ b/tests/foreman/ui/test_computeprofiles.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest @pytest.mark.tier2 @@ -30,8 +25,6 @@ def test_positive_end_to_end(session, module_location, module_org): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_computeresource.py b/tests/foreman/ui/test_computeresource.py index 151143e2edc..7bd9f2552ce 100644 --- a/tests/foreman/ui/test_computeresource.py +++ b/tests/foreman/ui/test_computeresource.py @@ -4,30 +4,21 @@ :CaseAutomation: Automated -:CaseLevel: Integration - :CaseComponent: ComputeResources :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from nailgun import entities +import pytest from wait_for import wait_for -from robottelo.config import setting_is_set -from robottelo.config import settings -from robottelo.constants import COMPUTE_PROFILE_LARGE -from robottelo.constants import DEFAULT_LOC -from robottelo.constants import FOREMAN_PROVIDERS +from robottelo.config import setting_is_set, settings +from robottelo.constants import COMPUTE_PROFILE_LARGE, DEFAULT_LOC, FOREMAN_PROVIDERS from robottelo.utils.datafactory import gen_string - # TODO mark this on the module with a lambda for skip condition # so that this is executed during the session at run loop, instead of at module import if not setting_is_set('rhev'): @@ -66,8 +57,6 @@ def test_positive_end_to_end(session, rhev_data, module_org, module_location): :expectedresults: All expected CRUD actions finished successfully. - :CaseLevel: Integration - :CaseImportance: Critical """ name = gen_string('alpha') @@ -109,8 +98,6 @@ def test_positive_add_resource(session, rhev_data): :expectedresults: resource created successfully - :CaseLevel: Integration - :CaseImportance: Critical """ # Our RHEV testing uses custom cert which we specify manually. 
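A note on the airgun pattern these UI tests share: each `session.<entity>` call takes a flat dict whose dotted keys address a tab and then a widget on that tab (for example `provider_content.datacenter.value` above). A minimal sketch of the create/search/delete round-trip, assuming the `session` and `rhev_data` fixtures used in this file; the `rhev_data` key names below are illustrative assumptions, not taken from the actual fixture:

from fauxfactory import gen_string

from robottelo.constants import FOREMAN_PROVIDERS


def test_sketch_rhv_cr_roundtrip(session, rhev_data):
    """Sketch only: airgun round-trip for a RHV compute resource."""
    name = gen_string('alpha')
    with session:
        # Dotted keys navigate tab ('provider_content') -> widget ('url').
        session.computeresource.create(
            {
                'name': name,
                'provider': FOREMAN_PROVIDERS['rhev'],
                'provider_content.url': rhev_data['rhev_url'],  # assumed fixture key
                'provider_content.user': rhev_data['username'],  # assumed fixture key
                'provider_content.password': rhev_data['password'],  # assumed fixture key
                'provider_content.datacenter.value': rhev_data['datacenter'],  # assumed fixture key
            }
        )
        # Reads come back as plain dicts/tables, so assertions stay simple.
        assert session.computeresource.search(name)[0]['Name'] == name
        session.computeresource.delete(name)
        assert not session.computeresource.search(name)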
@@ -142,8 +129,6 @@ def test_positive_edit_resource_description(session, rhev_data): :parametrized: yes :expectedresults: resource updated successfully and has new description - - :CaseLevel: Integration """ name = gen_string('alpha') description = gen_string('alpha') @@ -177,8 +162,6 @@ def test_positive_list_resource_vms(session, rhev_data): :parametrized: yes :expectedresults: VMs listed for provided compute resource - - :CaseLevel: Integration """ name = gen_string('alpha') with session: @@ -209,8 +192,6 @@ def test_positive_resource_vm_power_management(session, rhev_data): :expectedresults: virtual machine is powered on or powered off depending on its initial state - - :CaseLevel: Integration """ name = gen_string('alpha') with session: @@ -253,8 +234,6 @@ def test_positive_VM_import(session, module_org, module_location, rhev_data): :expectedresults: VM is shown as Host in Foreman - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1636067 @@ -385,8 +364,6 @@ def test_positive_image_end_to_end(session, rhev_data, module_location, target_s :expectedresults: All expected CRUD actions finished successfully. - :CaseLevel: Integration - :CaseImportance: High """ cr_name = gen_string('alpha') diff --git a/tests/foreman/ui/test_computeresource_azurerm.py b/tests/foreman/ui/test_computeresource_azurerm.py index ed3c6770b57..36228513ffa 100644 --- a/tests/foreman/ui/test_computeresource_azurerm.py +++ b/tests/foreman/ui/test_computeresource_azurerm.py @@ -4,26 +4,23 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ComputeResources-Azure :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings -from robottelo.constants import AZURERM_FILE_URI -from robottelo.constants import AZURERM_PLATFORM_DEFAULT -from robottelo.constants import AZURERM_VM_SIZE_DEFAULT -from robottelo.constants import COMPUTE_PROFILE_SMALL +from robottelo.constants import ( + AZURERM_FILE_URI, + AZURERM_PLATFORM_DEFAULT, + AZURERM_VM_SIZE_DEFAULT, + COMPUTE_PROFILE_SMALL, +) pytestmark = [pytest.mark.skip_if_not_set('azurerm')] @@ -89,7 +86,6 @@ def test_positive_end_to_end_azurerm_ft_host_provision( sat_azure_loc, module_azure_hg, ): - """Provision Host with hostgroup and Compute-profile using finish template on AzureRm compute resource @@ -99,8 +95,6 @@ def test_positive_end_to_end_azurerm_ft_host_provision( 1. Host is provisioned. 2. Host is deleted Successfully. 
- :CaseLevel: System - :BZ: 1850934 """ hostname = f'test-{gen_string("alpha")}' @@ -173,7 +167,6 @@ def test_positive_azurerm_host_provision_ud( sat_azure_loc, module_azure_hg, ): - """Provision a Host with hostgroup and Compute-profile using cloud-init image on AzureRm compute resource @@ -183,8 +176,6 @@ def test_positive_azurerm_host_provision_ud( :CaseImportance: Critical - :CaseLevel: System - :BZ: 1850934 """ diff --git a/tests/foreman/ui/test_computeresource_ec2.py b/tests/foreman/ui/test_computeresource_ec2.py index a232e65125c..8f575bd4a80 100644 --- a/tests/foreman/ui/test_computeresource_ec2.py +++ b/tests/foreman/ui/test_computeresource_ec2.py @@ -4,27 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-EC2 :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import AWS_EC2_FLAVOR_T2_MICRO -from robottelo.constants import COMPUTE_PROFILE_LARGE -from robottelo.constants import EC2_REGION_CA_CENTRAL_1 -from robottelo.constants import FOREMAN_PROVIDERS +from robottelo.constants import ( + AWS_EC2_FLAVOR_T2_MICRO, + COMPUTE_PROFILE_LARGE, + EC2_REGION_CA_CENTRAL_1, + FOREMAN_PROVIDERS, +) pytestmark = [pytest.mark.skip_if_not_set('ec2')] @@ -53,7 +50,7 @@ def test_positive_default_end_to_end_with_custom_profile( :id: 33f80a8f-2ecf-4f15-b0c3-aab5fe0ac8d3 - :Steps: + :steps: 1. Create an EC2 compute resource with default properties and taxonomies. 2. Update the compute resource name and add new taxonomies. @@ -63,8 +60,6 @@ def test_positive_default_end_to_end_with_custom_profile( :expectedresults: The EC2 compute resource is created, updated, compute profile associated and deleted. - :CaseLevel: Integration - :BZ: 1451626, 2032530 :CaseImportance: High @@ -161,8 +156,6 @@ def test_positive_create_ec2_with_custom_region(session, module_ec2_settings): :BZ: 1456942 - :CaseLevel: Integration - :CaseImportance: Critical """ cr_name = gen_string('alpha') diff --git a/tests/foreman/ui/test_computeresource_gce.py b/tests/foreman/ui/test_computeresource_gce.py index 9d4ae1e1d99..a25edb1da44 100644 --- a/tests/foreman/ui/test_computeresource_gce.py +++ b/tests/foreman/ui/test_computeresource_gce.py @@ -4,31 +4,28 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-GCE :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import json import random -import pytest from fauxfactory import gen_string +import pytest from wait_for import wait_for from robottelo.config import settings -from robottelo.constants import COMPUTE_PROFILE_SMALL -from robottelo.constants import FOREMAN_PROVIDERS -from robottelo.constants import GCE_EXTERNAL_IP_DEFAULT -from robottelo.constants import GCE_MACHINE_TYPE_DEFAULT -from robottelo.constants import GCE_NETWORK_DEFAULT +from robottelo.constants import ( + COMPUTE_PROFILE_SMALL, + FOREMAN_PROVIDERS, + GCE_EXTERNAL_IP_DEFAULT, + GCE_MACHINE_TYPE_DEFAULT, + GCE_NETWORK_DEFAULT, +) @pytest.mark.tier2 @@ -41,7 +38,7 @@ def test_positive_default_end_to_end_with_custom_profile( :id: 59ffd83e-a984-4c22-b91b-cad055b4fbd7 - :Steps: + :steps: 1. Create an GCE compute resource with default properties. 2. Update the compute resource name and add new taxonomies. 
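A recurring change in the provisioning tests that follow: each `@request.addfinalizer` registration is moved ahead of the code that provisions external resources, so cleanup is registered before any step can fail. A minimal sketch of why the ordering matters, using only stock pytest; the list here is a stand-in for a provisioned GCE host:

def test_sketch_finalizer_ordering(request):
    """Sketch only: register cleanup before provisioning."""
    provisioned = []

    @request.addfinalizer
    def _finalize():
        # Runs at teardown even if a later step raises, so the stand-in
        # "host" is always cleaned up.
        provisioned.clear()

    provisioned.append('gce-host')  # stand-in for the actual provisioning call
    assert provisioned == ['gce-host']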
@@ -51,8 +48,6 @@ def test_positive_default_end_to_end_with_custom_profile( :expectedresults: The GCE compute resource is created, updated, compute profile associated and deleted. - :CaseLevel: Integration - :CaseImportance: Critical """ cr_name = gen_string('alpha') @@ -165,11 +160,18 @@ def test_positive_gce_provision_end_to_end( :id: 8d1877bb-fbc2-4969-a13e-e95e4df4f4cd :expectedresults: Host is provisioned successfully - - :CaseLevel: System """ + name = f'test{gen_string("alpha", 4).lower()}' hostname = f'{name}.{gce_domain.name}' + + @request.addfinalizer + def _finalize(): + gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) + if gcehost: + gcehost[0].delete() + googleclient.disconnect() + gceapi_vmname = hostname.replace('.', '-') root_pwd = gen_string('alpha', 15) storage = [{'size': 20}] @@ -221,13 +223,6 @@ def test_positive_gce_provision_end_to_end( # 2.2 GCE Backend Assertions assert gceapi_vm.is_stopping or gceapi_vm.is_stopped - @request.addfinalizer - def _finalize(): - gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) - if gcehost: - gcehost[0].delete() - googleclient.disconnect() - @pytest.mark.tier4 @pytest.mark.upgrade @@ -251,11 +246,17 @@ def test_positive_gce_cloudinit_provision_end_to_end( :id: 6ee63ec6-2e8e-4ed6-ae48-e68b078233c6 :expectedresults: Host is provisioned successfully - - :CaseLevel: System """ name = f'test{gen_string("alpha", 4).lower()}' hostname = f'{name}.{gce_domain.name}' + + @request.addfinalizer + def _finalize(): + gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) + if gcehost: + gcehost[0].delete() + googleclient.disconnect() + gceapi_vmname = hostname.replace('.', '-') storage = [{'size': 20}] root_pwd = gen_string('alpha', random.choice([8, 15])) @@ -299,10 +300,3 @@ def test_positive_gce_cloudinit_provision_end_to_end( assert not sat_gce.api.Host().search(query={'search': f'name="{hostname}"'}) # 2.2 GCE Backend Assertions assert gceapi_vm.is_stopping or gceapi_vm.is_stopped - - @request.addfinalizer - def _finalize(): - gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) - if gcehost: - gcehost[0].delete() - googleclient.disconnect() diff --git a/tests/foreman/ui/test_computeresource_libvirt.py b/tests/foreman/ui/test_computeresource_libvirt.py index 73eb14507e8..42477b8d46a 100644 --- a/tests/foreman/ui/test_computeresource_libvirt.py +++ b/tests/foreman/ui/test_computeresource_libvirt.py @@ -4,47 +4,41 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ComputeResources-libvirt :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from random import choice -import pytest from fauxfactory import gen_string -from nailgun import entities +import pytest +from wait_for import wait_for from robottelo.config import settings -from robottelo.constants import COMPUTE_PROFILE_SMALL -from robottelo.constants import FOREMAN_PROVIDERS -from robottelo.constants import LIBVIRT_RESOURCE_URL +from robottelo.constants import ( + COMPUTE_PROFILE_SMALL, + FOREMAN_PROVIDERS, + LIBVIRT_RESOURCE_URL, +) +from robottelo.utils.issue_handlers import is_open pytestmark = [pytest.mark.skip_if_not_set('libvirt')] - -@pytest.fixture(scope='module') -def module_libvirt_url(): - return LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname +LIBVIRT_URL = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname @pytest.mark.tier2 -def test_positive_end_to_end(session, module_org, module_location, module_libvirt_url): 
+@pytest.mark.e2e +def test_positive_end_to_end(session, module_target_sat, module_org, module_location): """Perform end to end testing for compute resource Libvirt component. :id: 7ef925ac-5aec-4e9d-b786-328a9b219c01 :expectedresults: All expected CRUD actions finished successfully. - :CaseLevel: Integration - :CaseImportance: High :BZ: 1662164 @@ -53,17 +47,18 @@ def test_positive_end_to_end(session, module_org, module_location, module_libvir cr_description = gen_string('alpha') new_cr_name = gen_string('alpha') new_cr_description = gen_string('alpha') - new_org = entities.Organization().create() - new_loc = entities.Location().create() + new_org = module_target_sat.api.Organization().create() + new_loc = module_target_sat.api.Location().create() display_type = choice(('VNC', 'SPICE')) console_passwords = choice((True, False)) + module_target_sat.configure_libvirt_cr() with session: session.computeresource.create( { 'name': cr_name, 'description': cr_description, 'provider': FOREMAN_PROVIDERS['libvirt'], - 'provider_content.url': module_libvirt_url, + 'provider_content.url': LIBVIRT_URL, 'provider_content.display_type': display_type, 'provider_content.console_passwords': console_passwords, 'organizations.resources.assigned': [module_org.name], @@ -73,7 +68,7 @@ def test_positive_end_to_end(session, module_org, module_location, module_libvir cr_values = session.computeresource.read(cr_name) assert cr_values['name'] == cr_name assert cr_values['description'] == cr_description - assert cr_values['provider_content']['url'] == module_libvirt_url + assert cr_values['provider_content']['url'] == LIBVIRT_URL assert cr_values['provider_content']['display_type'] == display_type assert cr_values['provider_content']['console_passwords'] == console_passwords assert cr_values['organizations']['resources']['assigned'] == [module_org.name] @@ -102,7 +97,7 @@ def test_positive_end_to_end(session, module_org, module_location, module_libvir # check that the compute resource is listed in one of the default compute profiles profile_cr_values = session.computeprofile.list_resources(COMPUTE_PROFILE_SMALL) profile_cr_names = [cr['Compute Resource'] for cr in profile_cr_values] - assert '{} ({})'.format(new_cr_name, FOREMAN_PROVIDERS['libvirt']) in profile_cr_names + assert f'{new_cr_name} ({FOREMAN_PROVIDERS["libvirt"]})' in profile_cr_names session.computeresource.update_computeprofile( new_cr_name, COMPUTE_PROFILE_SMALL, @@ -112,10 +107,91 @@ def test_positive_end_to_end(session, module_org, module_location, module_libvir new_cr_name, COMPUTE_PROFILE_SMALL ) assert cr_profile_values['compute_profile'] == COMPUTE_PROFILE_SMALL - assert cr_profile_values['compute_resource'] == '{} ({})'.format( - new_cr_name, FOREMAN_PROVIDERS['libvirt'] + assert ( + cr_profile_values['compute_resource'] + == f'{new_cr_name} ({FOREMAN_PROVIDERS["libvirt"]})' ) assert cr_profile_values['provider_content']['cpus'] == '16' assert cr_profile_values['provider_content']['memory'] == '8192 MB' session.computeresource.delete(new_cr_name) assert not session.computeresource.search(new_cr_name) + + +@pytest.mark.on_premises_provisioning +@pytest.mark.tier4 +@pytest.mark.rhel_ver_match('[^6]') +@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete=True'], indirect=True) +def test_positive_provision_end_to_end( + request, + session, + setting_update, + module_sca_manifest_org, + module_location, + provisioning_hostgroup, + module_libvirt_provisioning_sat, + module_provisioning_rhel_content, +): + """Provision Host on 
libvirt compute resource, and delete it afterwards + + :id: 2678f95f-0c0e-4b46-a3c1-3f9a954d3bde + + :expectedresults: Host is provisioned successfully + + :customerscenario: true + + :BZ: 1243223, 2236693 + + :parametrized: yes + """ + sat = module_libvirt_provisioning_sat.sat + hostname = gen_string('alpha').lower() + os_major_ver = module_provisioning_rhel_content.os.major + cpu_mode = 'host-passthrough' if is_open('BZ:2236693') and os_major_ver == '9' else 'default' + cr = sat.api.LibvirtComputeResource( + provider=FOREMAN_PROVIDERS['libvirt'], + url=LIBVIRT_URL, + display_type='VNC', + location=[module_location], + organization=[module_sca_manifest_org], + ).create() + with session: + session.host.create( + { + 'host.name': hostname, + 'host.organization': module_sca_manifest_org.name, + 'host.location': module_location.name, + 'host.hostgroup': provisioning_hostgroup.name, + 'host.inherit_deploy_option': False, + 'host.deploy': f'{cr.name} (Libvirt)', + 'provider_content.virtual_machine.memory': '6144', + 'provider_content.virtual_machine.cpu_mode': cpu_mode, + 'interfaces.interface.network_type': 'Physical (Bridge)', + 'interfaces.interface.network': f'br-{settings.provisioning.vlan_id}', + 'additional_information.comment': 'Libvirt provision using valid data', + } + ) + name = f'{hostname}.{module_libvirt_provisioning_sat.domain.name}' + request.addfinalizer(lambda: sat.provisioning_cleanup(name)) + assert session.host.search(name)[0]['Name'] == name + + # Check on Libvirt, if VM exists + result = sat.execute( + f'su foreman -s /bin/bash -c "virsh -c {LIBVIRT_URL} list --state-running"' + ) + assert hostname in result.stdout + # Wait for provisioning to complete and report status back to Satellite + wait_for( + lambda: session.host.get_details(name)['properties']['properties_table']['Build'] + != 'Pending installation clear', + timeout=1800, + delay=30, + fail_func=session.browser.refresh, + silent_failure=True, + handle_exception=True, + ) + assert ( + session.host.get_details(name)['properties']['properties_table']['Build'] + == 'Installed clear' + ) + session.host.delete(name) + assert not sat.api.Host().search(query={'search': f'name="{name}"'}) diff --git a/tests/foreman/ui/test_computeresource_vmware.py b/tests/foreman/ui/test_computeresource_vmware.py index dd323bd262c..9ef57de8e6c 100644 --- a/tests/foreman/ui/test_computeresource_vmware.py +++ b/tests/foreman/ui/test_computeresource_vmware.py @@ -8,30 +8,26 @@ :Team: Rocket -:CaseLevel: Acceptance - -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -from math import floor -from math import log10 +from math import floor, log10 from random import choice -import pytest from nailgun import entities -from wait_for import TimedOutError -from wait_for import wait_for -from wrapanapi.systems.virtualcenter import vim -from wrapanapi.systems.virtualcenter import VMWareSystem +import pytest +from wait_for import TimedOutError, wait_for +from wrapanapi.systems.virtualcenter import VMWareSystem, vim from robottelo.config import settings -from robottelo.constants import COMPUTE_PROFILE_LARGE -from robottelo.constants import FOREMAN_PROVIDERS -from robottelo.constants import VMWARE_CONSTANTS +from robottelo.constants import ( + COMPUTE_PROFILE_LARGE, + DEFAULT_LOC, + FOREMAN_PROVIDERS, + VMWARE_CONSTANTS, +) from robottelo.utils.datafactory import gen_string +from robottelo.utils.issue_handlers import is_open pytestmark = [pytest.mark.skip_if_not_set('vmware')] @@ -61,7 +57,7 @@ def _get_normalized_size(size): return 
f'{size} {suffixes[suffix_index]}' -def _get_vmware_datastore_summary_string(data_store_name=VMWARE_CONSTANTS['datastore']): +def _get_vmware_datastore_summary_string(data_store_name=settings.vmware.datastore, vmware=None): """Return the datastore string summary for data_store_name For "Local-Ironforge" datastore the string looks like: @@ -69,7 +65,7 @@ def _get_vmware_datastore_summary_string(data_store_name=VMWARE_CONSTANTS['datas "Local-Ironforge (free: 1.66 TB, prov: 2.29 TB, total: 2.72 TB)" """ system = VMWareSystem( - hostname=settings.vmware.vcenter, + hostname=vmware.hostname, username=settings.vmware.username, password=settings.vmware.password, ) @@ -85,39 +81,14 @@ def _get_vmware_datastore_summary_string(data_store_name=VMWARE_CONSTANTS['datas return f'{data_store_name} (free: {free_space}, prov: {prov}, total: {capacity})' -@pytest.fixture(scope='module') -def module_org(): - return entities.Organization().create() - - -@pytest.fixture(scope='module') -def module_vmware_settings(): - ret = dict( - vcenter=settings.vmware.vcenter, - user=settings.vmware.username, - password=settings.vmware.password, - datacenter=settings.vmware.datacenter, - image_name=settings.vmware.image_name, - image_arch=settings.vmware.image_arch, - image_os=settings.vmware.image_os, - image_username=settings.vmware.image_username, - image_password=settings.vmware.image_password, - vm_name=settings.vmware.vm_name, - ) - if 'INTERFACE' in settings.vmware: - ret['interface'] = VMWARE_CONSTANTS['network_interfaces'] % settings.vmware.interface - return ret - - @pytest.mark.tier1 -def test_positive_end_to_end(session, module_org, module_location, module_vmware_settings): +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_end_to_end(session, module_org, module_location, vmware): """Perform end to end testing for compute resource VMware component. :id: 47fc9e77-5b22-46b4-a76c-3217434fde2f :expectedresults: All expected CRUD actions finished successfully.
- - :CaseLevel: Integration """ cr_name = gen_string('alpha') new_cr_name = gen_string('alpha') @@ -133,10 +104,10 @@ def test_positive_end_to_end(session, module_org, module_location, module_vmware 'name': cr_name, 'description': description, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 'provider_content.datacenter.value': settings.vmware.datacenter, 'provider_content.display_type': display_type, 'provider_content.vnc_console_passwords': vnc_console_passwords, 'provider_content.enable_caching': enable_caching, @@ -148,11 +119,8 @@ def test_positive_end_to_end(session, module_org, module_location, module_vmware assert cr_values['name'] == cr_name assert cr_values['description'] == description assert cr_values['provider'] == FOREMAN_PROVIDERS['vmware'] - assert cr_values['provider_content']['user'] == module_vmware_settings['user'] - assert ( - cr_values['provider_content']['datacenter']['value'] - == module_vmware_settings['datacenter'] - ) + assert cr_values['provider_content']['user'] == settings.vmware.username + assert cr_values['provider_content']['datacenter']['value'] == settings.vmware.datacenter assert cr_values['provider_content']['display_type'] == display_type assert cr_values['provider_content']['vnc_console_passwords'] == vnc_console_passwords assert cr_values['provider_content']['enable_caching'] == enable_caching @@ -186,7 +154,8 @@ def test_positive_end_to_end(session, module_org, module_location, module_vmware @pytest.mark.tier2 -def test_positive_retrieve_virtual_machine_list(session, module_vmware_settings): +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_retrieve_virtual_machine_list(session, vmware): """List the virtual machine list from vmware compute resource :id: 21ade57a-0caa-4144-9c46-c8e22f33414e @@ -199,20 +168,18 @@ def test_positive_retrieve_virtual_machine_list(session, module_vmware_settings) 2. Go to "Virtual Machines" tab. 
:expectedresults: The Virtual machines should be displayed - - :CaseLevel: Integration """ cr_name = gen_string('alpha') - vm_name = module_vmware_settings['vm_name'] + vm_name = settings.vmware.vm_name with session: session.computeresource.create( { 'name': cr_name, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 'provider_content.datacenter.value': settings.vmware.datacenter, } ) assert session.computeresource.search(cr_name)[0]['Name'] == cr_name @@ -221,30 +188,30 @@ def test_positive_retrieve_virtual_machine_list(session, module_vmware_settings) ) +@pytest.mark.e2e @pytest.mark.tier2 -def test_positive_image_end_to_end(session, module_vmware_settings, target_sat): +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_image_end_to_end(session, target_sat, vmware): """Perform end to end testing for compute resource VMware component image. :id: 6b7949ef-c684-40aa-b181-11f8d4cd39c6 :expectedresults: All expected CRUD actions finished successfully. - - :CaseLevel: Integration """ cr_name = gen_string('alpha') image_name = gen_string('alpha') new_image_name = gen_string('alpha') - target_sat.api_factory.check_create_os_with_title(module_vmware_settings['image_os']) + os = target_sat.api_factory.check_create_os_with_title(settings.vmware.image_os) image_user_data = choice((False, True)) with session: session.computeresource.create( { 'name': cr_name, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 'provider_content.datacenter.value': settings.vmware.datacenter, } ) assert session.computeresource.search(cr_name)[0]['Name'] == cr_name @@ -252,21 +219,21 @@ def test_positive_image_end_to_end(session, module_vmware_settings, target_sat): cr_name, dict( name=image_name, - operating_system=module_vmware_settings['image_os'], - architecture=module_vmware_settings['image_arch'], - username=module_vmware_settings['image_username'], + operating_system=os.title, + architecture=settings.vmware.image_arch, + username=settings.vmware.image_username, user_data=image_user_data, - password=module_vmware_settings['image_password'], - image=module_vmware_settings['image_name'], + password=settings.vmware.image_password, + image=settings.vmware.image_name, ), ) values = session.computeresource.read_image(cr_name, image_name) assert values['name'] == image_name - assert values['operating_system'] == module_vmware_settings['image_os'] - assert values['architecture'] == module_vmware_settings['image_arch'] - assert values['username'] == module_vmware_settings['image_username'] + assert values['operating_system'] == os.title + assert values['architecture'] == settings.vmware.image_arch + assert values['username'] == settings.vmware.image_username 
assert values['user_data'] == image_user_data - assert values['image'] == module_vmware_settings['image_name'] + assert values['image'] == settings.vmware.image_name session.computeresource.update_image(cr_name, image_name, dict(name=new_image_name)) assert session.computeresource.search_images(cr_name, image_name)[0]['Name'] != image_name assert ( @@ -282,27 +249,26 @@ def test_positive_image_end_to_end(session, module_vmware_settings, target_sat): @pytest.mark.tier2 @pytest.mark.run_in_one_thread -def test_positive_resource_vm_power_management(session, module_vmware_settings): +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_resource_vm_power_management(session, vmware): """Read current VMware Compute Resource virtual machine power status and change it to opposite one :id: faeabe45-5112-43a6-bde9-f869dfb26cf5 :expectedresults: virtual machine is powered on or powered off depending on its initial state - - :CaseLevel: Integration """ cr_name = gen_string('alpha') - vm_name = module_vmware_settings['vm_name'] + vm_name = settings.vmware.vm_name with session: session.computeresource.create( { 'name': cr_name, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 'provider_content.datacenter.value': settings.vmware.datacenter, } ) assert session.computeresource.search(cr_name)[0]['Name'] == cr_name @@ -321,183 +287,262 @@ def test_positive_resource_vm_power_management(session, module_vmware_settings): timeout=30, delay=2, ) - except TimedOutError: - raise AssertionError('Timed out waiting for VM to toggle power state') + except TimedOutError as err: + raise AssertionError('Timed out waiting for VM to toggle power state') from err +@pytest.mark.e2e +@pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_select_vmware_custom_profile_guest_os_rhel7(session, module_vmware_settings): - """Select custom default (3-Large) compute profile guest OS RHEL7. +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_vmware_custom_profile_end_to_end(session, vmware, request, target_sat): + """Perform end to end testing for VMware compute profile. :id: 24f7bb5f-2aaf-48cb-9a56-d2d0713dfe3d :customerscenario: true - :setup: vmware hostname and credentials. - :steps: 1. Create a compute resource of type vmware. - 2. Provide valid hostname, username and password. - 3. Select the created vmware CR. - 4. Click Compute Profile tab. - 5. Select 3-Large profile - 6. Set Guest OS field to RHEL7 OS. - - :expectedresults: Guest OS RHEL7 is selected successfully. + 2. Update a compute profile with all values - :BZ: 1315277 + :expectedresults: Compute profiles are updated successfully with all the values. 
- :CaseLevel: Integration + :BZ: 1315277, 2266672 """ cr_name = gen_string('alpha') - guest_os_name = 'Red Hat Enterprise Linux 7 (64-bit)' + guest_os_names = [ + 'Red Hat Enterprise Linux 7 (64-bit)', + 'Red Hat Enterprise Linux 8 (64 bit)', + 'Red Hat Enterprise Linux 9 (64 bit)', + ] + compute_profile = ['1-Small', '2-Medium', '3-Large'] + cpus = ['2', '4', '6'] + vm_memory = ['4000', '6000', '8000'] + annotation_notes = gen_string('alpha') + firmware_type = ['Automatic', 'BIOS', 'EFI'] + resource_pool = VMWARE_CONSTANTS['pool'] + folder = VMWARE_CONSTANTS['folder'] + virtual_hw_version = VMWARE_CONSTANTS['virtualhw_version'] + memory_hot_add = True + cpu_hot_add = True + cdrom_drive = True + disk_size = '10 GB' + network = 'VLAN 1001' # hardcoding network here as this test won't be doing actual provisioning + data_store_summary_string = _get_vmware_datastore_summary_string(vmware=vmware) + storage_data = { + 'storage': { + 'controller': VMWARE_CONSTANTS['scsicontroller'], + 'disks': [ + { + 'data_store': data_store_summary_string, + 'size': disk_size, + 'thin_provision': True, + } + ], + } + } + network_data = { + 'network_interfaces': { + 'nic_type': VMWARE_CONSTANTS['network_interface_name'], + 'network': network, + } + } with session: session.computeresource.create( { 'name': cr_name, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 'provider_content.datacenter.value': settings.vmware.datacenter, } ) - assert session.computeresource.search(cr_name)[0]['Name'] == cr_name - session.computeresource.update_computeprofile( - cr_name, COMPUTE_PROFILE_LARGE, {'provider_content.guest_os': guest_os_name} - ) - values = session.computeresource.read_computeprofile(cr_name, COMPUTE_PROFILE_LARGE) - assert values['provider_content']['guest_os'] == guest_os_name + @request.addfinalizer + def _finalize(): + cr = target_sat.api.VMWareComputeResource().search(query={'search': f'name={cr_name}'}) + if cr: + target_sat.api.VMWareComputeResource(id=cr[0].id).delete() -@pytest.mark.tier2 -def test_positive_access_vmware_with_custom_profile(session, module_vmware_settings): - """Associate custom default (3-Large) compute profile + assert session.computeresource.search(cr_name)[0]['Name'] == cr_name + for guest_os_name, cprofile, cpu, memory, firmware in zip( + guest_os_names, compute_profile, cpus, vm_memory, firmware_type, strict=True + ): + session.computeresource.update_computeprofile( + cr_name, + cprofile, + { + 'provider_content.guest_os': guest_os_name, + 'provider_content.cpus': cpu, + 'provider_content.memory': memory, + 'provider_content.cluster': settings.vmware.cluster, + 'provider_content.annotation_notes': annotation_notes, + 'provider_content.virtual_hw_version': virtual_hw_version, + 'provider_content.firmware': firmware, + 'provider_content.resource_pool': resource_pool, + 'provider_content.folder': folder, + 'provider_content.memory_hot_add': memory_hot_add, + 'provider_content.cpu_hot_add': cpu_hot_add, + 'provider_content.cdrom_drive': cdrom_drive, + 'provider_content.storage': [value for value in storage_data.values()], + 'provider_content.network_interfaces': [ + value 
for value in network_data.values() + ], + }, + ) + values = session.computeresource.read_computeprofile(cr_name, cprofile) + provider_content = values['provider_content'] + assert provider_content['guest_os'] == guest_os_name + assert provider_content['cpus'] == cpu + assert provider_content['memory'] == memory + assert provider_content['cluster'] == settings.vmware.cluster + assert provider_content['annotation_notes'] == annotation_notes + assert provider_content['virtual_hw_version'] == virtual_hw_version + if not is_open('BZ:2266672'): + assert values['provider_content']['firmware'] == firmware + assert provider_content['resource_pool'] == resource_pool + assert provider_content['folder'] == folder + assert provider_content['memory_hot_add'] == memory_hot_add + assert provider_content['cpu_hot_add'] == cpu_hot_add + assert provider_content['cdrom_drive'] == cdrom_drive + assert ( + provider_content['storage'][0]['controller'] == VMWARE_CONSTANTS['scsicontroller'] + ) + assert provider_content['storage'][0]['disks'][0]['size'] == disk_size + assert ( + provider_content['network_interfaces'][0]['nic_type'] + == VMWARE_CONSTANTS['network_interface_name'] + ) + assert provider_content['network_interfaces'][0]['network'] == network + session.computeresource.delete(cr_name) + assert not session.computeresource.search(cr_name) - :id: 751ef765-5091-4322-a0d9-0c9c73009cc4 - :setup: vmware hostname and credentials. +@pytest.mark.tier2 +@pytest.mark.parametrize('vmware', ['vmware7', 'vmware8'], indirect=True) +def test_positive_virt_card(session, target_sat, module_location, module_org, vmware): + """Check to see that the Virtualization card appears for an imported VM - :steps: + :id: 0502d5a6-64c1-422f-a9ba-ac7c2ee7bad2 - 1. Create a compute resource of type vmware. - 2. Provide valid hostname, username and password. - 3. Select the created vmware CR. - 4. Click Compute Profile tab. - 5. Edit (3-Large) with valid configurations and submit. + :parametrized: no - :expectedresults: The Compute Resource created and associated to compute profile (3-Large) - with provided values. 
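The rewritten profile test above registers its compute-resource cleanup with request.addfinalizer, so the entity is deleted even if an assertion fails mid-test. A condensed sketch of that pattern, assuming the target_sat API-client fixture used throughout this module:

def test_example(request, target_sat):
    cr_name = 'example-vmware-cr'  # illustrative name

    @request.addfinalizer
    def _finalize():
        # Search first, so cleanup is a no-op if creation never happened
        cr = target_sat.api.VMWareComputeResource().search(
            query={'search': f'name={cr_name}'}
        )
        if cr:
            target_sat.api.VMWareComputeResource(id=cr[0].id).delete()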
+ :expectedresults: Virtualization card appears in the new Host UI for the VM - :CaseLevel: Integration + :CaseImportance: Medium """ - cr_name = gen_string('alpha') - data_store_summary_string = _get_vmware_datastore_summary_string() - cr_profile_data = dict( - cpus='2', - cores_per_socket='2', - memory='1024', - firmware='EFI', - cluster=VMWARE_CONSTANTS.get('cluster'), - resource_pool=VMWARE_CONSTANTS.get('pool'), - folder=VMWARE_CONSTANTS.get('folder'), - guest_os=VMWARE_CONSTANTS.get('guest_os'), - virtual_hw_version=VMWARE_CONSTANTS.get('virtualhw_version'), - memory_hot_add=True, - cpu_hot_add=True, - cdrom_drive=True, - annotation_notes=gen_string('alpha'), - network_interfaces=[] - if 'interface' not in module_vmware_settings - else [ - dict( - nic_type=VMWARE_CONSTANTS.get('network_interface_name'), - network=module_vmware_settings['interface'], - ), - dict( - nic_type=VMWARE_CONSTANTS.get('network_interface_name'), - network=module_vmware_settings['interface'], - ), - ], - storage=[ - dict( - controller=VMWARE_CONSTANTS.get('scsicontroller'), - disks=[ - dict( - data_store=data_store_summary_string, - size='10 GB', - thin_provision=True, - ), - dict( - data_store=data_store_summary_string, - size='20 GB', - thin_provision=False, - eager_zero=False, - ), - ], - ), - dict( - controller=VMWARE_CONSTANTS.get('scsicontroller'), - disks=[ - dict( - data_store=data_store_summary_string, - size='30 GB', - thin_provision=False, - eager_zero=True, - ) - ], - ), - ], + # create entities for hostgroup + default_loc_id = ( + target_sat.api.Location().search(query={'search': f'name="{DEFAULT_LOC}"'})[0].id + ) + target_sat.api.SmartProxy(id=1, location=[default_loc_id, module_location.id]).update() + domain = target_sat.api.Domain( + organization=[module_org.id], location=[module_location] + ).create() + subnet = target_sat.api.Subnet( + organization=[module_org.id], location=[module_location], domain=[domain] + ).create() + architecture = target_sat.api.Architecture().create() + ptable = target_sat.api.PartitionTable( + organization=[module_org.id], location=[module_location] + ).create() + operatingsystem = target_sat.api.OperatingSystem( + architecture=[architecture], ptable=[ptable] + ).create() + medium = target_sat.api.Media( + organization=[module_org.id], location=[module_location], operatingsystem=[operatingsystem] + ).create() + lce = ( + target_sat.api.LifecycleEnvironment(name="Library", organization=module_org.id) + .search()[0] + .read() + .id ) + cv = target_sat.api.ContentView(organization=module_org).create() + cv.publish() + + # create hostgroup + hostgroup_name = gen_string('alpha') + target_sat.api.HostGroup( + name=hostgroup_name, + architecture=architecture, + domain=domain, + subnet=subnet, + location=[module_location.id], + medium=medium, + operatingsystem=operatingsystem, + organization=[module_org], + ptable=ptable, + lifecycle_environment=lce, + content_view=cv, + content_source=1, + ).create() + cr_name = gen_string('alpha') with session: session.computeresource.create( { 'name': cr_name, 'provider': FOREMAN_PROVIDERS['vmware'], - 'provider_content.vcenter': module_vmware_settings['vcenter'], - 'provider_content.user': module_vmware_settings['user'], - 'provider_content.password': module_vmware_settings['password'], - 'provider_content.datacenter.value': module_vmware_settings['datacenter'], + 'provider_content.vcenter': vmware.hostname, + 'provider_content.user': settings.vmware.username, + 'provider_content.password': settings.vmware.password, + 
'provider_content.datacenter.value': settings.vmware.datacenter, + 'locations.resources.assigned': [module_location.name], + 'organizations.resources.assigned': [module_org.name], } ) - assert session.computeresource.search(cr_name)[0]['Name'] == cr_name - session.computeresource.update_computeprofile( + session.hostgroup.update(hostgroup_name, {'host_group.deploy': cr_name + " (VMware)"}) + session.computeresource.vm_import( cr_name, - COMPUTE_PROFILE_LARGE, - {f'provider_content.{key}': value for key, value in cr_profile_data.items()}, + settings.vmware.vm_name, + hostgroup_name, + module_location.name, + name=settings.vmware.vm_name, ) - values = session.computeresource.read_computeprofile(cr_name, COMPUTE_PROFILE_LARGE) - provider_content = values['provider_content'] - # assert main compute resource profile data updated successfully. - excluded_keys = ['network_interfaces', 'storage'] - expected_value = { - key: value for key, value in cr_profile_data.items() if key not in excluded_keys - } - provided_value = { - key: value for key, value in provider_content.items() if key in expected_value - } - assert provided_value == expected_value - # assert compute resource profile network data updated successfully. - for network_index, expected_network_value in enumerate( - cr_profile_data['network_interfaces'] - ): - provided_network_value = { - key: value - for key, value in provider_content['network_interfaces'][network_index].items() - if key in expected_network_value - } - assert provided_network_value == expected_network_value - # assert compute resource profile storage data updated successfully. - for controller_index, expected_controller_value in enumerate(cr_profile_data['storage']): - provided_controller_value = provider_content['storage'][controller_index] - assert ( - provided_controller_value['controller'] == expected_controller_value['controller'] - ) - for disk_index, expected_disk_value in enumerate(expected_controller_value['disks']): - provided_disk_value = { - key: value - for key, value in provided_controller_value['disks'][disk_index].items() - if key in expected_disk_value - } - assert provided_disk_value == expected_disk_value + host_name = '.'.join([settings.vmware.vm_name, domain.name]) + power_status = session.computeresource.vm_status(cr_name, settings.vmware.vm_name) + if power_status is False: + session.computeresource.vm_poweron(cr_name, settings.vmware.vm_name) + try: + wait_for( + lambda: ( + session.browser.refresh(), + session.computeresource.vm_status(cr_name, settings.vmware.vm_name), + )[1] + is not power_status, + timeout=30, + delay=2, + ) + except TimedOutError as err: + raise AssertionError('Timed out waiting for VM to toggle power state') from err + + virt_card = session.host_new.get_virtualization(host_name)['details'] + assert virt_card['datacenter'] == settings.vmware.datacenter + assert virt_card['cluster'] == settings.vmware.cluster + assert virt_card['memory'] == '5 GB' + assert 'public_ip_address' in virt_card + assert virt_card['mac_address'] == vmware.mac_address + assert virt_card['cpus'] == '1' + if 'disk_label' in virt_card: + assert virt_card['disk_label'] == 'Hard disk 1' + if 'disk_capacity' in virt_card: + assert virt_card['disk_capacity'] != '' + if 'partition_capacity' in virt_card: + assert virt_card['partition_capacity'] != '' + if 'partition_path' in virt_card: + assert virt_card['partition_path'] == '/boot' + if 'partition_allocation' in virt_card: + assert virt_card['partition_allocation'] != '' + assert 
virt_card['cores_per_socket'] == '1' + assert virt_card['firmware'] == 'bios' + assert virt_card['hypervisor'] != '' + assert virt_card['connection_state'] == 'connected' + assert virt_card['overall_status'] == 'green' + assert virt_card['annotation_notes'] == '' + assert virt_card['running_on'] == cr_name + target_sat.api.Host( + id=target_sat.api.Host().search(query={'search': f'name={host_name}'})[0].id + ).delete() diff --git a/tests/foreman/ui/test_config_group.py b/tests/foreman/ui/test_config_group.py index db2b6eba171..b38384d2977 100644 --- a/tests/foreman/ui/test_config_group.py +++ b/tests/foreman/ui/test_config_group.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Low -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest @pytest.fixture(scope='module') @@ -34,8 +29,6 @@ def test_positive_end_to_end(session_puppet_enabled_sat, module_puppet_class): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_containerimagetag.py b/tests/foreman/ui/test_containerimagetag.py index 8c5517c8935..b46cc15bc4e 100644 --- a/tests/foreman/ui/test_containerimagetag.py +++ b/tests/foreman/ui/test_containerimagetag.py @@ -4,25 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ContainerManagement-Content :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from nailgun import entities +import pytest -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import ENVIRONMENT -from robottelo.constants import REPO_TYPE +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + DEFAULT_CV, + ENVIRONMENT, + REPO_TYPE, +) +from robottelo.utils.issue_handlers import is_open @pytest.fixture(scope="module") @@ -43,11 +42,10 @@ def module_repository(module_product): product=module_product, url=CONTAINER_REGISTRY_HUB, ).create() - repo.sync() + repo.sync(timeout=1440) return repo -@pytest.mark.skip_if_open("BZ:2009069") @pytest.mark.tier2 def test_positive_search(session, module_org, module_product, module_repository): """Search for a docker image tag and reads details of it @@ -57,14 +55,21 @@ def test_positive_search(session, module_org, module_product, module_repository) :expectedresults: The docker image tag can be searched and found, details are read - :CaseLevel: Integration + :BZ: 2009069, 2242515 """ with session: session.organization.select(org_name=module_org.name) search = session.containerimagetag.search('latest') - assert module_product.name in [i['Product Name'] for i in search] - assert module_repository.name in [i['Repository Name'] for i in search] + if not is_open('BZ:2242515'): + assert module_product.name in [i['Product Name'] for i in search] values = session.containerimagetag.read('latest') - assert module_product.name == values['details']['product'] - assert module_repository.name == values['details']['repository'] + if not is_open('BZ:2242515'): + assert module_product.name == values['details']['product'] assert values['lce']['table'][0]['Environment'] == ENVIRONMENT + repo_line = next( + (item for item in values['repos']['table'] if item['Name'] == module_repository.name), + None, + ) + assert module_product.name == repo_line['Product'] + 
assert repo_line['Content View'] == DEFAULT_CV + assert 'Success' in repo_line['Last Sync'] diff --git a/tests/foreman/ui/test_contentcredentials.py b/tests/foreman/ui/test_contentcredentials.py index 3a2ce7c9296..4fa3f519986 100644 --- a/tests/foreman/ui/test_contentcredentials.py +++ b/tests/foreman/ui/test_contentcredentials.py @@ -4,37 +4,25 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentCredentials :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities from robottelo.config import settings -from robottelo.constants import CONTENT_CREDENTIALS_TYPES -from robottelo.constants import DataFile +from robottelo.constants import CONTENT_CREDENTIALS_TYPES, DataFile from robottelo.utils.datafactory import gen_string empty_message = "You currently don't have any Products associated with this Content Credential." -@pytest.fixture(scope='module') -def module_org(): - return entities.Organization().create() - - @pytest.fixture(scope='module') def gpg_content(): - return DataFile.VALID_GPG_KEY_FILE.read_bytes() + return DataFile.VALID_GPG_KEY_FILE.read_text() @pytest.fixture(scope='module') @@ -45,14 +33,12 @@ def gpg_path(): @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_end_to_end(session, module_org, gpg_content): +def test_positive_end_to_end(session, target_sat, module_org, gpg_content): """Perform end to end testing for gpg key component :id: d1a8cc1b-a072-465b-887d-5bca0acd21c3 :expectedresults: All expected CRUD actions finished successfully - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') @@ -66,11 +52,11 @@ def test_positive_end_to_end(session, module_org, gpg_content): } ) assert session.contentcredential.search(name)[0]['Name'] == name - gpg_key = entities.ContentCredential(organization=module_org).search( + gpg_key = target_sat.api.ContentCredential(organization=module_org).search( query={'search': f'name="{name}"'} )[0] - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() values = session.contentcredential.read(name) assert values['details']['name'] == name assert values['details']['content_type'] == CONTENT_CREDENTIALS_TYPES['gpg'] @@ -86,13 +72,16 @@ def test_positive_end_to_end(session, module_org, gpg_content): # Update gpg key with new name session.contentcredential.update(name, {'details.name': new_name}) assert session.contentcredential.search(new_name)[0]['Name'] == new_name + # Delete repo and product dependent on the gpg key + repo.delete() + product.delete() # Delete gpg key session.contentcredential.delete(new_name) assert session.contentcredential.search(new_name)[0]['Name'] != new_name @pytest.mark.tier2 -def test_positive_search_scoped(session, gpg_content): +def test_positive_search_scoped(session, target_sat, gpg_content, module_org): """Search for gpgkey by organization id parameter :id: e1e04f68-5d4f-43f6-a9c1-b9f566fcbc92 @@ -104,9 +93,8 @@ def test_positive_search_scoped(session, gpg_content): :BZ: 1259374 """ name = gen_string('alpha') - org = entities.Organization().create() with session: - session.organization.select(org.name) + session.organization.select(module_org.name) session.contentcredential.create( { 'name': name, @@ -114,22 +102,23 @@ def 
test_positive_search_scoped(session, gpg_content): 'content': gpg_content, } ) - assert session.contentcredential.search(f'organization_id = {org.id}')[0]['Name'] == name + assert ( + session.contentcredential.search(f'organization_id = {module_org.id}')[0]['Name'] + == name + ) @pytest.mark.tier2 -def test_positive_add_empty_product(session, module_org, gpg_content): +def test_positive_add_empty_product(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with empty (no repos) custom product :id: e18ae9f5-43d9-4049-92ca-1eafaca05096 :expectedresults: gpg key is associated with product - - :CaseLevel: Integration """ prod_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() + gpg_key = target_sat.api.GPGKey(content=gpg_content, organization=module_org).create() with session: session.product.create({'name': prod_name, 'gpg_key': gpg_key.name}) values = session.contentcredential.read(gpg_key.name) @@ -140,7 +129,7 @@ def test_positive_add_empty_product(session, module_org, gpg_content): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_product_with_repo(session, module_org, gpg_content): +def test_positive_add_product_with_repo(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with custom product that has one repository @@ -148,15 +137,15 @@ def test_positive_add_product_with_repo(session, module_org, gpg_content): :expectedresults: gpg key is associated with product as well as with the repository - - :CaseLevel: Integration """ name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() # Creates new repository without GPGKey - repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + repo = target_sat.api.Repository(url=settings.repos.yum_1.url, product=product).create() with session: values = session.contentcredential.read(name) assert values['products']['table'][0]['Name'] == empty_message @@ -174,24 +163,24 @@ def test_positive_add_product_with_repo(session, module_org, gpg_content): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_product_with_repos(session, module_org, gpg_content): +def test_positive_add_product_with_repos(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with custom product that has more than one repository :id: 0edffad7-0ab4-4bef-b16b-f6c8de55b0dc :expectedresults: gpg key is properly associated with repositories - - :CaseLevel: Integration """ name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product and associate GPGKey with it - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() # Creates 
new repository_1 without GPGKey - repo1 = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + repo1 = target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() # Creates new repository_2 without GPGKey - repo2 = entities.Repository(product=product, url=settings.repos.yum_2.url).create() + repo2 = target_sat.api.Repository(product=product, url=settings.repos.yum_2.url).create() with session: values = session.contentcredential.read(name) assert len(values['repositories']['table']) == 2 @@ -201,7 +190,7 @@ def test_positive_add_product_with_repos(session, module_org, gpg_content): @pytest.mark.tier2 -def test_positive_add_repo_from_product_with_repo(session, module_org, gpg_content): +def test_positive_add_repo_from_product_with_repo(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it to repository from custom product that has one repository @@ -209,15 +198,15 @@ def test_positive_add_repo_from_product_with_repo(session, module_org, gpg_conte :expectedresults: gpg key is associated with the repository but not with the product - - :CaseLevel: Integration """ name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() # Creates new repository - repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + repo = target_sat.api.Repository(url=settings.repos.yum_1.url, product=product).create() with session: values = session.contentcredential.read(name) assert values['products']['table'][0]['Name'] == empty_message @@ -232,7 +221,7 @@ def test_positive_add_repo_from_product_with_repo(session, module_org, gpg_conte @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_repo_from_product_with_repos(session, module_org, gpg_content): +def test_positive_add_repo_from_product_with_repos(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it to repository from custom product that has more than one repository @@ -240,19 +229,19 @@ def test_positive_add_repo_from_product_with_repos(session, module_org, gpg_cont :expectedresults: gpg key is associated with one of the repositories but not with the product - - :CaseLevel: Integration """ name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product without selecting GPGkey - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() # Creates new repository with GPGKey - repo1 = entities.Repository( + repo1 = target_sat.api.Repository( url=settings.repos.yum_1.url, product=product, gpg_key=gpg_key ).create() # Creates new repository without GPGKey - entities.Repository(url=settings.repos.yum_2.url, product=product).create() + target_sat.api.Repository(url=settings.repos.yum_2.url, product=product).create() with session: values = session.contentcredential.read(name) assert 
values['products']['table'][0]['Name'] == empty_message @@ -274,8 +263,6 @@ def test_positive_add_product_using_repo_discovery(session, gpg_path): the repositories :BZ: 1210180, 1461804, 1595792 - - :CaseLevel: Integration """ name = gen_string('alpha') product_name = gen_string('alpha') @@ -308,7 +295,7 @@ def test_positive_add_product_using_repo_discovery(session, gpg_path): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_product_and_search(session, module_org, gpg_content): +def test_positive_add_product_and_search(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with custom product that has one repository After search and select product through gpg key interface @@ -321,15 +308,15 @@ def test_positive_add_product_and_search(session, module_org, gpg_content): gpg key 'Product' tab :BZ: 1411800 - - :CaseLevel: Integration """ name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product and associate GPGKey with it - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() # Creates new repository without GPGKey - repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + repo = target_sat.api.Repository(url=settings.repos.yum_1.url, product=product).create() with session: values = session.contentcredential.read(gpg_key.name) assert len(values['products']['table']) == 1 @@ -356,8 +343,6 @@ def test_positive_update_key_for_product_using_repo_discovery(session, gpg_path) repository before/after update :BZ: 1210180, 1461804 - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') @@ -400,54 +385,7 @@ def test_positive_update_key_for_product_using_repo_discovery(session, gpg_path) @pytest.mark.tier2 -@pytest.mark.upgrade -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -@pytest.mark.usefixtures('allow_repo_discovery') -def test_positive_delete_key_for_product_using_repo_discovery(session, gpg_path): - """Create gpg key with valid name and valid gpg then associate - it with custom product using Repo discovery method then delete it - - :id: 513ae138-84d9-4c43-8d4e-7b9fb797208d - - :expectedresults: gpg key is associated with product as well as with - the repositories during creation but removed from product after - deletion - - :BZ: 1210180, 1461804 - - :CaseLevel: Integration - """ - name = gen_string('alpha') - product_name = gen_string('alpha') - repo_name = 'fakerepo01' - with session: - session.contentcredential.create( - { - 'name': name, - 'content_type': CONTENT_CREDENTIALS_TYPES['gpg'], - 'upload_file': gpg_path, - } - ) - assert session.contentcredential.search(name)[0]['Name'] == name - session.product.discover_repo( - { - 'repo_type': 'Yum Repositories', - 'url': settings.repos.repo_discovery.url, - 'discovered_repos.repos': repo_name, - 'create_repo.product_type': 'New Product', - 'create_repo.product_content.product_name': product_name, - 'create_repo.product_content.gpg_key': name, - } - ) - product_values = session.product.read(product_name) - assert product_values['details']['gpg_key'] == name - 
session.contentcredential.delete(name) - product_values = session.product.read(product_name) - assert product_values['details']['gpg_key'] == '' - - -@pytest.mark.tier2 -def test_positive_update_key_for_empty_product(session, module_org, gpg_content): +def test_positive_update_key_for_empty_product(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with empty (no repos) custom product then update the key @@ -455,14 +393,14 @@ def test_positive_update_key_for_empty_product(session, module_org, gpg_content) :expectedresults: gpg key is associated with product before/after update - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product and associate GPGKey with it - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() with session: values = session.contentcredential.read(name) # Assert that GPGKey is associated with product @@ -477,7 +415,7 @@ def test_positive_update_key_for_empty_product(session, module_org, gpg_content) @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_update_key_for_product_with_repo(session, module_org, gpg_content): +def test_positive_update_key_for_product_with_repo(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with custom product that has one repository then update the key @@ -485,16 +423,16 @@ def test_positive_update_key_for_product_with_repo(session, module_org, gpg_cont :expectedresults: gpg key is associated with product as well as with repository after update - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product and associate GPGKey with it - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() # Creates new repository without GPGKey - repo = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + repo = target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() with session: session.contentcredential.update(name, {'details.name': new_name}) values = session.contentcredential.read(new_name) @@ -508,7 +446,7 @@ def test_positive_update_key_for_product_with_repo(session, module_org, gpg_cont @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_update_key_for_product_with_repos(session, module_org, gpg_content): +def test_positive_update_key_for_product_with_repos(session, target_sat, module_org, gpg_content): """Create gpg key with valid name and valid gpg key then associate it with custom product that has more than one repository then update the key @@ -517,18 +455,18 @@ def test_positive_update_key_for_product_with_repos(session, module_org, gpg_con :expectedresults: gpg 
key is associated with product as well as with repositories after update - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product and associate GPGKey with it - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() # Creates new repository_1 without GPGKey - repo1 = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + repo1 = target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() # Creates new repository_2 without GPGKey - repo2 = entities.Repository(product=product, url=settings.repos.yum_2.url).create() + repo2 = target_sat.api.Repository(product=product, url=settings.repos.yum_2.url).create() with session: session.contentcredential.update(name, {'details.name': new_name}) values = session.contentcredential.read(new_name) @@ -540,7 +478,9 @@ def test_positive_update_key_for_product_with_repos(session, module_org, gpg_con @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_update_key_for_repo_from_product_with_repo(session, module_org, gpg_content): +def test_positive_update_key_for_repo_from_product_with_repo( + session, target_sat, module_org, gpg_content +): """Create gpg key with valid name and valid gpg key then associate it to repository from custom product that has one repository then update the key @@ -549,16 +489,16 @@ def test_positive_update_key_for_repo_from_product_with_repo(session, module_org :expectedresults: gpg key is associated with repository after update but not with product. 
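The update tests in this module share one API-side setup, now routed through target_sat.api rather than the deprecated module-level nailgun entities import. The shared shape, with values taken from the surrounding tests:

from robottelo.config import settings
from robottelo.utils.datafactory import gen_string

name = gen_string('alpha')
gpg_key = target_sat.api.GPGKey(
    content=gpg_content, name=name, organization=module_org
).create()
# Product created without the key; the key is attached at repository level
product = target_sat.api.Product(organization=module_org).create()
repo = target_sat.api.Repository(
    url=settings.repos.yum_1.url, product=product, gpg_key=gpg_key
).create()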
- - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product without selecting GPGkey - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() # Creates new repository with GPGKey - repo = entities.Repository( + repo = target_sat.api.Repository( gpg_key=gpg_key, product=product, url=settings.repos.yum_1.url ).create() with session: @@ -575,7 +515,9 @@ def test_positive_update_key_for_repo_from_product_with_repo(session, module_org @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_update_key_for_repo_from_product_with_repos(session, module_org, gpg_content): +def test_positive_update_key_for_repo_from_product_with_repos( + session, target_sat, module_org, gpg_content +): """Create gpg key with valid name and valid gpg key then associate it to repository from custom product that has more than one repository then update the key @@ -584,224 +526,23 @@ def test_positive_update_key_for_repo_from_product_with_repos(session, module_or :expectedresults: gpg key is associated with single repository after update but not with product - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') - gpg_key = entities.GPGKey(content=gpg_content, name=name, organization=module_org).create() + gpg_key = target_sat.api.GPGKey( + content=gpg_content, name=name, organization=module_org + ).create() # Creates new product without selecting GPGkey - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() # Creates new repository_1 with GPGKey - repo1 = entities.Repository( + repo1 = target_sat.api.Repository( url=settings.repos.yum_1.url, product=product, gpg_key=gpg_key ).create() # Creates new repository_2 without GPGKey - entities.Repository(product=product, url=settings.repos.yum_2.url).create() + target_sat.api.Repository(product=product, url=settings.repos.yum_2.url).create() with session: session.contentcredential.update(name, {'details.name': new_name}) values = session.contentcredential.read(new_name) assert values['products']['table'][0]['Name'] == empty_message assert len(values['repositories']['table']) == 1 assert values['repositories']['table'][0]['Name'] == repo1.name - - -@pytest.mark.tier2 -def test_positive_delete_key_for_empty_product(session, module_org, gpg_content): - """Create gpg key with valid name and valid gpg key then - associate it with empty (no repos) custom product then delete it - - :id: b9766403-61b2-4a88-a744-a25d53d577fb - - :expectedresults: gpg key is associated with product during creation - but removed from product after deletion - - :CaseLevel: Integration - """ - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() - # Creates new product and associate GPGKey with it - product = entities.Product( - gpg_key=gpg_key, name=gen_string('alpha'), organization=module_org - ).create() - with session: - # Assert that GPGKey is associated with product - gpg_values = session.contentcredential.read(gpg_key.name) - assert len(gpg_values['products']['table']) == 1 - assert gpg_values['products']['table'][0]['Name'] == product.name 
- product_values = session.product.read(product.name) - assert product_values['details']['gpg_key'] == gpg_key.name - session.contentcredential.delete(gpg_key.name) - # Assert GPGKey isn't associated with product - product_values = session.product.read(product.name) - assert not product_values['details']['gpg_key'] - - -@pytest.mark.tier2 -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_key_for_product_with_repo(session, module_org, gpg_content): - """Create gpg key with valid name and valid gpg key then - associate it with custom product that has one repository then delete it - - :id: 75057dd2-9083-47a8-bea7-4f073bdb667e - - :expectedresults: gpg key is associated with product as well as with - the repository during creation but removed from product after - deletion - - :CaseLevel: Integration - """ - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() - # Creates new product and associate GPGKey with it - product = entities.Product( - gpg_key=gpg_key, name=gen_string('alpha'), organization=module_org - ).create() - # Creates new repository without GPGKey - repo = entities.Repository( - name=gen_string('alpha'), url=settings.repos.yum_1.url, product=product - ).create() - with session: - # Assert that GPGKey is associated with product - values = session.contentcredential.read(gpg_key.name) - assert len(values['products']['table']) == 1 - assert values['products']['table'][0]['Name'] == product.name - assert len(values['repositories']['table']) == 1 - assert values['repositories']['table'][0]['Name'] == repo.name - repo_values = session.repository.read(product.name, repo.name) - assert repo_values['repo_content']['gpg_key'] == gpg_key.name - session.contentcredential.delete(gpg_key.name) - # Assert GPGKey isn't associated with product and repository - product_values = session.product.read(product.name) - assert not product_values['details']['gpg_key'] - repo_values = session.repository.read(product.name, repo.name) - assert not repo_values['repo_content']['gpg_key'] - - -@pytest.mark.tier2 -@pytest.mark.upgrade -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_key_for_product_with_repos(session, module_org, gpg_content): - """Create gpg key with valid name and valid gpg key then - associate it with custom product that has more than one repository then - delete it - - :id: cb5d4efd-863a-4b8e-b1f8-a0771e90ff5e - - :expectedresults: gpg key is associated with product as well as with - repositories during creation but removed from product after - deletion - - :CaseLevel: Integration - """ - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() - # Creates new product and associate GPGKey with it - product = entities.Product( - gpg_key=gpg_key, name=gen_string('alpha'), organization=module_org - ).create() - # Creates new repository_1 without GPGKey - repo1 = entities.Repository( - name=gen_string('alpha'), product=product, url=settings.repos.yum_1.url - ).create() - # Creates new repository_2 without GPGKey - repo2 = entities.Repository( - name=gen_string('alpha'), product=product, url=settings.repos.yum_2.url - ).create() - with session: - # Assert that GPGKey is associated with product - values = session.contentcredential.read(gpg_key.name) - assert len(values['products']['table']) == 1 - assert values['products']['table'][0]['Name'] == product.name - assert 
len(values['repositories']['table']) == 2 - assert {repo1.name, repo2.name} == { - repo['Name'] for repo in values['repositories']['table'] - } - session.contentcredential.delete(gpg_key.name) - # Assert GPGKey isn't associated with product and repositories - product_values = session.product.read(product.name) - assert not product_values['details']['gpg_key'] - for repo in [repo1, repo2]: - repo_values = session.repository.read(product.name, repo.name) - assert not repo_values['repo_content']['gpg_key'] - - -@pytest.mark.tier2 -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_key_for_repo_from_product_with_repo(session, module_org, gpg_content): - """Create gpg key with valid name and valid gpg key then - associate it to repository from custom product that has one repository - then delete the key - - :id: 92ba492e-79af-48fe-84cb-763102b42fa7 - - :expectedresults: gpg key is associated with single repository during - creation but removed from repository after deletion - - :CaseLevel: Integration - """ - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() - # Creates new product without selecting GPGkey - product = entities.Product(name=gen_string('alpha'), organization=module_org).create() - # Creates new repository with GPGKey - repo = entities.Repository( - name=gen_string('alpha'), url=settings.repos.yum_1.url, product=product, gpg_key=gpg_key - ).create() - with session: - # Assert that GPGKey is associated with product - values = session.contentcredential.read(gpg_key.name) - assert values['products']['table'][0]['Name'] == empty_message - assert len(values['repositories']['table']) == 1 - assert values['repositories']['table'][0]['Name'] == repo.name - repo_values = session.repository.read(product.name, repo.name) - assert repo_values['repo_content']['gpg_key'] == gpg_key.name - session.contentcredential.delete(gpg_key.name) - # Assert GPGKey isn't associated with repository - repo_values = session.repository.read(product.name, repo.name) - assert not repo_values['repo_content']['gpg_key'] - - -@pytest.mark.tier2 -@pytest.mark.upgrade -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_key_for_repo_from_product_with_repos(session, module_org, gpg_content): - """Create gpg key with valid name and valid gpg key then - associate it to repository from custom product that has more than - one repository then delete the key - - :id: 5f204a44-bf7b-4a9c-9974-b701e0d38860 - - :expectedresults: gpg key is associated with single repository but not - with product during creation but removed from repository after - deletion - - :BZ: 1461804 - - :CaseLevel: Integration - """ - # Creates New GPGKey - gpg_key = entities.GPGKey(content=gpg_content, organization=module_org).create() - # Creates new product without GPGKey association - product = entities.Product(name=gen_string('alpha'), organization=module_org).create() - # Creates new repository_1 with GPGKey association - repo1 = entities.Repository( - gpg_key=gpg_key, name=gen_string('alpha'), product=product, url=settings.repos.yum_1.url - ).create() - repo2 = entities.Repository( - name=gen_string('alpha'), - product=product, - url=settings.repos.yum_2.url, - # notice that we're not making this repo point to the GPG key - ).create() - with session: - # Assert that GPGKey is associated with product - values = session.contentcredential.read(gpg_key.name) - assert 
values['products']['table'][0]['Name'] == empty_message - assert len(values['repositories']['table']) == 1 - assert values['repositories']['table'][0]['Name'] == repo1.name - repo_values = session.repository.read(product.name, repo1.name) - assert repo_values['repo_content']['gpg_key'] == gpg_key.name - repo_values = session.repository.read(product.name, repo2.name) - assert not repo_values['repo_content']['gpg_key'] - session.contentcredential.delete(gpg_key.name) - # Assert GPGKey isn't associated with repositories - for repo in [repo1, repo2]: - repo_values = session.repository.read(product.name, repo.name) - assert not repo_values['repo_content']['gpg_key'] diff --git a/tests/foreman/ui/test_contenthost.py b/tests/foreman/ui/test_contenthost.py index d2bd6ded4d3..f0c33476198 100644 --- a/tests/foreman/ui/test_contenthost.py +++ b/tests/foreman/ui/test_contenthost.py @@ -4,46 +4,36 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Hosts-Content :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from datetime import datetime, timedelta import re -from datetime import datetime -from datetime import timedelta from urllib.parse import urlparse +from fauxfactory import gen_integer, gen_string import pytest -from airgun.session import Session -from fauxfactory import gen_integer -from fauxfactory import gen_string -from nailgun import entities - -from robottelo.cli.factory import CLIFactoryError -from robottelo.cli.factory import make_fake_host -from robottelo.cli.factory import make_virt_who_config -from robottelo.config import setting_is_set -from robottelo.config import settings -from robottelo.constants import DEFAULT_SYSPURPOSE_ATTRIBUTES -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP_NAME -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_1_ERRATA_ID -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE_NAME -from robottelo.constants import VDC_SUBSCRIPTION_NAME -from robottelo.constants import VIRT_WHO_HYPERVISOR_TYPES + +from robottelo.config import setting_is_set, settings +from robottelo.constants import ( + DEFAULT_SYSPURPOSE_ATTRIBUTES, + FAKE_0_CUSTOM_PACKAGE, + FAKE_0_CUSTOM_PACKAGE_GROUP, + FAKE_0_CUSTOM_PACKAGE_GROUP_NAME, + FAKE_0_CUSTOM_PACKAGE_NAME, + FAKE_1_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE_NAME, + FAKE_1_ERRATA_ID, + FAKE_2_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE_NAME, + VDC_SUBSCRIPTION_NAME, + VIRT_WHO_HYPERVISOR_TYPES, +) +from robottelo.exceptions import CLIFactoryError from robottelo.utils.issue_handlers import is_open from robottelo.utils.virtwho import create_fake_hypervisor_content @@ -52,8 +42,10 @@ @pytest.fixture(scope='module', autouse=True) -def host_ui_default(): - settings_object = entities.Setting().search(query={'search': 'name=host_details_ui'})[0] +def host_ui_default(module_target_sat): + settings_object = module_target_sat.api.Setting().search( + query={'search': 'name=host_details_ui'} + )[0] settings_object.value = 'No' settings_object.update({'value'}) yield @@ -62,10 +54,10 @@ def host_ui_default(): @pytest.fixture(scope='module') -def module_org(): - org = entities.Organization(simple_content_access=False).create() +def 
module_org(module_target_sat): + org = module_target_sat.api.Organization(simple_content_access=False).create() # adding remote_execution_connect_by_ip=Yes at org level - entities.Parameter( + module_target_sat.api.Parameter( name='remote_execution_connect_by_ip', value='Yes', organization=org.id, @@ -78,7 +70,8 @@ def vm(module_repos_collection_with_manifest, rhel7_contenthost, target_sat): """Virtual machine registered in satellite""" module_repos_collection_with_manifest.setup_virtual_machine(rhel7_contenthost) rhel7_contenthost.add_rex_key(target_sat) - yield rhel7_contenthost + rhel7_contenthost.run(r'subscription-manager repos --enable \*') + return rhel7_contenthost @pytest.fixture @@ -88,12 +81,12 @@ def vm_module_streams(module_repos_collection_with_manifest, rhel8_contenthost, rhel8_contenthost, install_katello_agent=False ) rhel8_contenthost.add_rex_key(satellite=target_sat) - yield rhel8_contenthost + return rhel8_contenthost -def set_ignore_facts_for_os(value=False): +def set_ignore_facts_for_os(module_target_sat, value=False): """Helper to set 'ignore_facts_for_operatingsystem' setting""" - ignore_setting = entities.Setting().search( + ignore_setting = module_target_sat.api.Setting().search( query={'search': 'name="ignore_facts_for_operatingsystem"'} )[0] ignore_setting.value = str(value) @@ -133,8 +126,6 @@ def test_positive_end_to_end(session, default_location, module_repos_collection_ :expectedresults: content host details are the same as expected, package and errata installation are successful - :CaseLevel: System - :parametrized: yes :CaseImportance: Critical @@ -247,8 +238,6 @@ def test_positive_end_to_end_bulk_update(session, default_location, vm, target_s :BZ: 1712069, 1838800 :parametrized: yes - - :CaseLevel: System """ hc_name = gen_string('alpha') description = gen_string('alpha') @@ -331,8 +320,6 @@ def test_positive_search_by_subscription_status(session, default_location, vm): :BZ: 1406855, 1498827, 1495271 :parametrized: yes - - :CaseLevel: System """ with session: session.location.select(default_location.name) @@ -382,8 +369,6 @@ def test_positive_toggle_subscription_status(session, default_location, vm): :BZ: 1836868 - :CaseLevel: System - :parametrized: yes :CaseImportance: Medium @@ -440,8 +425,6 @@ def test_negative_install_package(session, default_location, vm): :expectedresults: Task finished with warning :parametrized: yes - - :CaseLevel: System """ with session: session.location.select(default_location.name) @@ -477,8 +460,6 @@ def test_positive_remove_package(session, default_location, vm): :expectedresults: Package was successfully removed :parametrized: yes - - :CaseLevel: System """ vm.download_install_rpm(settings.repos.yum_6.url, FAKE_0_CUSTOM_PACKAGE) with session: @@ -516,8 +497,6 @@ def test_positive_upgrade_package(session, default_location, vm): :expectedresults: Package was successfully upgraded :parametrized: yes - - :CaseLevel: System """ vm.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') with session: @@ -556,8 +535,6 @@ def test_positive_install_package_group(session, default_location, vm): :expectedresults: Package group was successfully installed :parametrized: yes - - :CaseLevel: System """ with session: session.location.select(default_location.name) @@ -597,8 +574,6 @@ def test_positive_remove_package_group(session, default_location, vm): :expectedresults: Package group was successfully removed :parametrized: yes - - :CaseLevel: System """ with session: session.location.select(default_location.name) @@ -628,7 +603,7 @@ def 
test_positive_remove_package_group(session, default_location, vm): indirect=True, ) def test_positive_search_errata_non_admin( - session, default_location, vm, test_name, default_viewer_role + default_location, vm, test_name, default_viewer_role, module_target_sat ): """Search for host's errata by non-admin user with enough permissions @@ -642,11 +617,9 @@ def test_positive_search_errata_non_admin( listed :parametrized: yes - - :CaseLevel: System """ vm.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') - with Session( + with module_target_sat.ui_session( test_name, user=default_viewer_role.login, password=default_viewer_role.password ) as session: session.location.select(default_location.name) @@ -697,8 +670,6 @@ def test_positive_ensure_errata_applicability_with_host_reregistered(session, de :BZ: 1463818 :parametrized: yes - - :CaseLevel: System """ vm.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') result = vm.run(f'rpm -q {FAKE_1_CUSTOM_PACKAGE}') @@ -757,8 +728,6 @@ def test_positive_host_re_registration_with_host_rename( :BZ: 1762793 :parametrized: yes - - :CaseLevel: System """ vm.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') result = vm.run(f'rpm -q {FAKE_1_CUSTOM_PACKAGE}') @@ -797,7 +766,9 @@ def test_positive_host_re_registration_with_host_rename( ], indirect=True, ) -def test_positive_check_ignore_facts_os_setting(session, default_location, vm, module_org, request): +def test_positive_check_ignore_facts_os_setting( + session, default_location, vm, module_org, request, module_target_sat +): """Verify that 'Ignore facts for operating system' setting works properly @@ -826,15 +797,13 @@ def test_positive_check_ignore_facts_os_setting(session, default_location, vm, m :BZ: 1155704 :parametrized: yes - - :CaseLevel: System """ major = str(gen_integer(15, 99)) minor = str(gen_integer(1, 9)) expected_os = f'RedHat {major}.{minor}' - set_ignore_facts_for_os(False) + set_ignore_facts_for_os(module_target_sat, False) host = ( - entities.Host() + module_target_sat.api.Host() .search(query={'search': f'name={vm.hostname} and organization_id={module_org.id}'})[0] .read() ) @@ -843,7 +812,7 @@ def test_positive_check_ignore_facts_os_setting(session, default_location, vm, m # Get host current operating system value os = session.contenthost.read(vm.hostname, widget_names='details')['details']['os'] # Change necessary setting to true - set_ignore_facts_for_os(True) + set_ignore_facts_for_os(module_target_sat, True) # Add cleanup function to roll back setting to default value request.addfinalizer(set_ignore_facts_for_os) # Read all facts for corresponding host @@ -860,7 +829,7 @@ def test_positive_check_ignore_facts_os_setting(session, default_location, vm, m # Check that host OS was not changed due setting was set to true assert os == updated_os # Put it to false and re-run the process - set_ignore_facts_for_os(False) + set_ignore_facts_for_os(module_target_sat, False) host.upload_facts(data={'name': vm.hostname, 'facts': facts}) session.contenthost.search('') updated_os = session.contenthost.read(vm.hostname, widget_names='details')['details']['os'] @@ -896,15 +865,13 @@ def test_positive_virt_who_hypervisor_subscription_status( :BZ: 1336924, 1860928 :parametrized: yes - - :CaseLevel: System """ - org = entities.Organization().create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = target_sat.api.Organization().create() + lce = target_sat.api.LifecycleEnvironment(organization=org).create() # TODO move this to either hack around virt-who service or use an env-* compute 
resource provisioning_server = settings.libvirt.libvirt_hostname # Create a new virt-who config - virt_who_config = make_virt_who_config( + virt_who_config = target_sat.cli_factory.virt_who_config( { 'organization-id': org.id, 'hypervisor-type': VIRT_WHO_HYPERVISOR_TYPES['libvirt'], @@ -971,7 +938,9 @@ def test_positive_virt_who_hypervisor_subscription_status( ], indirect=True, ) -def test_module_stream_actions_on_content_host(session, default_location, vm_module_streams): +def test_module_stream_actions_on_content_host( + session, default_location, vm_module_streams, module_target_sat +): """Check remote execution for module streams actions e.g. install, remove, disable works on content host. Verify that correct stream module stream get installed/removed. @@ -981,12 +950,10 @@ def test_module_stream_actions_on_content_host(session, default_location, vm_mod :expectedresults: Remote execution for module actions should succeed. :parametrized: yes - - :CaseLevel: System """ stream_version = '5.21' run_remote_command_on_content_host('dnf -y upload-profile', vm_module_streams) - entities.Parameter( + module_target_sat.api.Parameter( name='remote_execution_connect_by_ip', value='Yes', parameter_type='boolean', @@ -1010,7 +977,8 @@ def test_module_stream_actions_on_content_host(session, default_location, vm_mod ) assert module_stream[0]['Name'] == FAKE_2_CUSTOM_PACKAGE_NAME assert module_stream[0]['Stream'] == stream_version - assert 'Enabled' and 'Installed' in module_stream[0]['Status'] + assert 'Enabled' in module_stream[0]['Status'] + assert 'Installed' in module_stream[0]['Status'] # remove Module Stream result = session.contenthost.execute_module_stream_action( @@ -1102,8 +1070,6 @@ def test_module_streams_customize_action(session, default_location, vm_module_st :expectedresults: Remote execution for module actions should be succeed. - :CaseLevel: System - :parametrized: yes :CaseImportance: Medium @@ -1170,8 +1136,6 @@ def test_install_modular_errata(session, default_location, vm_module_streams): :expectedresults: Modular Errata should get installed on content host. :parametrized: yes - - :CaseLevel: System """ stream_version = '0' module_name = 'kangaroo' @@ -1251,8 +1215,6 @@ def test_module_status_update_from_content_host_to_satellite( :expectedresults: module stream status should get updated in Satellite :parametrized: yes - - :CaseLevel: System """ module_name = 'walrus' stream_version = '0.71' @@ -1318,8 +1280,6 @@ def test_module_status_update_without_force_upload_package_profile( :expectedresults: module stream status should get updated in Satellite - :CaseLevel: System - :parametrized: yes :CaseImportance: Medium @@ -1402,8 +1362,6 @@ def test_module_stream_update_from_satellite(session, default_location, vm_modul :expectedresults: module stream should get updated. 
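A quiet but real fix above: `assert 'Enabled' and 'Installed' in module_stream[0]['Status']` never tested the first flag, because a non-empty string literal is always truthy and `and` joins two separate expressions. A short demonstration:

status = 'Installed'                           # 'Enabled' is absent
assert ('Enabled' and 'Installed' in status)   # still passes: the bug
# The split form checks each flag independently:
assert 'Installed' in status
# assert 'Enabled' in status                   # would fail here, correctly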
:parametrized: yes - - :CaseLevel: System """ module_name = 'duck' stream_version = '0' @@ -1480,8 +1438,6 @@ def test_syspurpose_attributes_empty(session, default_location, vm_module_stream :expectedresults: Syspurpose attrs are empty, and syspurpose status is set as 'Not specified' - :CaseLevel: System - :parametrized: yes :CaseImportance: High @@ -1493,7 +1449,7 @@ def test_syspurpose_attributes_empty(session, default_location, vm_module_stream ] syspurpose_status = details['system_purpose_status'] assert syspurpose_status.lower() == 'not specified' - for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items(): + for spname in DEFAULT_SYSPURPOSE_ATTRIBUTES: assert details[spname] == '' @@ -1522,8 +1478,6 @@ def test_set_syspurpose_attributes_cli(session, default_location, vm_module_stre :expectedresults: Syspurpose attributes set for the content host - :CaseLevel: System - :parametrized: yes :CaseImportance: High @@ -1531,7 +1485,7 @@ def test_set_syspurpose_attributes_cli(session, default_location, vm_module_stre with session: session.location.select(default_location.name) # Set sypurpose attributes - for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items(): + for spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.values(): run_remote_command_on_content_host( f'syspurpose set-{spdata[0]} "{spdata[1]}"', vm_module_streams ) @@ -1569,18 +1523,16 @@ def test_unset_syspurpose_attributes_cli(session, default_location, vm_module_st :expectedresults: Syspurpose attributes are empty - :CaseLevel: System - :parametrized: yes :CaseImportance: High """ # Set sypurpose attributes... - for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items(): + for spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.values(): run_remote_command_on_content_host( f'syspurpose set-{spdata[0]} "{spdata[1]}"', vm_module_streams ) - for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items(): + for spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.values(): # ...and unset them. run_remote_command_on_content_host(f'syspurpose unset-{spdata[0]}', vm_module_streams) @@ -1589,7 +1541,7 @@ def test_unset_syspurpose_attributes_cli(session, default_location, vm_module_st details = session.contenthost.read(vm_module_streams.hostname, widget_names='details')[ 'details' ] - for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items(): + for spname in DEFAULT_SYSPURPOSE_ATTRIBUTES: assert details[spname] == '' @@ -1620,8 +1572,6 @@ def test_syspurpose_matched(session, default_location, vm_module_streams): :expectedresults: Syspurpose status is Matched - :CaseLevel: System - :parametrized: yes :CaseImportance: High @@ -1663,8 +1613,6 @@ def test_syspurpose_bulk_action(session, default_location, vm): :expectedresults: Syspurpose parameters are set and reflected on the host - :CaseLevel: System - :CaseImportance: High """ syspurpose_attributes = { @@ -1709,8 +1657,6 @@ def test_syspurpose_mismatched(session, default_location, vm_module_streams): :expectedresults: Syspurpose status is 'Mismatched' - :CaseLevel: System - :parametrized: yes :CaseImportance: High @@ -1727,7 +1673,7 @@ def test_syspurpose_mismatched(session, default_location, vm_module_streams): @pytest.mark.tier3 -def test_pagination_multiple_hosts_multiple_pages(session, module_host_template): +def test_pagination_multiple_hosts_multiple_pages(session, module_host_template, target_sat): """Create hosts to fill more than one page, sort on OS, check pagination. 
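The syspurpose loops above were tightened the same way: `for spname, spdata in DEFAULT_SYSPURPOSE_ATTRIBUTES.items()` unpacked a variable that was never used, so each loop now iterates only what it needs. Illustratively, assuming the same key-to-(field, value) mapping shape:

attrs = {'service_level': ('sla', 'Premium')}  # illustrative shape

for spname in attrs:                           # keys only
    assert isinstance(spname, str)
for spdata in attrs.values():                  # values only
    cmd = f'syspurpose set-{spdata[0]} "{spdata[1]}"'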
Search for hosts based on operating system and assert that more than one page @@ -1752,7 +1698,7 @@ def test_pagination_multiple_hosts_multiple_pages(session, module_host_template) # Create more than one page of fake hosts. Need two digits in name to ensure sort order. for count in range(host_num): host_name = f'test-{count + 1:0>2}' - make_fake_host( + target_sat.cli_factory.make_fake_host( { 'name': host_name, 'organization-id': module_host_template.organization.id, @@ -1784,7 +1730,7 @@ def test_pagination_multiple_hosts_multiple_pages(session, module_host_template) @pytest.mark.tier3 -def test_search_for_virt_who_hypervisors(session, default_location): +def test_search_for_virt_who_hypervisors(session, default_location, module_target_sat): """ Search the virt_who hypervisors with hypervisor=True or hypervisor=False. @@ -1796,11 +1742,9 @@ def test_search_for_virt_who_hypervisors(session, default_location): :customerscenario: true - :CaseLevel: System - :CaseImportance: Medium """ - org = entities.Organization().create() + org = module_target_sat.api.Organization().create() with session: session.organization.select(org.name) session.location.select(default_location.name) diff --git a/tests/foreman/ui/test_contentview.py b/tests/foreman/ui/test_contentview.py index 340dea318f6..356c17e3fe0 100644 --- a/tests/foreman/ui/test_contentview.py +++ b/tests/foreman/ui/test_contentview.py @@ -7,51 +7,47 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: ContentViews :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import datetime from random import randint -import pytest -from airgun.exceptions import InvalidElementStateException -from airgun.exceptions import NoSuchElementException +from airgun.exceptions import InvalidElementStateException, NoSuchElementException from airgun.session import Session from nailgun import entities from nailgun.entity_mixins import call_entity_method_with_timeout from navmazing import NavigationTriesExceeded from productmd.common import parse_nvra +import pytest from robottelo import constants from robottelo.cli.contentview import ContentView from robottelo.config import settings -from robottelo.constants import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import DEFAULT_ARCHITECTURE -from robottelo.constants import DEFAULT_CV -from robottelo.constants import DEFAULT_PTABLE -from robottelo.constants import ENVIRONMENT -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_9_YUM_SECURITY_ERRATUM_COUNT -from robottelo.constants import FILTER_CONTENT_TYPE -from robottelo.constants import FILTER_ERRATA_TYPE -from robottelo.constants import FILTER_TYPE -from robottelo.constants import PERMISSIONS -from robottelo.constants import PRDS -from robottelo.constants import REPO_TYPE -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + DEFAULT_ARCHITECTURE, + DEFAULT_CV, + DEFAULT_PTABLE, + ENVIRONMENT, + FAKE_0_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE, + FAKE_9_YUM_SECURITY_ERRATUM_COUNT, + FILTER_CONTENT_TYPE, + FILTER_ERRATA_TYPE, + FILTER_TYPE, + PERMISSIONS, + PRDS, + REPO_TYPE, + REPOS, + REPOSET, +) from robottelo.utils.datafactory import gen_string VERSION = 'Version 
1.0' @@ -77,8 +73,6 @@ def test_positive_add_custom_content(session): :expectedresults: Custom content can be seen in a view - :CaseLevel: Integration - :CaseImportance: Critical """ org = entities.Organization().create() @@ -95,7 +89,6 @@ def test_positive_add_custom_content(session): assert cv['repositories']['resources']['assigned'][0]['Name'] == repo_name -@pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_end_to_end(session, module_org, target_sat): @@ -113,8 +106,6 @@ def test_positive_end_to_end(session, module_org, target_sat): :expectedresults: content view is created, updated with repo publish and promoted to next selected env - :CaseLevel: Integration - :CaseImportance: High """ repo_name = gen_string('alpha') @@ -150,8 +141,6 @@ def test_positive_publish_version_changes_in_source_env(session, module_org): :expectedresults: Content view version is updated in source environment. - :CaseLevel: Integration - :CaseImportance: High """ lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -203,8 +192,6 @@ def test_positive_repo_count_for_composite_cv(session, module_org, target_sat): :BZ: 1431778 - :CaseLevel: Integration - :CaseImportance: High """ lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -252,8 +239,6 @@ def test_positive_create_composite( :expectedresults: Composite content views are created - :CaseLevel: System - :CaseImportance: High """ org = module_entitlement_manifest_org @@ -306,8 +291,6 @@ def test_positive_add_rh_content(session, function_entitlement_manifest_org, tar :expectedresults: RH Content can be seen in a view - :CaseLevel: Integration - :CaseImportance: Critical """ cv_name = gen_string('alpha') @@ -339,8 +322,6 @@ def test_positive_add_docker_repo(session, module_org, module_prod): :expectedresults: The repo is added to a non-composite content view - :CaseLevel: Integration - :CaseImportance: High """ content_view = entities.ContentView(composite=False, organization=module_org).create() @@ -363,8 +344,6 @@ def test_positive_add_docker_repos(session, module_org, module_prod): :expectedresults: The repos are added to a non-composite content view. - :CaseLevel: Integration - :CaseImportance: Low """ content_view = entities.ContentView(composite=False, organization=module_org).create() @@ -392,8 +371,6 @@ def test_positive_add_synced_docker_repo(session, module_org, module_prod): :expectedresults: Synchronized docker repository was successfully added to content view. - :CaseLevel: Integration - :CaseImportance: High """ content_view = entities.ContentView(composite=False, organization=module_org).create() @@ -418,8 +395,6 @@ def test_positive_add_docker_repo_to_ccv(session, module_org, module_prod): :expectedresults: The repository is added to a content view which is then added to a composite content view. - :CaseLevel: Integration - :CaseImportance: High """ content_view = entities.ContentView(composite=False, organization=module_org).create() @@ -446,8 +421,6 @@ def test_positive_add_docker_repos_to_ccv(session, module_org, module_prod): :expectedresults: The repository is added to a random number of content views which are then added to a composite content view. - :CaseLevel: Integration - :CaseImportance: Low """ cvs = [] @@ -481,8 +454,6 @@ def test_positive_publish_with_docker_repo(session, module_org, module_prod): :expectedresults: The repo is added to a content view which is then successfully published. 
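The docker tests above and below share one repository-creation shape whose keyword arguments fall outside the diff context. The sketch below is a plausible reconstruction from the constants imported at the top of this file, not the verbatim elided code:

    # Hedged sketch: a container repository pointed at the registry-hub
    # constant, synced, attached to a content view, and published via nailgun.
    repo = entities.Repository(
        url=CONTAINER_REGISTRY_HUB,
        product=module_prod,
        content_type=REPO_TYPE['docker'],
        docker_upstream_name=CONTAINER_UPSTREAM_NAME,
    ).create()
    repo.sync()
    content_view = entities.ContentView(composite=False, organization=module_org).create()
    content_view.repository = [repo]
    content_view = content_view.update(['repository'])
    content_view.publish()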
- :CaseLevel: Integration - :CaseImportance: High """ content_view = entities.ContentView(composite=False, organization=module_org).create() @@ -507,8 +478,6 @@ def test_positive_publish_with_docker_repo_composite(session, module_org, module which is then published only once and then added to a composite content view which is also published only once. - :CaseLevel: Integration - :CaseImportance: High """ repo = entities.Repository( @@ -536,8 +505,6 @@ def test_positive_publish_multiple_with_docker_repo(session, module_org, module_ :expectedresults: Content view with docker repo is successfully published multiple times. - :CaseLevel: Integration - :CaseImportance: Low """ repo = entities.Repository( @@ -561,8 +528,6 @@ def test_positive_publish_multiple_with_docker_repo_composite(session, module_or :expectedresults: Composite content view with docker repo is successfully published multiple times. - :CaseLevel: Integration - :CaseImportance: Low """ repo = entities.Repository( @@ -590,8 +555,6 @@ def test_positive_promote_with_docker_repo(session, module_org, module_prod): :expectedresults: Docker repository is promoted to content view found in the specific lifecycle-environment. - :CaseLevel: Integration - :CaseImportance: High """ lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -618,8 +581,6 @@ def test_positive_promote_multiple_with_docker_repo(session, module_org, module_ :expectedresults: Docker repository is promoted to content view found in the specific lifecycle-environments. - :CaseLevel: Integration - :CaseImportance: Low """ repo = entities.Repository( @@ -647,8 +608,6 @@ def test_positive_promote_with_docker_repo_composite(session, module_org, module :expectedresults: Docker repository is promoted to content view found in the specific lifecycle-environment. - :CaseLevel: Integration - :CaseImportance: High """ lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -681,8 +640,6 @@ def test_positive_promote_multiple_with_docker_repo_composite(session, module_or :expectedresults: Docker repository is promoted to content view found in the specific lifecycle-environments. - :CaseLevel: Integration - :CaseImportance: Low """ repo = entities.Repository( @@ -713,8 +670,6 @@ def test_negative_add_components_to_non_composite(session): :expectedresults: User cannot add components to the view - :CaseLevel: Integration - :CaseImportance: Low """ cv1_name = gen_string('alpha') @@ -743,8 +698,6 @@ def test_positive_add_unpublished_cv_to_composite(session): :expectedresults: Non-composite content view is added to composite one - :CaseLevel: Integration - :CaseImportance: Low :BZ: 1367123 @@ -787,8 +740,6 @@ def test_positive_add_non_composite_cv_to_composite(session): composite content view. 3. 
Composite content view is successfully published - :CaseLevel: Integration - :BZ: 1367123 :CaseImportance: High @@ -834,8 +785,6 @@ def test_positive_check_composite_cv_addition_list_versions(session): :expectedresults: second non-composite content view version should be listed as default one to be added to composite view - :CaseLevel: Integration - :BZ: 1411074 :CaseImportance: Low @@ -873,8 +822,6 @@ def test_negative_add_dupe_repos(session, module_org, target_sat): :expectedresults: User cannot add repos multiple times to the view - :CaseLevel: Integration - :CaseImportance: Low """ cv_name = gen_string('alpha') @@ -901,8 +848,6 @@ def test_positive_publish_with_custom_content(session, module_org, target_sat): :expectedresults: Content view can be published - :CaseLevel: Integration - :CaseImportance: Critical """ repo_name = gen_string('alpha') @@ -930,8 +875,6 @@ def test_positive_publish_with_rh_content(session, function_entitlement_manifest :expectedresults: Content view can be published - :CaseLevel: Integration - :CaseImportance: Critical """ cv_name = gen_string('alpha') @@ -970,8 +913,6 @@ def test_positive_publish_composite_with_custom_content( :expectedresults: Composite content view can be published - :CaseLevel: Integration - :CaseImportance: High """ cv1_name = gen_string('alpha') @@ -1062,8 +1003,6 @@ def test_positive_publish_version_changes_in_target_env(session, module_org, tar :expectedresults: Content view version is updated in target environment. - :CaseLevel: Integration - :CaseImportance: High """ cv_name = gen_string('alpha') @@ -1114,8 +1053,6 @@ def test_positive_promote_with_custom_content(session, module_org, target_sat): :expectedresults: Content view can be promoted - :CaseLevel: Integration - :BZ: 1361793 :CaseImportance: Critical @@ -1159,8 +1096,6 @@ def test_positive_promote_with_rh_content(session, function_entitlement_manifest :expectedresults: Content view can be promoted - :CaseLevel: System - :CaseImportance: Critical """ cv_name = gen_string('alpha') @@ -1202,8 +1137,6 @@ def test_positive_promote_composite_with_custom_content( :expectedresults: Composite content view can be promoted - :CaseLevel: Integration - :CaseImportance: High """ cv1_name = gen_string('alpha') @@ -1334,8 +1267,6 @@ def test_negative_add_same_package_filter_twice(session, module_org, target_sat) :expectedresults: Same package filter can not be added again - :CaseLevel: Integration - :CaseImportance: High """ cv_name = gen_string('alpha') @@ -1381,8 +1312,6 @@ def test_positive_remove_cv_version_from_default_env(session, module_org, target :expectedresults: content view version is removed from Library environment - :CaseLevel: Integration - :CaseImportance: Critical """ cv_name = gen_string('alpha') @@ -1424,8 +1353,6 @@ def test_positive_remove_promoted_cv_version_from_default_env(session, module_or 1. Content view version exist only in DEV and not in Library 2. 
The yum repos exist in the content view version - :CaseLevel: Integration - :CaseImportance: High """ repo = target_sat.cli_factory.RepositoryCollection( @@ -1470,8 +1397,6 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(session, module :expectedresults: Content view version exists only in DEV, QE and not in Library - :CaseLevel: Integration - :CaseImportance: Low """ dev_lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -1541,8 +1466,6 @@ def test_positive_remove_cv_version_from_env(session, module_org, repos_collecti :expectedresults: Content view version exists in Library, DEV, QE - :CaseLevel: Integration - :CaseImportance: High """ dev_lce = entities.LifecycleEnvironment(organization=module_org).create() @@ -1599,8 +1522,6 @@ def test_positive_delete_cv_promoted_to_multi_env(session, module_org, target_sa :expectedresults: The content view doesn't exist. - :CaseLevel: Integration - :CaseImportance: High """ repo = target_sat.cli_factory.RepositoryCollection( @@ -1634,8 +1555,6 @@ def test_positive_delete_composite_version(session, module_org, target_sat): :expectedresults: Deletion was performed successfully - :CaseLevel: Integration - :BZ: 1276479 :CaseImportance: High @@ -1672,8 +1591,6 @@ def test_positive_delete_non_default_version(session, target_sat): :expectedresults: Deletion was performed successfully - :CaseLevel: Integration - :CaseImportance: Critical """ repo_name = gen_string('alpha') @@ -1706,8 +1623,6 @@ def test_positive_delete_version_with_ak(session): :expectedresults: Delete operation was performed successfully - :CaseLevel: Integration - :CaseImportance: High """ org = entities.Organization().create() @@ -1747,8 +1662,6 @@ def test_positive_clone_within_same_env(session, module_org, target_sat): :BZ: 1461017 - :CaseLevel: Integration - :CaseImportance: High """ repo_name = gen_string('alpha') @@ -1780,8 +1693,6 @@ def test_positive_clone_within_diff_env(session, module_org, target_sat): :BZ: 1461017 - :CaseLevel: Integration - :CaseImportance: High """ repo_name = gen_string('alpha') @@ -1820,8 +1731,6 @@ def test_positive_remove_filter(session, module_org): :expectedresults: content views filter removed successfully - :CaseLevel: Integration - :CaseImportance: Low """ filter_name = gen_string('alpha') @@ -1849,8 +1758,6 @@ def test_positive_add_package_filter(session, module_org, target_sat): :expectedresults: content views filter created and selected packages can be added for inclusion - :CaseLevel: Integration - :CaseImportance: High """ packages = ( @@ -1893,8 +1800,6 @@ def test_positive_add_package_inclusion_filter_and_publish(session, module_org, :expectedresults: Package is included in content view version - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -1939,8 +1844,6 @@ def test_positive_add_package_exclusion_filter_and_publish(session, module_org, :expectedresults: Package is excluded from content view version - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -1986,8 +1889,6 @@ def test_positive_remove_package_from_exclusion_filter(session, module_org, targ :expectedresults: Package was successfully removed from content view filter and is present in next published content view version - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2035,8 +1936,6 @@ def test_positive_update_inclusive_filter_package_version(session, module_org, t :expectedresults: Version was updated, next content view
version contains package with updated version - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2063,7 +1962,8 @@ def test_positive_update_inclusive_filter_package_version(session, module_org, t cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '0.71' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '0.71' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '5.21') ) @@ -2084,7 +1984,8 @@ def test_positive_update_inclusive_filter_package_version(session, module_org, t cv.name, new_version, 'name = "{}" and version = "{}"'.format(package_name, '5.21') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '5.21' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '5.21' @pytest.mark.skip_if_open('BZ:2086957') @@ -2097,8 +1998,6 @@ def test_positive_update_exclusive_filter_package_version(session, module_org, t :expectedresults: Version was updated, next content view version contains package with updated version - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2125,7 +2024,8 @@ def test_positive_update_exclusive_filter_package_version(session, module_org, t cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '5.21') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '5.21' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '5.21' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) @@ -2146,7 +2046,8 @@ def test_positive_update_exclusive_filter_package_version(session, module_org, t cv.name, new_version, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '0.71' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '0.71' @pytest.mark.skip_if_open('BZ:2086957') @@ -2211,8 +2112,6 @@ def test_positive_edit_rh_custom_spin(session, target_sat): :expectedresults: edited content view save is successful and info is updated - :CaseLevel: System - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2272,8 +2171,6 @@ def test_positive_promote_with_rh_custom_spin(session, target_sat): :expectedresults: Content view can be promoted - :CaseLevel: Integration - :CaseImportance: Critical """ filter_name = gen_string('alpha') @@ -2364,8 +2261,6 @@ def test_positive_add_errata_filter(session, module_org, target_sat): :expectedresults: content views filter created and selected errata-id can be added for inclusion/exclusion - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2403,8 +2298,6 @@ def test_positive_add_module_stream_filter(session, module_org, target_sat): :expectedresults: content views filter created and selected module stream can be added for inclusion/exclusion - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2444,8 +2337,6 @@ def test_positive_add_package_group_filter(session, module_org, target_sat): :expectedresults: content views filter created and selected package groups can be added 
for inclusion/exclusion - :CaseLevel: Integration - :CaseImportance: Low """ filter_name = gen_string('alpha') @@ -2480,8 +2371,6 @@ def test_positive_update_filter_affected_repos(session, module_org, target_sat): version publishing only updated repos are affected by content view filter - :CaseLevel: Integration - :CaseImportance: High """ filter_name = gen_string('alpha') @@ -2516,7 +2405,8 @@ def test_positive_update_filter_affected_repos(session, module_org, target_sat): cv.name, VERSION, 'name = "{}" and version = "{}"'.format(repo1_package_name, '4.2.8') ) assert len(packages) == 1 - assert packages[0]['Name'] == repo1_package_name and packages[0]['Version'] == '4.2.8' + assert packages[0]['Name'] == repo1_package_name + assert packages[0]['Version'] == '4.2.8' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(repo1_package_name, '4.2.9') ) @@ -2529,7 +2419,8 @@ def test_positive_update_filter_affected_repos(session, module_org, target_sat): 'name = "{}" and version = "{}"'.format(repo2_package_name, '3.10.232'), ) assert len(packages) == 1 - assert packages[0]['Name'] == repo2_package_name and packages[0]['Version'] == '3.10.232' + assert packages[0]['Name'] == repo2_package_name + assert packages[0]['Version'] == '3.10.232' @pytest.mark.tier3 @@ -2544,8 +2435,6 @@ def test_positive_search_composite(session): :BZ: 1259374 - :CaseLevel: Integration - :CaseImportance: Low """ composite_name = gen_string('alpha') @@ -2578,8 +2467,6 @@ def test_positive_publish_with_repo_with_disabled_http(session, module_org, targ :BZ: 1355752 - :CaseLevel: Integration - :CaseImportance: Low """ repo_name = gen_string('alpha') @@ -2625,8 +2512,6 @@ def test_positive_subscribe_system_with_custom_content( :expectedresults: Systems can be subscribed to content view(s) - :CaseLevel: Integration - :parametrized: yes :CaseImportance: High @@ -2661,8 +2546,6 @@ def test_positive_delete_with_kickstart_repo_and_host_group( :BZ: 1417072 - :CaseLevel: Integration - :CaseImportance: High """ hg_name = gen_string('alpha') @@ -2738,8 +2621,6 @@ def test_positive_rh_mixed_content_end_to_end( :expectedresults: CV should be published and promoted with RH OSTree and all other contents. Then version is removed successfully. 
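The content setup this end-to-end flow depends on uses the same RepositoryCollection helpers that appear verbatim in the dashboard test later in this diff; a minimal sequence, with fixture names as used there:

    # Hedged sketch: seed the org/lifecycle environment with the collection's
    # repositories, then register a content host against them (the deprecated
    # katello-agent install is skipped, as elsewhere in this diff).
    repos_collection.setup_content(org.id, lce.id, upload_manifest=False)
    repos_collection.setup_virtual_machine(
        rhel_contenthost,
        location_title=module_location.name,
        install_katello_agent=False,
    )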
- :CaseLevel: System - :customerscenario: true :CaseImportance: High @@ -2794,7 +2675,7 @@ def test_positive_errata_inc_update_list_package(session, target_sat): :CaseImportance: High - :CaseLevel: Integration + """ org = entities.Organization().create() product = entities.Product(organization=org).create() @@ -2882,7 +2763,6 @@ def test_positive_composite_child_inc_update(session, rhel7_contenthost, target_ :parametrized: yes - :CaseLevel: Integration """ org = entities.Organization().create() lce = entities.LifecycleEnvironment(organization=org).create() @@ -2976,8 +2856,6 @@ def test_positive_module_stream_end_to_end(session, module_org, target_sat): :expectedresults: Content view works properly with module_streams and count shown should be correct - :CaseLevel: Integration - :CaseImportance: Medium """ repo_name = gen_string('alpha') @@ -3019,8 +2897,6 @@ def test_positive_search_module_streams_in_content_view(session, module_org, tar :expectedresults: Searching for module streams should work inside content view version - :CaseLevel: Integration - :CaseImportance: Low """ repo_name = gen_string('alpha') @@ -3040,10 +2916,8 @@ def test_positive_search_module_streams_in_content_view(session, module_org, tar f'name = "{module_stream}" and stream = "{module_version}"', ) assert len(module_streams) == 1 - assert ( - module_streams[0]['Name'] == module_stream - and module_streams[0]['Stream'] == module_version - ) + assert module_streams[0]['Name'] == module_stream + assert module_streams[0]['Stream'] == module_version @pytest.mark.tier2 @@ -3065,8 +2939,6 @@ def test_positive_non_admin_user_actions(session, module_org, test_name, target_ :BZ: 1461017 - :CaseLevel: Integration - :CaseImportance: Critical """ # note: the user to be created should not have permissions to access @@ -3156,8 +3028,6 @@ def test_positive_readonly_user_actions(module_org, test_name, target_sat): :expectedresults: User with read-only role for content view can view the repository in the content view - :CaseLevel: Integration - :CaseImportance: Critical """ user_login = gen_string('alpha') @@ -3210,8 +3080,6 @@ def test_negative_read_only_user_actions(session, module_org, test_name, target_ :BZ: 1922134 - :CaseLevel: Integration - :CaseImportance: Critical """ # create a content view read only user with lifecycle environment @@ -3314,8 +3182,6 @@ def test_negative_non_readonly_user_actions(module_org, test_name, target_sat): :expectedresults: the user cannot access content views web resources - :CaseLevel: Integration - :CaseImportance: High """ user_login = gen_string('alpha') @@ -3736,7 +3602,6 @@ def test_positive_no_duplicate_key_violate_unique_constraint_using_filters( :CaseImportance: Medium - :CaseLevel: Integration """ cv = gen_string('alpha') filter_name = gen_string('alpha') diff --git a/tests/foreman/ui/test_dashboard.py b/tests/foreman/ui/test_dashboard.py index f08465319fb..9be16d4e6a9 100644 --- a/tests/foreman/ui/test_dashboard.py +++ b/tests/foreman/ui/test_dashboard.py @@ -4,38 +4,31 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Dashboard :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from airgun.session import Session -from nailgun import entities from nailgun.entity_mixins import TaskFailedError +import pytest from robottelo.config import settings -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE +from robottelo.constants import FAKE_7_CUSTOM_PACKAGE from robottelo.utils.datafactory import gen_string from 
robottelo.utils.issue_handlers import is_open @pytest.mark.tier2 -def test_positive_host_configuration_status(session): +def test_positive_host_configuration_status(session, target_sat): """Check if the Host Configuration Status Widget links are working :id: ffb0a6a1-2b65-4578-83c7-61492122d865 :customerscenario: true - :Steps: + :steps: 1. Navigate to Monitor -> Dashboard 2. Review the Host Configuration Status @@ -45,12 +38,10 @@ def test_positive_host_configuration_status(session): :expectedresults: Each link shows the right info :BZ: 1631219 - - :CaseLevel: Integration """ - org = entities.Organization().create() - loc = entities.Location().create() - host = entities.Host(organization=org, location=loc).create() + org = target_sat.api.Organization().create() + loc = target_sat.api.Location().create() + host = target_sat.api.Host(organization=org, location=loc).create() criteria_list = [ 'Hosts that had performed modifications without error', 'Hosts in error state', @@ -86,7 +77,7 @@ def test_positive_host_configuration_status(session): else: assert dashboard_values['status_list'][criteria] == 0 - for criteria, search in zip(criteria_list, search_strings_list): + for criteria, search in zip(criteria_list, search_strings_list, strict=True): if criteria == 'Hosts with no reports': session.dashboard.action({'HostConfigurationStatus': {'status_list': criteria}}) values = session.host.read_all() @@ -101,24 +92,22 @@ def test_positive_host_configuration_status(session): @pytest.mark.tier2 -def test_positive_host_configuration_chart(session): +def test_positive_host_configuration_chart(session, target_sat): """Check if the Host Configuration Chart is working in the Dashboard UI :id: b03314aa-4394-44e5-86da-c341c783003d - :Steps: + :steps: 1. Navigate to Monitor -> Dashboard 2. Review the Host Configuration Chart widget 3. Check that chart contains correct percentage value :expectedresults: Chart showing correct data - - :CaseLevel: Integration """ - org = entities.Organization().create() - loc = entities.Location().create() - entities.Host(organization=org, location=loc).create() + org = target_sat.api.Organization().create() + loc = target_sat.api.Location().create() + target_sat.api.Host(organization=org, location=loc).create() with session: session.organization.select(org_name=org.name) session.location.select(loc_name=loc.name) @@ -129,13 +118,13 @@ def test_positive_host_configuration_chart(session): @pytest.mark.upgrade @pytest.mark.run_in_one_thread @pytest.mark.tier2 -def test_positive_task_status(session): +def test_positive_task_status(session, target_sat): """Check if the Task Status is working in the Dashboard UI and filter from Tasks index page is working correctly :id: fb667d6a-7255-4341-9f79-2f03d19e8e0f - :Steps: + :steps: 1. Navigate to Monitor -> Dashboard 2. 
Review the Latest Warning/Error Tasks widget @@ -148,13 +137,13 @@ def test_positive_task_status(session): from Tasks dashboard :BZ: 1718889 - - :CaseLevel: Integration """ url = 'www.non_existent_repo_url.org' - org = entities.Organization().create() - product = entities.Product(organization=org).create() - repo = entities.Repository(url=f'http://{url}', product=product, content_type='yum').create() + org = target_sat.api.Organization().create() + product = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository( + url=f'http://{url}', product=product, content_type='yum' + ).create() with pytest.raises(TaskFailedError): repo.sync() with session: @@ -186,30 +175,36 @@ def test_positive_task_status(session): @pytest.mark.upgrade +@pytest.mark.no_containers @pytest.mark.run_in_one_thread @pytest.mark.skip_if_not_set('clients') @pytest.mark.tier3 +@pytest.mark.rhel_ver_match('8') @pytest.mark.skipif((not settings.robottelo.repos_hosting_url), reason='Missing repos_hosting_url') @pytest.mark.parametrize( 'repos_collection', [ { - 'distro': 'rhel7', - 'SatelliteToolsRepository': {}, - 'YumRepository': {'url': settings.repos.yum_6.url}, - }, + 'distro': 'rhel8', + 'YumRepository': {'url': settings.repos.yum_3.url}, + } ], indirect=True, ) def test_positive_user_access_with_host_filter( - test_name, module_location, rhel7_contenthost, target_sat, repos_collection + test_name, + function_entitlement_manifest_org, + module_location, + rhel_contenthost, + target_sat, + repos_collection, ): """Check if user with necessary host permissions can access dashboard and required widgets are rendered with proper values :id: 24b4b371-cba0-4bc8-bc6a-294c62e0586d - :Steps: + :steps: 1. Specify proper filter with permission for your role 2. Create new user and assign role to it @@ -223,15 +218,13 @@ def test_positive_user_access_with_host_filter( :BZ: 1417114 :parametrized: yes - - :CaseLevel: System """ user_login = gen_string('alpha') user_password = gen_string('alphanumeric') - org = entities.Organization().create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = function_entitlement_manifest_org + lce = target_sat.api.LifecycleEnvironment(organization=org).create() # create a role with necessary permissions - role = entities.Role().create() + role = target_sat.api.Role().create() user_permissions = { 'Organization': ['view_organizations'], 'Location': ['view_locations'], @@ -240,7 +233,7 @@ def test_positive_user_access_with_host_filter( } target_sat.api_factory.create_role_permissions(role, user_permissions) # create a user and assign the above created role - entities.User( + target_sat.api.User( default_organization=org, organization=[org], default_location=module_location, @@ -249,35 +242,36 @@ def test_positive_user_access_with_host_filter( login=user_login, password=user_password, ).create() - with Session(test_name, user=user_login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user_login, password=user_password) as session: assert session.dashboard.read('HostConfigurationStatus')['total_count'] == 0 assert len(session.dashboard.read('LatestErrata')['erratas']) == 0 - repos_collection.setup_content(org.id, lce.id, upload_manifest=True) + rhel_contenthost.add_rex_key(target_sat) + repos_collection.setup_content(org.id, lce.id, upload_manifest=False) repos_collection.setup_virtual_machine( - rhel7_contenthost, location_title=module_location.name + rhel_contenthost, location_title=module_location.name, 
install_katello_agent=False ) - result = rhel7_contenthost.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') + rhel_contenthost.run('subscription-manager repos --enable "*"') + result = rhel_contenthost.run(f'yum install -y {FAKE_7_CUSTOM_PACKAGE}') assert result.status == 0 - hostname = rhel7_contenthost.hostname + hostname = rhel_contenthost.hostname # Check UI for values assert session.host.search(hostname)[0]['Name'] == hostname hosts_values = session.dashboard.read('HostConfigurationStatus') assert hosts_values['total_count'] == 1 errata_values = session.dashboard.read('LatestErrata')['erratas'] - assert len(errata_values) == 1 - assert errata_values[0]['Type'] == 'security' - assert settings.repos.yum_6.errata[2] in errata_values[0]['Errata'] + assert len(errata_values) == 2 + assert 'security' in [e['Type'] for e in errata_values] + assert settings.repos.yum_3.errata[25] in [e['Errata'].split()[0] for e in errata_values] @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync_overview_widget(session, module_org, module_product): - +def test_positive_sync_overview_widget(session, module_product, module_target_sat): """Check if the Sync Overview widget is working in the Dashboard UI :id: 553fbe33-0f6f-46fb-8d80-5d1d9ed483cf - :Steps: + :steps: 1. Sync some repositories 2. Navigate to Monitor -> Dashboard 3. Review the Sync Overview widget @@ -285,10 +279,10 @@ def test_positive_sync_overview_widget(session, module_org, module_product): :expectedresults: Correct data should appear in the widget :BZ: 1995424 - - :CaseLevel: Integration """ - repo = entities.Repository(url=settings.repos.yum_1.url, product=module_product).create() + repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=module_product + ).create() with session: session.repository.synchronize(module_product.name, repo.name) sync_params = session.dashboard.read('SyncOverview')['syncs'] diff --git a/tests/foreman/ui/test_discoveredhost.py b/tests/foreman/ui/test_discoveredhost.py index f694e6a86a2..64476188f89 100644 --- a/tests/foreman/ui/test_discoveredhost.py +++ b/tests/foreman/ui/test_discoveredhost.py @@ -8,78 +8,21 @@ :Team: Rocket -:TestType: Functional - -:CaseLevel: System - -:Upstream: No """ -import pytest -from fauxfactory import gen_ipaddr from fauxfactory import gen_string -from nailgun import entities +import pytest +from wait_for import wait_for from robottelo.utils import ssh pytestmark = [pytest.mark.run_in_one_thread] -@pytest.fixture(scope='module') -def discovery_org(module_org): - # Update default discovered host organization - discovery_org = entities.Setting().search(query={'search': 'name="discovery_organization"'})[0] - default_discovery_org = discovery_org.value - discovery_org.value = module_org.name - discovery_org.update(['value']) - yield module_org - discovery_org.value = default_discovery_org - discovery_org.update(['value']) - - -@pytest.fixture(scope='module') -def discovery_location(module_location): - # Update default discovered host location - discovery_loc = entities.Setting().search(query={'search': 'name="discovery_location"'})[0] - default_discovery_loc = discovery_loc.value - discovery_loc.value = module_location.name - discovery_loc.update(['value']) - yield module_location - discovery_loc.value = default_discovery_loc - discovery_loc.update(['value']) - - -@pytest.fixture(scope='module') -def provisioning_env(module_target_sat, discovery_org, discovery_location): - # 
Build PXE default template to get default PXE file - module_target_sat.cli.ProvisioningTemplate().build_pxe_default() - return module_target_sat.api_factory.configure_provisioning( - org=discovery_org, - loc=discovery_location, - os='Redhat {}'.format(module_target_sat.cli_factory.RHELRepository().repo_data['version']), - ) - - @pytest.fixture def discovered_host(target_sat): return target_sat.api_factory.create_discovered_host() -@pytest.fixture(scope='module') -def module_host_group(discovery_org, discovery_location): - host = entities.Host(organization=discovery_org, location=discovery_location) - host.create_missing() - return entities.HostGroup( - organization=[discovery_org], - location=[discovery_location], - medium=host.medium, - root_pass=gen_string('alpha'), - operatingsystem=host.operatingsystem, - ptable=host.ptable, - domain=host.domain, - architecture=host.architecture, - ).create() - - def _is_host_reachable(host, retries=12, iteration_sleep=5, expect_reachable=True): """Helper to ensure given IP/hostname is reachable or not. The return value depends on the value of expect_reachable. @@ -97,57 +40,27 @@ def _is_host_reachable(host, retries=12, iteration_sleep=5, expect_reachable=Tru result = ssh.command(cmd.format(retries, host, operator, iteration_sleep)) if expect_reachable: return not result.status - else: - return bool(result.status) - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_pxe_based_discovery(session, provisioning_env): - """Discover a host via PXE boot by setting "proxy.type=proxy" in - PXE default - - :id: 43a8857d-2f08-436e-97fb-ffec6a0c84dd - - :Setup: Provisioning should be configured - - :Steps: PXE boot a host/VM - - :expectedresults: Host should be successfully discovered - - :BZ: 1731112 - - :CaseImportance: Critical - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_pxe_less_with_dhcp_unattended(session, provisioning_env): - """Discover a host with dhcp via bootable discovery ISO by setting - "proxy.type=proxy" in PXE default in unattended mode. - - :id: fc13167f-6fa0-4fe5-8584-7716292866ce - - :Setup: Provisioning should be configured - - :Steps: Boot a host/VM using modified discovery ISO. - - :expectedresults: Host should be successfully discovered - - :BZ: 1731112 - - :CaseImportance: Critical - """ + return bool(result.status) @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_provision_using_quick_host_button( - session, discovery_org, discovery_location, discovered_host, module_host_group +@pytest.mark.on_premises_provisioning +@pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.rhel_ver_match('9') +def test_positive_provision_pxe_host( + request, + session, + module_location, + module_org, + module_provisioning_rhel_content, + module_discovery_sat, + provisioning_host, + provisioning_hostgroup, + pxe_loader, ): - """Associate hostgroup while provisioning a discovered host from - host properties model window and select quick host. + """Provision a PXE-based discovered host :id: 34c1e9ea-f210-4a1e-aead-421eb962643b @@ -156,22 +69,40 @@ def test_positive_provision_using_quick_host_button( 1. Host should already be discovered 2. Hostgroup should already be created with all required entities. - :expectedresults: Host should be quickly provisioned and entry from
:BZ: 1728306, 1731112 :CaseImportance: High """ - discovered_host_name = discovered_host['name'] - domain_name = module_host_group.domain.read().name + sat = module_discovery_sat.sat + provisioning_host.power_control(ensure=False) + mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=1500, + delay=20, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + + discovered_host_name = discovered_host.name + domain_name = provisioning_hostgroup.domain.read().name host_name = f'{discovered_host_name}.{domain_name}' + + # Teardown + request.addfinalizer(lambda: sat.provisioning_cleanup(host_name)) + with session: session.discoveredhosts.provision( discovered_host_name, - module_host_group.name, - discovery_org.name, - discovery_location.name, + provisioning_hostgroup.name, + module_org.name, + module_location.name, ) values = session.host.get_details(host_name) assert values['properties']['properties_table']['Status'] == 'OK' @@ -180,7 +111,7 @@ def test_positive_provision_using_quick_host_button( @pytest.mark.tier3 def test_positive_update_name( - session, discovery_org, discovery_location, module_host_group, discovered_host + session, discovery_org, discovery_location, module_discovery_hostgroup, discovered_host ): """Update the discovered host name and provision it @@ -196,7 +127,7 @@ def test_positive_update_name( :CaseImportance: High """ discovered_host_name = discovered_host['name'] - domain_name = module_host_group.domain.read().name + domain_name = module_discovery_hostgroup.domain.read().name new_name = gen_string('alpha').lower() new_host_name = f'{new_name}.{domain_name}' with session: @@ -204,7 +135,7 @@ def test_positive_update_name( assert discovered_host_values['Name'] == discovered_host_name session.discoveredhosts.provision( discovered_host_name, - module_host_group.name, + module_discovery_hostgroup.name, discovery_org.name, discovery_location.name, quick=False, @@ -218,10 +149,22 @@ def test_positive_update_name( @pytest.mark.tier3 @pytest.mark.upgrade +@pytest.mark.on_premises_provisioning +@pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) +@pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) +@pytest.mark.rhel_ver_match('9') def test_positive_auto_provision_host_with_rule( - session, discovery_org, discovery_location, module_host_group, target_sat + request, + session, + module_org, + module_location, + module_provisioning_rhel_content, + module_discovery_sat, + pxeless_discovery_host, + provisioning_hostgroup, + pxe_loader, ): - """Create a new discovery rule and automatically create host from discovered host using that + """Create a new discovery rule and automatically provision host from discovered host using that discovery rule. 
Set query as (e.g IP=IP_of_discovered_host) @@ -230,29 +173,47 @@ def test_positive_auto_provision_host_with_rule( :Setup: Host should already be discovered - :expectedresults: Host should reboot and provision + :expectedresults: Host should be successfully provisioned :BZ: 1665471, 1731112 :CaseImportance: High """ - host_ip = gen_ipaddr() - discovered_host_name = target_sat.api_factory.create_discovered_host(ip_address=host_ip)['name'] - domain = module_host_group.domain.read() - discovery_rule = entities.DiscoveryRule( - max_count=1, - hostgroup=module_host_group, - search_=f'ip = {host_ip}', - location=[discovery_location], - organization=[discovery_org], + sat = module_discovery_sat.sat + pxeless_discovery_host.power_control(ensure=False) + mac = pxeless_discovery_host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=1500, + delay=20, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + + discovered_host_name = discovered_host.name + domain_name = provisioning_hostgroup.domain.read().name + host_name = f'{discovered_host_name}.{domain_name}' + + # Teardown + request.addfinalizer(lambda: sat.provisioning_cleanup(host_name)) + + discovery_rule = sat.api.DiscoveryRule( + max_count=10, + hostgroup=provisioning_hostgroup, + search_=f'name = {discovered_host_name}', + location=[module_location], + organization=[module_org], ).create() with session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.discoveredhosts.apply_action('Auto Provision', [discovered_host_name]) - host_name = f'{discovered_host_name}.{domain.name}' assert session.host.search(host_name)[0]['Name'] == host_name host_values = session.host.get_details(host_name) assert host_values['properties']['properties_table']['Status'] == 'OK' - assert host_values['properties']['properties_table']['IP Address'] == host_ip assert ( host_values['properties']['properties_table']['Comment'] == f"Auto-discovered and provisioned via rule '{discovery_rule.name}'" @@ -297,10 +258,10 @@ def test_positive_update_default_taxonomies(session, discovery_org, discovery_lo :CaseImportance: High """ host_names = [target_sat.api_factory.create_discovered_host()['name'] for _ in range(2)] - new_org = entities.Organization().create() + new_org = target_sat.api.Organization().create() discovery_location.organization.append(new_org) discovery_location.update(['organization']) - new_loc = entities.Location(organization=[discovery_org]).create() + new_loc = target_sat.api.Location(organization=[discovery_org]).create() with session: values = session.discoveredhosts.search('name = "{}" or name = "{}"'.format(*host_names)) assert set(host_names) == {value['Name'] for value in values} @@ -320,21 +281,3 @@ def test_positive_update_default_taxonomies(session, discovery_org, discovery_lo assert set(host_names) == {value['Name'] for value in values} values = session.dashboard.read('DiscoveredHosts') assert len(values['hosts']) == 2 - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_reboot(session, provisioning_env): - """Reboot a discovered host. 
- - :id: 5edc6831-bfc8-4e69-9029-b4c0caa3ee32 - - :Setup: Host should already be discovered - - :expectedresults: Discovered host without provision is going to shutdown after reboot command - is passed. - - :BZ: 1731112 - - :CaseImportance: Medium - """ diff --git a/tests/foreman/ui/test_discoveryrule.py b/tests/foreman/ui/test_discoveryrule.py index 1e7f21285d4..83ab23cbcc2 100644 --- a/tests/foreman/ui/test_discoveryrule.py +++ b/tests/foreman/ui/test_discoveryrule.py @@ -4,75 +4,51 @@ :CaseAutomation: Automated -:CaseLevel: System - :CaseComponent: DiscoveryPlugin :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from fauxfactory import gen_integer, gen_ipaddr, gen_string import pytest -from airgun.session import Session -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr -from fauxfactory import gen_string -from nailgun import entities - - -@pytest.fixture(scope='module') -def manager_loc(): - return entities.Location().create() - - -@pytest.fixture(scope='module') -def module_org(): - return entities.Organization().create() @pytest.fixture -def module_discovery_env(module_org, module_location): - discovery_loc = entities.Setting().search(query={'search': 'name="discovery_location"'})[0] - default_discovery_loc = discovery_loc.value - discovery_loc.value = module_location.name - discovery_loc.update(['value']) - discovery_org = entities.Setting().search(query={'search': 'name="discovery_organization"'})[0] - default_discovery_org = discovery_org.value - discovery_org.value = module_org.name - discovery_org.update(['value']) +def module_discovery_env(module_org, module_location, module_target_sat): + discovery_loc = module_target_sat.update_setting('discovery_location', module_location.name) + discovery_org = module_target_sat.update_setting('discovery_organization', module_org.name) yield - discovery_loc.value = default_discovery_loc - discovery_loc.update(['value']) - discovery_org.value = default_discovery_org - discovery_org.update(['value']) + module_target_sat.update_setting('discovery_location', discovery_loc) + module_target_sat.update_setting('discovery_organization', discovery_org) -@pytest.fixture -def manager_user(manager_loc, module_location, module_org): - manager_role = entities.Role().search(query={'search': 'name="Discovery Manager"'})[0] +@pytest.fixture(scope='module') +def manager_user(module_location, module_org, module_target_sat): + manager_role = module_target_sat.api.Role().search( + query={'search': 'name="Discovery Manager"'} + )[0] password = gen_string('alphanumeric') - manager_user = entities.User( + manager_user = module_target_sat.api.User( login=gen_string('alpha'), role=[manager_role], password=password, - location=[module_location, manager_loc], + location=[module_location], organization=[module_org], ).create() manager_user.password = password return manager_user -@pytest.fixture -def reader_user(module_location, module_org): +@pytest.fixture(scope='module') +def reader_user(module_location, module_org, module_target_sat): password = gen_string('alphanumeric') - reader_role = entities.Role().search(query={'search': 'name="Discovery Reader"'})[0] - reader_user = entities.User( + # Applying Discovery reader_role to the user + role = module_target_sat.api.Role().search(query={'search': 'name="Discovery Reader"'})[0] + reader_user = module_target_sat.api.User( login=gen_string('alpha'), - role=[reader_role], + role=[role], password=password, organization=[module_org], location=[module_location], @@ 
-87,86 +63,68 @@ def gen_int32(min_value=1): @pytest.mark.tier2 -def test_positive_create_rule_with_non_admin_user(manager_loc, manager_user, module_org, test_name): - """Create rule with non-admin user by associating discovery_manager role +def test_positive_crud_with_non_admin_user( + module_location, manager_user, module_org, module_target_sat +): + """CRUD discovery rules with a non-admin user by associating the discovery_manager role :id: 6a03983b-363d-4646-b277-34af5f5abc55 - :expectedresults: Rule should be created successfully. - - :CaseLevel: Integration + :expectedresults: All CRUD operations should work with a non-admin user. """ - name = gen_string('alpha') + rule_name = gen_string('alpha') search = gen_string('alpha') - hg = entities.HostGroup(organization=[module_org]).create() - with Session(test_name, user=manager_user.login, password=manager_user.password) as session: - session.location.select(loc_name=manager_loc.name) + priority = str(gen_integer(1, 20)) + new_rule_name = gen_string('alpha') + new_search = gen_string('alpha') + new_hg_name = gen_string('alpha') + new_priority = str(gen_integer(101, 200)) + hg = module_target_sat.api.HostGroup(organization=[module_org]).create() + new_hg = module_target_sat.api.HostGroup(name=new_hg_name, organization=[module_org]).create() + with module_target_sat.ui_session( + user=manager_user.login, password=manager_user.password + ) as session: + session.location.select(loc_name=module_location.name) session.discoveryrule.create( { - 'primary.name': name, + 'primary.name': rule_name, 'primary.search': search, 'primary.host_group': hg.name, - 'primary.priority': gen_int32(), + 'primary.priority': priority, } ) - dr_val = session.discoveryrule.read(name, widget_names='primary') - assert dr_val['primary']['name'] == name - assert dr_val['primary']['host_group'] == hg.name - - -@pytest.mark.tier2 -def test_positive_delete_rule_with_non_admin_user(manager_loc, manager_user, module_org, test_name): - """Delete rule with non-admin user by associating discovery_manager role - - :id: 7fa56bab-82d7-46c9-a4fa-c44ef173c703 - - :expectedresults: Rule should be deleted successfully. - - :CaseLevel: Integration - """ - hg = entities.HostGroup(organization=[module_org]).create() - dr = entities.DiscoveryRule( - hostgroup=hg, organization=[module_org], location=[manager_loc] - ).create() - with Session(test_name, user=manager_user.login, password=manager_user.password) as session: - dr_val = session.discoveryrule.read_all() - assert dr.name in [rule['Name'] for rule in dr_val] - session.discoveryrule.delete(dr.name) - dr_val = session.discoveryrule.read_all() - assert dr.name not in [rule['Name'] for rule in dr_val] - - -@pytest.mark.tier2 -def test_positive_view_existing_rule_with_non_admin_user( - module_location, module_org, reader_user, test_name -): - """Existing rule should be viewed to non-admin user by associating - discovery_reader role. - - :id: 0f5b0221-43be-47bc-8619-749824c4e54f - - :Steps: - - 1. create a rule with admin user - 2. create a non-admin user and assign 'Discovery Reader' role - 3. Login with non-admin user - :expectedresults: Rule should be visible to non-admin user.
+ values = session.discoveryrule.read(rule_name, widget_names='primary') + assert values['primary']['name'] == rule_name + assert values['primary']['search'] == search + assert values['primary']['host_group'] == hg.name + assert values['primary']['priority'] == priority + session.discoveryrule.update( + rule_name, + { + 'primary.name': new_rule_name, + 'primary.search': new_search, + 'primary.host_group': new_hg.name, + 'primary.priority': new_priority, + }, + ) + values = session.discoveryrule.read( + new_rule_name, + widget_names='primary', + ) + assert values['primary']['name'] == new_rule_name + assert values['primary']['search'] == new_search + assert values['primary']['host_group'] == new_hg.name + assert values['primary']['priority'] == new_priority - :CaseLevel: Integration - """ - hg = entities.HostGroup(organization=[module_org]).create() - dr = entities.DiscoveryRule( - hostgroup=hg, organization=[module_org], location=[module_location] - ).create() - with Session(test_name, user=reader_user.login, password=reader_user.password) as session: + session.discoveryrule.delete(new_rule_name) dr_val = session.discoveryrule.read_all() - assert dr.name in [rule['Name'] for rule in dr_val] + assert new_rule_name not in [rule['Name'] for rule in dr_val] @pytest.mark.tier2 def test_negative_delete_rule_with_non_admin_user( - module_location, module_org, reader_user, test_name + module_location, module_org, module_target_sat, reader_user ): """Delete rule with non-admin user by associating discovery_reader role @@ -174,15 +132,24 @@ def test_negative_delete_rule_with_non_admin_user( :expectedresults: User should get a validation error and the rule should not be deleted. - - :CaseLevel: Integration """ - hg = entities.HostGroup(organization=[module_org]).create() - dr = entities.DiscoveryRule( - hostgroup=hg, organization=[module_org], location=[module_location] + hg_name = gen_string('alpha') + rule_name = gen_string('alpha') + search = gen_string('alpha') + hg = module_target_sat.api.HostGroup( + name=hg_name, organization=[module_org], location=[module_location] + ).create() + dr = module_target_sat.api.DiscoveryRule( + name=rule_name, + search_=search, + hostgroup=hg.id, + organization=[module_org], + location=[module_location], ).create() - with Session(test_name, user=reader_user.login, password=reader_user.password) as session: - with pytest.raises(ValueError): + with module_target_sat.ui_session( + user=reader_user.login, password=reader_user.password + ) as session: + with pytest.raises(ValueError): # noqa: PT011 - TODO Adarsh determine better exception session.discoveryrule.delete(dr.name) dr_val = session.discoveryrule.read_all() assert dr.name in [rule['Name'] for rule in dr_val] @@ -199,7 +166,7 @@ def test_positive_list_host_based_on_rule_search_query( :id: f7473fa2-7349-42d3-9cdb-f74b55d2f440 - :Steps: + :steps: 1. discovered host with cpu_count = 2 2.
Define a rule 'rule1' with search query cpu_count = 2 @@ -218,8 +185,8 @@ cpu_count = gen_integer(2, 10) rule_search = f'cpu_count = {cpu_count}' # create a host anyway to be sure that this org has more than one host - host = entities.Host(organization=module_org, location=module_location).create() - host_group = entities.HostGroup( + host = target_sat.api.Host(organization=module_org, location=module_location).create() + host_group = target_sat.api.HostGroup( organization=[module_org], location=[module_location], medium=host.medium, @@ -229,7 +196,7 @@ domain=host.domain, architecture=host.architecture, ).create() - discovery_rule = entities.DiscoveryRule( + discovery_rule = target_sat.api.DiscoveryRule( hostgroup=host_group, search_=rule_search, organization=[module_org], @@ -241,8 +208,10 @@ ) # create another discovered host with another cpu count target_sat.api_factory.create_discovered_host(options={'physicalprocessorcount': cpu_count + 1}) - provisioned_host_name = '{}.{}'.format(discovered_host['name'], host.domain.read().name) + provisioned_host_name = f'{host.domain.read().name}' with session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) values = session.discoveryrule.read_all() assert discovery_rule.name in [rule['Name'] for rule in values] values = session.discoveryrule.read_discovered_hosts(discovery_rule.name) @@ -251,19 +220,20 @@ assert values['table'][0]['IP Address'] == ip_address assert values['table'][0]['CPUs'] == str(cpu_count) # auto provision the discovered host - session.discoveredhosts.apply_action('Auto Provision', [discovered_host['name']]) - assert not session.discoveredhosts.search('name = "{}"'.format(discovered_host['name'])) + result = target_sat.api.DiscoveredHost(id=discovered_host['id']).auto_provision() + assert f'provisioned with rule {discovery_rule.name}' in result['message'] values = session.discoveryrule.read_associated_hosts(discovery_rule.name) + host_name = values['table'][0]['Name'] assert values['searchbox'] == f'discovery_rule = "{discovery_rule.name}"' assert len(values['table']) == 1 - assert values['table'][0]['Name'] == provisioned_host_name + assert provisioned_host_name in host_name - values = session.host.get_details(provisioned_host_name) + values = session.host.get_details(host_name) assert values['properties']['properties_table']['IP Address'] == ip_address @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_end_to_end(session, module_org, module_location): +def test_positive_end_to_end(session, module_org, module_location, module_target_sat): """Perform end to end testing for discovery rule component.
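For reference, an equivalent rule built through the api namespace this diff migrates to; the variable values are illustrative, and the priority/max_count inputs are assumptions rather than the UI test's exact data:

    # Hedged sketch mirroring the fields the UI flow fills in; note the
    # trailing underscore on search_, nailgun's convention for this field.
    rule = module_target_sat.api.DiscoveryRule(
        name=rule_name,
        search_=search,
        hostgroup=hg,
        max_count=10,
        priority=int(priority),
        organization=[module_org],
        location=[module_location],
    ).create()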
:id: dd35e566-dc3a-43d3-939c-a33ae528740f @@ -273,23 +243,25 @@ def test_positive_end_to_end(session, module_org, module_location): :CaseImportance: Critical """ rule_name = gen_string('alpha') - search = f'cpu_count = {gen_integer(1, 5)}' + search = gen_string('alpha') hg_name = gen_string('alpha') hostname = gen_string('alpha') hosts_limit = str(gen_integer(0, 100)) priority = str(gen_integer(1, 100)) new_rule_name = gen_string('alpha') - new_search = f'cpu_count = {gen_integer(6, 10)}' + new_search = gen_string('alpha') new_hg_name = gen_string('alpha') new_hostname = gen_string('alpha') new_hosts_limit = str(gen_integer(101, 200)) new_priority = str(gen_integer(101, 200)) - entities.HostGroup(name=hg_name, organization=[module_org], location=[module_location]).create() - entities.HostGroup( + module_target_sat.api.HostGroup( + name=hg_name, organization=[module_org], location=[module_location] + ).create() + module_target_sat.api.HostGroup( name=new_hg_name, organization=[module_org], location=[module_location] ).create() - new_org = entities.Organization().create() - new_loc = entities.Location().create() + new_org = module_target_sat.api.Organization().create() + new_loc = module_target_sat.api.Location().create() with session: session.discoveryrule.create( { diff --git a/tests/foreman/ui/test_domain.py b/tests/foreman/ui/test_domain.py index 48e297f591a..09e4864f07f 100644 --- a/tests/foreman/ui/test_domain.py +++ b/tests/foreman/ui/test_domain.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.utils.datafactory import valid_domain_names @@ -40,8 +35,6 @@ def test_positive_set_parameter(session, valid_domain_name, param_value): :parametrized: yes :expectedresults: Domain parameter is created. - - :CaseLevel: Integration """ new_param = {'name': gen_string('alpha', 255), 'value': param_value} with session: @@ -62,8 +55,6 @@ def test_negative_set_parameter(session, valid_domain_name): :expectedresults: Domain parameter is not updated. Error is raised - :CaseLevel: Integration - :CaseImportance: Medium """ update_values = { @@ -85,8 +76,6 @@ def test_negative_set_parameter_same(session, valid_domain_name): :expectedresults: Domain parameter with same values is not created. 
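# The refactor running through this PR swaps direct `nailgun.entities` calls
# for the satellite-scoped `target_sat.api` accessor, so every object is
# created against an explicit Satellite instance. A minimal sketch of the
# pattern, assuming a `module_target_sat` fixture as used in these tests
# (the helper name is illustrative):
def _make_hostgroup(module_target_sat, module_org, module_location, name):
    # Same constructor as above, but bound to a specific Satellite.
    return module_target_sat.api.HostGroup(
        name=name, organization=[module_org], location=[module_location]
    ).create()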
- :CaseLevel: Integration - :CaseImportance: Medium """ param_name = gen_string('alpha') @@ -108,8 +97,6 @@ def test_positive_remove_parameter(session, valid_domain_name): :expectedresults: Domain parameter is removed - :CaseLevel: Integration - :CaseImportance: Medium """ param_name = gen_string('alpha') @@ -133,8 +120,6 @@ def test_positive_end_to_end(session, module_org, module_location, valid_domain_ :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ dns_domain_name = valid_domain_name diff --git a/tests/foreman/ui/test_eol_banner.py b/tests/foreman/ui/test_eol_banner.py new file mode 100644 index 00000000000..c19b19838e6 --- /dev/null +++ b/tests/foreman/ui/test_eol_banner.py @@ -0,0 +1,99 @@ +"""Test module for Dashboard UI + +:Requirement: Dashboard + +:CaseAutomation: Automated + +:CaseComponent: Dashboard + +:Team: Endeavour + +:CaseImportance: High + +""" +from datetime import datetime, timedelta + +from airgun.session import Session +from navmazing import NavigationTriesExceeded +import pytest + +from robottelo.utils.datafactory import gen_string + + +def set_eol_date(target_sat, eol_date): + target_sat.execute( + rf'''sed -i "/end_of_life/c\ 'end_of_life': '{eol_date}'" /usr/share/satellite/lifecycle-metadata.yml''' + ) + target_sat.restart_services() + + +@pytest.mark.upgrade +@pytest.mark.run_in_one_thread +@pytest.mark.tier2 +def test_positive_eol_banner_e2e(session, target_sat, test_name): + """Check if the EOL banner is displayed correctly + + :id: 0ce6c11c-d969-4e7e-a934-cd1683de62a3 + + :steps: + + 1. Set the EOL date within 6 months, assert warning banner + 2. Check non-admin users can't see warning banner + 3. Dismiss banner + 4. Move EOL date to the past, assert error banner + 5. Check non-admin users can't see error banner + 6. 
Dismiss banner + + :expectedresults: Banner shows up when it should + """ + # non-admin user + username = gen_string('alpha') + password = gen_string('alpha') + target_sat.api.User(login=username, password=password, mail='test@example.com').create() + # admin user + admin_username = gen_string('alpha') + admin_password = gen_string('alpha') + target_sat.api.User( + login=admin_username, password=admin_password, admin=True, mail='admin@example.com' + ).create() + + # eol in 3 months + eol_date = (datetime.now() + timedelta(days=90)).strftime("%Y-%m-%d") + message_date = (datetime.now() + timedelta(days=90)).strftime("%B %Y") + set_eol_date(target_sat, eol_date) + + # non-admin can't see banner + with Session(test_name, username, password) as newsession: + with pytest.raises(NavigationTriesExceeded) as error: + newsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # admin can see warning banner + with Session(test_name, admin_username, admin_password) as adminsession: + banner = adminsession.eol_banner.read() + assert message_date in banner["name"] + assert adminsession.eol_banner.is_warning() + adminsession.eol_banner.dismiss() + with pytest.raises(NavigationTriesExceeded) as error: + adminsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # past eol_date + eol_date = (datetime.now() - timedelta(days=5)).strftime("%Y-%m-%d") + set_eol_date(target_sat, eol_date) + + # non-admin can't see danger banner + with Session(test_name, username, password) as newsession: + with pytest.raises(NavigationTriesExceeded) as error: + newsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' + + # admin can see danger banner + with Session(test_name, admin_username, admin_password) as adminsession: + banner = adminsession.eol_banner.read() + assert eol_date in banner["name"] + assert adminsession.eol_banner.is_danger() + adminsession.eol_banner.dismiss() + with pytest.raises(NavigationTriesExceeded) as error: + adminsession.eol_banner.read() + assert error.typename == 'NavigationTriesExceeded' diff --git a/tests/foreman/ui/test_errata.py b/tests/foreman/ui/test_errata.py index 4747930357f..cbbd91d71b1 100644 --- a/tests/foreman/ui/test_errata.py +++ b/tests/foreman/ui/test_errata.py @@ -4,45 +4,40 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ErrataManagement :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from airgun.session import Session from broker import Broker from fauxfactory import gen_string -from nailgun import entities +from manifester import Manifester +import pytest from robottelo.config import settings -from robottelo.constants import DEFAULT_LOC -from robottelo.constants import FAKE_10_YUM_BUGFIX_ERRATUM -from robottelo.constants import FAKE_10_YUM_BUGFIX_ERRATUM_COUNT -from robottelo.constants import FAKE_11_YUM_ENHANCEMENT_ERRATUM -from robottelo.constants import FAKE_11_YUM_ENHANCEMENT_ERRATUM_COUNT -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_3_YUM_OUTDATED_PACKAGES -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE -from robottelo.constants import FAKE_5_CUSTOM_PACKAGE -from robottelo.constants import FAKE_9_YUM_OUTDATED_PACKAGES -from robottelo.constants import FAKE_9_YUM_SECURITY_ERRATUM -from robottelo.constants import FAKE_9_YUM_SECURITY_ERRATUM_COUNT -from robottelo.constants import PRDS -from 
robottelo.constants import REAL_0_RH_PACKAGE -from robottelo.constants import REAL_4_ERRATA_CVES -from robottelo.constants import REAL_4_ERRATA_ID +from robottelo.constants import ( + DEFAULT_LOC, + FAKE_1_CUSTOM_PACKAGE, + FAKE_2_CUSTOM_PACKAGE, + FAKE_3_YUM_OUTDATED_PACKAGES, + FAKE_4_CUSTOM_PACKAGE, + FAKE_5_CUSTOM_PACKAGE, + FAKE_9_YUM_OUTDATED_PACKAGES, + FAKE_9_YUM_SECURITY_ERRATUM, + FAKE_9_YUM_SECURITY_ERRATUM_COUNT, + FAKE_10_YUM_BUGFIX_ERRATUM, + FAKE_10_YUM_BUGFIX_ERRATUM_COUNT, + FAKE_11_YUM_ENHANCEMENT_ERRATUM, + FAKE_11_YUM_ENHANCEMENT_ERRATUM_COUNT, + PRDS, + REAL_0_RH_PACKAGE, + REAL_4_ERRATA_CVES, + REAL_4_ERRATA_ID, +) from robottelo.hosts import ContentHost - CUSTOM_REPO_URL = settings.repos.yum_9.url CUSTOM_REPO_ERRATA_ID = settings.repos.yum_9.errata[0] @@ -53,9 +48,9 @@ pytestmark = [pytest.mark.run_in_one_thread] -def _generate_errata_applicability(hostname): +def _generate_errata_applicability(hostname, module_target_sat): """Force host to generate errata applicability""" - host = entities.Host().search(query={'search': f'name={hostname}'})[0].read() + host = module_target_sat.api.Host().search(query={'search': f'name={hostname}'})[0].read() host.errata_applicability(synchronous=False) @@ -83,37 +78,55 @@ def _set_setting_value(setting_entity, value): setting_entity.update(['value']) -def _org(module_target_sat): - org = entities.Organization().create() - # adding remote_execution_connect_by_ip=Yes at org level - entities.Parameter( +@pytest.fixture(scope='module') +def module_manifest(): + with Manifester(manifest_category=settings.manifest.entitlement) as manifest: + yield manifest + + +@pytest.fixture +def function_manifest(): + with Manifester(manifest_category=settings.manifest.entitlement) as manifest: + yield manifest + + +@pytest.fixture(scope='module') +def module_org_with_parameter(module_target_sat, module_manifest): + org = module_target_sat.api.Organization(simple_content_access=False).create() + # org.sca_disable() + module_target_sat.api.Parameter( name='remote_execution_connect_by_ip', parameter_type='boolean', value='Yes', organization=org.id, ).create() - module_target_sat.upload_manifest(org.id) + module_target_sat.upload_manifest(org.id, module_manifest.content) return org -@pytest.fixture(scope='module') -def module_org(): - return _org() - - @pytest.fixture -def org(): - return _org() +def function_org_with_parameter(target_sat, function_manifest): + org = target_sat.api.Organization(simple_content_access=False).create() + target_sat.api.Parameter( + name='remote_execution_connect_by_ip', + parameter_type='boolean', + value='Yes', + organization=org.id, + ).create() + target_sat.upload_manifest(org.id, function_manifest.content) + return org @pytest.fixture(scope='module') -def module_lce(module_org): - return entities.LifecycleEnvironment(organization=module_org).create() +def module_lce(module_target_sat, module_org_with_parameter): + return module_target_sat.api.LifecycleEnvironment( + organization=module_org_with_parameter + ).create() @pytest.fixture -def lce(org): - return entities.LifecycleEnvironment(organization=org).create() +def lce(target_sat, function_org_with_parameter): + return target_sat.api.LifecycleEnvironment(organization=function_org_with_parameter).create() @pytest.fixture @@ -125,9 +138,9 @@ def erratatype_vm(module_repos_collection_with_setup, target_sat): @pytest.fixture -def errata_status_installable(): +def errata_status_installable(module_target_sat): """Fixture to allow restoring errata_status_installable setting 
after usage""" - errata_status_installable = entities.Setting().search( + errata_status_installable = module_target_sat.api.Setting().search( query={'search': 'name="errata_status_installable"'} )[0] original_value = errata_status_installable.value @@ -135,12 +148,12 @@ def errata_status_installable(): _set_setting_value(errata_status_installable, original_value) -@pytest.fixture(scope='function') +@pytest.fixture def vm(module_repos_collection_with_setup, rhel7_contenthost, target_sat): """Virtual machine registered in satellite""" module_repos_collection_with_setup.setup_virtual_machine(rhel7_contenthost) rhel7_contenthost.add_rex_key(satellite=target_sat) - yield rhel7_contenthost + return rhel7_contenthost @pytest.mark.e2e @@ -160,7 +173,12 @@ def vm(module_repos_collection_with_setup, rhel7_contenthost, target_sat): ) @pytest.mark.no_containers def test_end_to_end( - session, module_org, module_repos_collection_with_setup, vm, target_sat, setting_update + session, + module_org_with_parameter, + module_repos_collection_with_setup, + vm, + target_sat, + setting_update, ): """Create all entities required for errata, set up applicable host, read errata details and apply it to host @@ -175,8 +193,6 @@ def test_end_to_end( :BZ: 2029192 :customerscenario: true - - :CaseLevel: System """ ERRATA_DETAILS = { 'advisory': 'RHSA-2012:0055', @@ -240,14 +256,14 @@ def test_end_to_end( @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_content_host_errata_page_pagination(session, org, lce, target_sat): +def test_content_host_errata_page_pagination(session, function_org_with_parameter, lce, target_sat): """ # Test per-page pagination for BZ#1662254 # Test apply by REX using Select All for BZ#1846670 :id: 6363eda7-a162-4a4a-b70f-75decbd8202e - :Steps: + :steps: 1. Install more than 20 packages that need errata 2. View Content Host's Errata page 3. Assert total_pages > 1 @@ -269,10 +285,9 @@ def test_content_host_errata_page_pagination(session, org, lce, target_sat): :customerscenario: true :BZ: 1662254, 1846670 - - :CaseLevel: System """ + org = function_org_with_parameter pkgs = ' '.join(FAKE_3_YUM_OUTDATED_PACKAGES) repos_collection = target_sat.cli_factory.RepositoryCollection( distro='rhel7', @@ -281,7 +296,7 @@ def test_content_host_errata_page_pagination(session, org, lce, target_sat): target_sat.cli_factory.YumRepository(url=settings.repos.yum_3.url), ], ) - repos_collection.setup_content(org.id, lce.id, upload_manifest=True) + repos_collection.setup_content(org.id, lce.id) with Broker(nick=repos_collection.distro, host_class=ContentHost) as client: client.add_rex_key(satellite=target_sat) # Add repo and install packages that need errata @@ -326,23 +341,22 @@ def test_content_host_errata_page_pagination(session, org, lce, target_sat): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_list(session, org, lce, target_sat): +def test_positive_list(session, function_org_with_parameter, lce, target_sat): """View all errata in an Org :id: 71c7a054-a644-4c1e-b304-6bc34ea143f4 :Setup: Errata synced on satellite server. - :Steps: Create two Orgs each having a product synced which contains errata. + :steps: Create two Orgs each having a product synced which contains errata. :expectedresults: Check that the errata belonging to one Org is not showing in the other. 
:BZ: 1659941, 1837767 :customerscenario: true - - :CaseLevel: Integration """ + org = function_org_with_parameter rc = target_sat.cli_factory.RepositoryCollection( repositories=[target_sat.cli_factory.YumRepository(settings.repos.yum_3.url)] ) @@ -373,7 +387,9 @@ def test_positive_list(session, org, lce, target_sat): ], indirect=True, ) -def test_positive_list_permission(test_name, module_org, module_repos_collection_with_setup): +def test_positive_list_permission( + test_name, module_org_with_parameter, module_repos_collection_with_setup, module_target_sat +): """Show errata only if the User has permissions to view them :id: cdb28f6a-23df-47a2-88ab-cd3b492126b2 @@ -384,30 +400,31 @@ def test_positive_list_permission(test_name, module_org, module_repos_collection 2. Make sure that they both have errata. 3. Create a user with view access on one product and not on the other. - :Steps: Go to Content -> Errata. + :steps: Go to Content -> Errata. :expectedresults: Check that the new user is able to see errata for one product only. - - :CaseLevel: Integration """ - role = entities.Role().create() - entities.Filter( + module_org = module_org_with_parameter + role = module_target_sat.api.Role().create() + module_target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=module_target_sat.api.Permission().search( query={'search': 'resource_type="Katello::Product"'} ), role=role, search='name = "{}"'.format(PRDS['rhel']), ).create() user_password = gen_string('alphanumeric') - user = entities.User( + user = module_target_sat.api.User( default_organization=module_org, organization=[module_org], role=[role], password=user_password, ).create() - with Session(test_name, user=user.login, password=user_password) as session: + with module_target_sat.ui_session( + test_name, user=user.login, password=user_password + ) as session: assert ( session.errata.search(RHVA_ERRATA_ID, applicable=False)[0]['Errata ID'] == RHVA_ERRATA_ID @@ -430,7 +447,7 @@ def test_positive_list_permission(test_name, module_org, module_repos_collection indirect=True, ) def test_positive_apply_for_all_hosts( - session, module_org, module_repos_collection_with_setup, target_sat + session, module_org_with_parameter, module_repos_collection_with_setup, target_sat ): """Apply an erratum for all content hosts @@ -440,15 +457,13 @@ def test_positive_apply_for_all_hosts( :customerscenario: true - :Steps: + :steps: 1. Go to Content -> Errata. Select an erratum -> Content Hosts tab. 2. Select all Content Hosts and apply the erratum. :expectedresults: Check that the erratum is applied in all the content hosts. - - :CaseLevel: System """ with Broker( nick=module_repos_collection_with_setup.distro, host_class=ContentHost, _count=2 @@ -494,14 +509,12 @@ def test_positive_view_cve(session, module_repos_collection_with_setup): :Setup: Errata synced on satellite server. - :Steps: Go to Content -> Errata. Select an Errata. + :steps: Go to Content -> Errata. Select an Errata. :expectedresults: 1. Check if the CVE information is shown in Errata Details page. 2. Check if 'N/A' is displayed if CVE information is not present. 
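# Non-admin UI checks, as in the permission test above, now open sessions via
# `target_sat.ui_session(...)` rather than constructing an airgun `Session`
# directly, keeping the session tied to the satellite under test. A minimal
# sketch, assuming a user entity plus its plaintext password (the helper name
# is illustrative):
def _assert_errata_visible(module_target_sat, test_name, user, user_password, errata_id):
    with module_target_sat.ui_session(
        test_name, user=user.login, password=user_password
    ) as session:
        # The scoped user should see exactly the erratum it has access to.
        assert session.errata.search(errata_id, applicable=False)[0]['Errata ID'] == errata_id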
- - :CaseLevel: Integration """ with session: errata_values = session.errata.read(RHVA_ERRATA_ID) @@ -528,7 +541,7 @@ def test_positive_view_cve(session, module_repos_collection_with_setup): indirect=True, ) def test_positive_filter_by_environment( - session, module_org, module_repos_collection_with_setup, target_sat + session, module_org_with_parameter, module_repos_collection_with_setup, target_sat ): """Filter Content hosts by environment @@ -540,13 +553,12 @@ def test_positive_filter_by_environment( :Setup: Errata synced on satellite server. - :Steps: Go to Content -> Errata. Select an Errata -> Content Hosts tab + :steps: Go to Content -> Errata. Select an Errata -> Content Hosts tab -> Filter content hosts by Environment. :expectedresults: Content hosts can be filtered by Environment. - - :CaseLevel: System """ + module_org = module_org_with_parameter with Broker( nick=module_repos_collection_with_setup.distro, host_class=ContentHost, _count=2 ) as clients: @@ -556,14 +568,16 @@ def test_positive_filter_by_environment( ) assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) # Promote the latest content view version to a new lifecycle environment - content_view = entities.ContentView( + content_view = target_sat.api.ContentView( id=module_repos_collection_with_setup.setup_content_data['content_view']['id'] ).read() content_view_version = content_view.version[-1].read() lce = content_view_version.environment[-1].read() - new_lce = entities.LifecycleEnvironment(organization=module_org, prior=lce).create() + new_lce = target_sat.api.LifecycleEnvironment(organization=module_org, prior=lce).create() content_view_version.promote(data={'environment_ids': new_lce.id}) - host = entities.Host().search(query={'search': f'name={clients[0].hostname}'})[0].read() + host = ( + target_sat.api.Host().search(query={'search': f'name={clients[0].hostname}'})[0].read() + ) host.content_facet_attributes = { 'content_view_id': content_view.id, 'lifecycle_environment_id': new_lce.id, @@ -604,7 +618,7 @@ def test_positive_filter_by_environment( indirect=True, ) def test_positive_content_host_previous_env( - session, module_org, module_repos_collection_with_setup, vm + session, module_org_with_parameter, module_repos_collection_with_setup, vm, module_target_sat ): """Check if the applicable errata are available from the content host's previous environment @@ -616,26 +630,27 @@ def test_positive_content_host_previous_env( 1. Make sure multiple environments are present. 2. Content host's previous environments have additional errata. - :Steps: Go to Content Hosts -> Select content host -> Errata Tab -> + :steps: Go to Content Hosts -> Select content host -> Errata Tab -> Select Previous environments. :expectedresults: The errata from previous environments are displayed. 
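# Promoting the newest content view version into a fresh lifecycle
# environment is the recurring setup step in these environment tests. A
# minimal sketch using the same API calls, assuming an existing content view
# id (the helper name is illustrative):
def _promote_latest_cv_version(target_sat, content_view_id, module_org):
    content_view = target_sat.api.ContentView(id=content_view_id).read()
    content_view_version = content_view.version[-1].read()
    # Chain the new environment after the version's current one.
    lce = content_view_version.environment[-1].read()
    new_lce = target_sat.api.LifecycleEnvironment(organization=module_org, prior=lce).create()
    content_view_version.promote(data={'environment_ids': new_lce.id})
    return new_lce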
:parametrized: yes - - :CaseLevel: System """ + module_org = module_org_with_parameter hostname = vm.hostname assert _install_client_package(vm, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) # Promote the latest content view version to a new lifecycle environment - content_view = entities.ContentView( + content_view = module_target_sat.api.ContentView( id=module_repos_collection_with_setup.setup_content_data['content_view']['id'] ).read() content_view_version = content_view.version[-1].read() lce = content_view_version.environment[-1].read() - new_lce = entities.LifecycleEnvironment(organization=module_org, prior=lce).create() + new_lce = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce + ).create() content_view_version.promote(data={'environment_ids': new_lce.id}) - host = entities.Host().search(query={'search': f'name={hostname}'})[0].read() + host = module_target_sat.api.Host().search(query={'search': f'name={hostname}'})[0].read() host.content_facet_attributes = { 'content_view_id': content_view.id, 'lifecycle_environment_id': new_lce.id, @@ -663,7 +678,7 @@ def test_positive_content_host_previous_env( ], indirect=True, ) -def test_positive_content_host_library(session, module_org, vm): +def test_positive_content_host_library(session, module_org_with_parameter, vm): """Check if the applicable errata are available from the content host's Library @@ -674,13 +689,11 @@ def test_positive_content_host_library(session, module_org, vm): 1. Make sure multiple environments are present. 2. Content host's Library environment has additional errata. - :Steps: Go to Content Hosts -> Select content host -> Errata Tab -> Select 'Library'. + :steps: Go to Content Hosts -> Select content host -> Errata Tab -> Select 'Library'. :expectedresults: The errata from Library are displayed. :parametrized: yes - - :CaseLevel: System """ hostname = vm.hostname assert _install_client_package(vm, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) @@ -714,14 +727,12 @@ def test_positive_content_host_search_type(session, erratatype_vm): :customerscenario: true - :Steps: Search for errata on content host by type (e.g. 'type = security') + :steps: Search for errata on content host by type (e.g. 'type = security') Step 1 Search for "type = security", assert expected amount and IDs found Step 2 Search for "type = bugfix", assert expected amount and IDs found Step 3 Search for "type = enhancement", assert expected amount and IDs found :BZ: 1653293 - - :CaseLevel: Integration """ pkgs = ' '.join(FAKE_9_YUM_OUTDATED_PACKAGES) @@ -777,7 +788,9 @@ def test_positive_content_host_search_type(session, erratatype_vm): ], indirect=True, ) -def test_positive_show_count_on_content_host_page(session, module_org, erratatype_vm): +def test_positive_show_count_on_content_host_page( + session, module_org_with_parameter, erratatype_vm +): """Available errata count displayed in Content hosts page :id: 8575e282-d56e-41dc-80dd-f5f6224417cb @@ -787,15 +800,13 @@ def test_positive_show_count_on_content_host_page(session, module_org, erratatyp 1. Errata synced on satellite server. 2. Some content hosts are present. - :Steps: Go to Hosts -> Content Hosts. + :steps: Go to Hosts -> Content Hosts. :expectedresults: The available errata count is displayed. 
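# Count assertions like the ones below depend on fresh applicability data;
# the module-level `_generate_errata_applicability` helper forces a
# recalculation. A minimal sketch of that pattern, assuming the host is
# already registered:
def _regenerate_applicability(module_target_sat, hostname):
    host = module_target_sat.api.Host().search(query={'search': f'name={hostname}'})[0].read()
    # Async recalculation; tests poll or re-read counts afterwards.
    host.errata_applicability(synchronous=False)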
:BZ: 1484044, 1775427 :customerscenario: true - - :CaseLevel: System """ vm = erratatype_vm hostname = vm.hostname @@ -833,7 +844,9 @@ def test_positive_show_count_on_content_host_page(session, module_org, erratatyp ], indirect=True, ) -def test_positive_show_count_on_content_host_details_page(session, module_org, erratatype_vm): +def test_positive_show_count_on_content_host_details_page( + session, module_org_with_parameter, erratatype_vm +): """Errata count on Content host Details page :id: 388229da-2b0b-41aa-a457-9b5ecbf3df4b @@ -843,13 +856,11 @@ def test_positive_show_count_on_content_host_details_page(session, module_org, e 1. Errata synced on satellite server. 2. Some content hosts are present. - :Steps: Go to Hosts -> Content Hosts -> Select Content Host -> Details page. + :steps: Go to Hosts -> Content Hosts -> Select Content Host -> Details page. :expectedresults: The errata section should be displayed with Security, Bug fix, Enhancement. :BZ: 1484044 - - :CaseLevel: System """ vm = erratatype_vm hostname = vm.hostname @@ -876,7 +887,11 @@ def test_positive_show_count_on_content_host_details_page(session, module_org, e @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') @pytest.mark.parametrize('setting_update', ['errata_status_installable'], indirect=True) def test_positive_filtered_errata_status_installable_param( - session, errata_status_installable, target_sat, setting_update + session, + function_entitlement_manifest_org, + errata_status_installable, + target_sat, + setting_update, ): """Filter errata for specific content view and verify that host that was registered using that content view has different states in @@ -885,7 +900,7 @@ def test_positive_filtered_errata_status_installable_param( :id: ed94cf34-b8b9-4411-8edc-5e210ea6af4f - :Steps: + :steps: 1. Prepare setup: Create Lifecycle Environment, Content View, Activation Key and all necessary repos @@ -901,11 +916,9 @@ def test_positive_filtered_errata_status_installable_param( :BZ: 1368254, 2013093 :CaseImportance: Medium - - :CaseLevel: System """ - org = entities.Organization().create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = function_entitlement_manifest_org + lce = target_sat.api.LifecycleEnvironment(organization=org).create() repos_collection = target_sat.cli_factory.RepositoryCollection( distro='rhel7', repositories=[ @@ -916,22 +929,22 @@ def test_positive_filtered_errata_status_installable_param( target_sat.cli_factory.YumRepository(url=CUSTOM_REPO_URL), ], ) - repos_collection.setup_content(org.id, lce.id, upload_manifest=True) + repos_collection.setup_content(org.id, lce.id) with Broker(nick=repos_collection.distro, host_class=ContentHost) as client: repos_collection.setup_virtual_machine(client) assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) # Adding content view filter and content view filter rule to exclude errata for the # installed package. 
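# Consolidated, the exclusion step below is: create an exclusion filter on
# the content view, attach a rule for the single erratum, then republish. A
# minimal sketch with the same API calls, assuming `content_view` and an
# erratum id (the helper name is illustrative):
def _exclude_erratum(target_sat, content_view, errata_id):
    cv_filter = target_sat.api.ErratumContentViewFilter(
        content_view=content_view, inclusion=False
    ).create()
    erratum = target_sat.api.Errata(content_view_version=content_view.version[-1]).search(
        query=dict(search=f'errata_id="{errata_id}"')
    )[0]
    target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=erratum).create()
    content_view.publish()
    return content_view.read()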
- content_view = entities.ContentView( + content_view = target_sat.api.ContentView( id=repos_collection.setup_content_data['content_view']['id'] ).read() - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=False ).create() - errata = entities.Errata(content_view_version=content_view.version[-1]).search( + errata = target_sat.api.Errata(content_view_version=content_view.version[-1]).search( query=dict(search=f'errata_id="{CUSTOM_REPO_ERRATA_ID}"') )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view = content_view.read() content_view_version = content_view.version[-1] @@ -990,7 +1003,7 @@ def test_positive_filtered_errata_status_installable_param( indirect=True, ) def test_content_host_errata_search_commands( - session, module_org, module_repos_collection_with_setup, target_sat + session, module_org_with_parameter, module_repos_collection_with_setup, target_sat ): """View a list of affected content hosts for security (RHSA) and bugfix (RHBA) errata, filtered with errata status and applicable flags. Applicability is calculated using the @@ -1003,7 +1016,7 @@ def test_content_host_errata_search_commands( :customerscenario: true - :Steps: + :steps: 1. host list --search "errata_status = security_needed" 2. host list --search "errata_status = errata_needed" 3. host list --search "applicable_errata = RHSA-2012:0055" diff --git a/tests/foreman/ui/test_hardwaremodel.py b/tests/foreman/ui/test_hardwaremodel.py index 5d7d8d2f02a..38b42de6aac 100644 --- a/tests/foreman/ui/test_hardwaremodel.py +++ b/tests/foreman/ui/test_hardwaremodel.py @@ -4,34 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_end_to_end(session, host_ui_options): - """Perform end to end testing for hardware model component :id: 93663cc9-7c8f-4f43-8050-444be1313bed :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: Medium :BZ:1758260 diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index bad6f2f5307..7ccf497c927 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -4,47 +4,38 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import copy import csv import os import re -from datetime import datetime +from airgun.exceptions import DisabledWidgetError, NoSuchElementException import pytest -import yaml -from airgun.exceptions import DisabledWidgetError -from airgun.exceptions import NoSuchElementException -from airgun.session import Session from wait_for import wait_for +import yaml -from robottelo import constants from robottelo.config import settings -from robottelo.constants import ANY_CONTEXT -from robottelo.constants import DEFAULT_CV -from robottelo.constants import DEFAULT_LOC -from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME -from robottelo.constants import ENVIRONMENT -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_7_CUSTOM_PACKAGE -from 
robottelo.constants import FAKE_8_CUSTOM_PACKAGE -from robottelo.constants import FAKE_8_CUSTOM_PACKAGE_NAME -from robottelo.constants import FOREMAN_PROVIDERS -from robottelo.constants import OSCAP_PERIOD -from robottelo.constants import OSCAP_WEEKDAY -from robottelo.constants import PERMISSIONS -from robottelo.constants import REPO_TYPE +from robottelo.constants import ( + ANY_CONTEXT, + DEFAULT_CV, + DEFAULT_LOC, + ENVIRONMENT, + FAKE_7_CUSTOM_PACKAGE, + FAKE_8_CUSTOM_PACKAGE, + FAKE_8_CUSTOM_PACKAGE_NAME, + OSCAP_PERIOD, + OSCAP_WEEKDAY, + PERMISSIONS, + REPO_TYPE, +) +from robottelo.constants.repos import CUSTOM_FILE_REPO from robottelo.utils.datafactory import gen_string from robottelo.utils.issue_handlers import is_open @@ -64,12 +55,12 @@ def ui_user(ui_user, smart_proxy_location, module_target_sat): id=ui_user.id, default_location=smart_proxy_location, ).update(['default_location']) - yield ui_user + return ui_user @pytest.fixture def scap_policy(scap_content, target_sat): - scap_policy = target_sat.cli_factory.make_scap_policy( + return target_sat.cli_factory.make_scap_policy( { 'name': gen_string('alpha'), 'deploy-by': 'ansible', @@ -79,7 +70,6 @@ def scap_policy(scap_content, target_sat): 'weekday': OSCAP_WEEKDAY['friday'].lower(), } ) - return scap_policy @pytest.fixture(scope='module') @@ -97,168 +87,6 @@ def module_global_params(module_target_sat): global_parameter.delete() -@pytest.fixture(scope='module') -def module_libvirt_resource(module_org, smart_proxy_location, module_target_sat): - # Search if Libvirt compute-resource already exists - # If so, just update its relevant fields otherwise, - # Create new compute-resource with 'libvirt' provider. - resource_url = f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system' - comp_res = [ - res - for res in module_target_sat.api.LibvirtComputeResource().search() - if res.provider == FOREMAN_PROVIDERS['libvirt'] and res.url == resource_url - ] - if len(comp_res) > 0: - computeresource = module_target_sat.api.LibvirtComputeResource(id=comp_res[0].id).read() - computeresource.location.append(smart_proxy_location) - computeresource.organization.append(module_org) - computeresource = computeresource.update(['location', 'organization']) - else: - # Create Libvirt compute-resource - computeresource = module_target_sat.api.LibvirtComputeResource( - provider=FOREMAN_PROVIDERS['libvirt'], - url=resource_url, - set_console_password=False, - display_type='VNC', - location=[smart_proxy_location], - organization=[module_org], - ).create() - return f'{computeresource.name} (Libvirt)' - - -@pytest.fixture(scope='module') -def module_libvirt_domain(module_org, smart_proxy_location, default_domain): - default_domain.location.append(smart_proxy_location) - default_domain.organization.append(module_org) - default_domain.update(['location', 'organization']) - return default_domain - - -@pytest.fixture(scope='module') -def module_libvirt_subnet( - module_org, smart_proxy_location, module_libvirt_domain, default_smart_proxy, module_target_sat -): - # Search if subnet is defined with given network. 
- # If so, just update its relevant fields otherwise, - # Create new subnet - network = settings.vlan_networking.subnet - subnet = module_target_sat.api.Subnet().search(query={'search': f'network={network}'}) - if len(subnet) > 0: - subnet = subnet[0].read() - subnet.domain.append(module_libvirt_domain) - subnet.location.append(smart_proxy_location) - subnet.organization.append(module_org) - subnet.dns = default_smart_proxy - subnet.dhcp = default_smart_proxy - subnet.ipam = 'DHCP' - subnet.tftp = default_smart_proxy - subnet.discovery = default_smart_proxy - subnet = subnet.update( - ['domain', 'discovery', 'dhcp', 'dns', 'ipam', 'location', 'organization', 'tftp'] - ) - else: - # Create new subnet - subnet = module_target_sat.api.Subnet( - network=network, - mask=settings.vlan_networking.netmask, - location=[smart_proxy_location], - organization=[module_org], - domain=[module_libvirt_domain], - ipam='DHCP', - dns=default_smart_proxy, - dhcp=default_smart_proxy, - tftp=default_smart_proxy, - discovery=default_smart_proxy, - ).create() - return subnet - - -@pytest.fixture(scope='module') -def module_libvirt_media(module_org, smart_proxy_location, os_path, default_os, module_target_sat): - media = module_target_sat.api.Media().search(query={'search': f'path="{os_path}"'}) - if len(media) > 0: - # Media with this path already exist, make sure it is correct - media = media[0].read() - media.organization.append(module_org) - media.location.append(smart_proxy_location) - media.operatingsystem.append(default_os) - media.os_family = 'Redhat' - media = media.update(['organization', 'location', 'operatingsystem', 'os_family']) - else: - # Create new media - media = module_target_sat.api.Media( - organization=[module_org], - location=[smart_proxy_location], - operatingsystem=[default_os], - path_=os_path, - os_family='Redhat', - ).create() - return media - - -@pytest.fixture(scope='module') -def module_libvirt_hostgroup( - module_org, - smart_proxy_location, - default_partition_table, - default_architecture, - default_os, - module_libvirt_media, - module_libvirt_subnet, - default_smart_proxy, - module_libvirt_domain, - module_lce, - module_cv_repo, - module_target_sat, -): - return module_target_sat.api.HostGroup( - architecture=default_architecture, - domain=module_libvirt_domain, - subnet=module_libvirt_subnet, - lifecycle_environment=module_lce, - content_view=module_cv_repo, - location=[smart_proxy_location], - operatingsystem=default_os, - organization=[module_org], - ptable=default_partition_table, - medium=module_libvirt_media, - ).create() - - -@pytest.fixture(scope='module') -def module_activation_key(module_entitlement_manifest_org, module_target_sat): - """Create activation key using default CV and library environment.""" - activation_key = module_target_sat.api.ActivationKey( - auto_attach=True, - content_view=module_entitlement_manifest_org.default_content_view.id, - environment=module_entitlement_manifest_org.library.id, - organization=module_entitlement_manifest_org, - ).create() - - # Find the 'Red Hat Employee Subscription' and attach it to the activation key. - for subs in module_target_sat.api.Subscription( - organization=module_entitlement_manifest_org - ).search(): - if subs.name == DEFAULT_SUBSCRIPTION_NAME: - # 'quantity' must be 1, not subscription['quantity']. Greater - # values produce this error: 'RuntimeError: Error: Only pools - # with multi-entitlement product subscriptions can be added to - # the activation key with a quantity greater than one.' 
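# The removed fixture pins quantity to 1 because only multi-entitlement
# pools accept larger quantities on an activation key. A minimal sketch of
# that attach step, assuming an organization with an uploaded manifest (the
# helper name is illustrative):
def _attach_subscription(module_target_sat, org, activation_key, subscription_name):
    for subs in module_target_sat.api.Subscription(organization=org).search():
        if subs.name == subscription_name:
            # 'quantity' must stay 1 for non-multi-entitlement pools.
            activation_key.add_subscriptions(data={'quantity': 1, 'subscription_id': subs.id})
            break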
- activation_key.add_subscriptions(data={'quantity': 1, 'subscription_id': subs.id}) - break - return activation_key - - -@pytest.fixture(scope='function') -def remove_vm_on_delete(target_sat, setting_update): - setting_update.value = 'true' - setting_update.update({'value'}) - assert ( - target_sat.api.Setting().search(query={'search': 'name=destroy_vm_on_host_delete'})[0].value - ) - yield - - @pytest.fixture def tracer_install_host(rex_contenthost, target_sat): """Sets up a contenthost with katello-host-tools-tracer enabled, @@ -271,7 +99,21 @@ def tracer_install_host(rex_contenthost, target_sat): rex_contenthost.create_custom_repos( **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']} ) - yield rex_contenthost + return rex_contenthost + + +@pytest.fixture +def new_host_ui(target_sat): + """Changes the setting to use the New All Host UI + then returns it back to the normal value""" + all_hosts_setting = target_sat.api.Setting().search( + query={'search': f'name={"new_hosts_page"}'} + )[0] + all_hosts_setting.value = 'True' + all_hosts_setting.update({'value'}) + yield + all_hosts_setting.value = 'False' + all_hosts_setting.update({'value'}) @pytest.mark.e2e @@ -286,8 +128,6 @@ def test_positive_end_to_end(session, module_global_params, target_sat, host_ui_ and deleted. :BZ: 1419161 - - :CaseLevel: System """ api_values, host_name = host_ui_options global_params = [ @@ -348,8 +188,6 @@ def test_positive_read_from_details_page(session, module_host_template): :id: ffba5d40-918c-440e-afbb-6b910db3a8fb :expectedresults: Host is created and has expected content - - :CaseLevel: System """ template = module_host_template @@ -382,8 +220,6 @@ def test_positive_read_from_edit_page(session, host_ui_options): :id: 758fcab3-b363-4bfc-8f5d-173098a7e72d :expectedresults: Host is created and has expected content - - :CaseLevel: System """ api_values, host_name = host_ui_options with session: @@ -431,8 +267,6 @@ def test_positive_assign_taxonomies( :expectedresults: Host Assign Organization and Location actions are working as expected. - - :CaseLevel: Integration """ host = target_sat.api.Host(organization=module_org, location=smart_proxy_location).create() with session: @@ -494,8 +328,6 @@ def test_positive_assign_compliance_policy(session, scap_policy, target_sat, fun expected. 
:BZ: 1862135 - - :CaseLevel: Integration """ org = function_host.organization.read() loc = function_host.location.read() @@ -547,8 +379,6 @@ def test_positive_export(session, target_sat, function_org, function_location): :id: ffc512ad-982e-4b60-970a-41e940ebc74c :expectedresults: csv file contains same values as on web UI - - :CaseLevel: System """ hosts = [ target_sat.api.Host(organization=function_org, location=function_location).create() @@ -618,9 +448,6 @@ def test_negative_delete_primary_interface(session, host_ui_options): :BZ: 1417119 :expectedresults: Interface was not deleted - - - :CaseLevel: System """ values, host_name = host_ui_options interface_id = values['interfaces.interface.device_identifier'] @@ -647,8 +474,6 @@ def test_positive_view_hosts_with_non_admin_user( :expectedresults: user with only view_hosts, edit_hosts and view_organization permissions is able to read content hosts and hosts - - :CaseLevel: System """ user_password = gen_string('alpha') role = target_sat.api.Role(organization=[module_org]).create() @@ -667,7 +492,7 @@ def test_positive_view_hosts_with_non_admin_user( created_host = target_sat.api.Host( location=smart_proxy_location, organization=module_org ).create() - with Session(test_name, user=user.login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user.login, password=user_password) as session: host = session.host.get_details(created_host.name, widget_names='breadcrumb') assert host['breadcrumb'] == created_host.name content_host = session.contenthost.read(created_host.name, widget_names='breadcrumb') @@ -686,8 +511,6 @@ def test_positive_remove_parameter_non_admin_user( :expectedresults: user with sufficient permissions may remove host parameter - - :CaseLevel: System """ user_password = gen_string('alpha') parameter = {'name': gen_string('alpha'), 'value': gen_string('alpha')} @@ -718,7 +541,7 @@ def test_positive_remove_parameter_non_admin_user( organization=module_org, host_parameters_attributes=[parameter], ).create() - with Session(test_name, user=user.login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user.login, password=user_password) as session: values = session.host.read(host.name, 'parameters') assert values['parameters']['host_params'][0] == parameter session.host.update(host.name, {'parameters.host_params': []}) @@ -742,8 +565,6 @@ def test_negative_remove_parameter_non_admin_user( :expectedresults: user with insufficient permissions is unable to remove host parameter, 'Remove' link is not visible for him - - :CaseLevel: System """ user_password = gen_string('alpha') @@ -775,7 +596,7 @@ def test_negative_remove_parameter_non_admin_user( organization=module_org, host_parameters_attributes=[parameter], ).create() - with Session(test_name, user=user.login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user.login, password=user_password) as session: values = session.host.read(host.name, 'parameters') assert values['parameters']['host_params'][0] == parameter with pytest.raises(NoSuchElementException) as context: @@ -798,8 +619,6 @@ def test_positive_check_permissions_affect_create_procedure( entities for create host procedure that he has access to :BZ: 1293716 - - :CaseLevel: System """ # Create two lifecycle environments lc_env = target_sat.api.LifecycleEnvironment(organization=function_org).create() @@ -886,11 +705,11 @@ def test_positive_check_permissions_affect_create_procedure( 'other_fields_values': {'host.lce': 
filter_lc_env.name}, }, ] - with Session(test_name, user=user.login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user.login, password=user_password) as session: for host_field in host_fields: + values = {host_field['name']: host_field['unexpected_value']} + values.update(host_field.get('other_fields_values', {})) with pytest.raises(NoSuchElementException) as context: - values = {host_field['name']: host_field['unexpected_value']} - values.update(host_field.get('other_fields_values', {})) session.host.helper.read_create_view(values) error_message = str(context.value) assert host_field['unexpected_value'] in error_message @@ -914,8 +733,6 @@ def test_positive_search_by_parameter(session, module_org, smart_proxy_location, :expectedresults: Only one specific host is returned by search :BZ: 1725686 - - :CaseLevel: Integration """ param_name = gen_string('alpha') param_value = gen_string('alpha') @@ -949,8 +766,6 @@ def test_positive_search_by_parameter_with_different_values( :expectedresults: Only one specific host is returned by search :BZ: 1725686 - - :CaseLevel: Integration """ param_name = gen_string('alpha') param_values = [gen_string('alpha'), gen_string('alphanumeric')] @@ -967,7 +782,7 @@ def test_positive_search_by_parameter_with_different_values( for host in hosts: assert session.host.search(host.name)[0]['Name'] == host.name # Check that search by parameter returns only one host in the list - for param_value, host in zip(param_values, hosts): + for param_value, host in zip(param_values, hosts, strict=True): values = session.host.search(f'params.{param_name} = {param_value}') assert len(values) == 1 assert values[0]['Name'] == host.name @@ -985,8 +800,6 @@ def test_positive_search_by_parameter_with_prefix( :expectedresults: All assigned hosts to organization are returned by search - - :CaseLevel: Integration """ param_name = gen_string('alpha') param_value = gen_string('alpha') @@ -1024,8 +837,6 @@ def test_positive_search_by_parameter_with_operator( search :BZ: 1463806 - - :CaseLevel: Integration """ param_name = gen_string('alpha') param_value = gen_string('alpha') @@ -1066,8 +877,6 @@ def test_positive_search_with_org_and_loc_context( :BZ: 1405496 :customerscenario: true - - :CaseLevel: Integration """ host = target_sat.api.Host(organization=function_org, location=function_location).create() with session: @@ -1091,8 +900,6 @@ def test_positive_search_by_org(session, smart_proxy_location, target_sat): result is returned :BZ: 1447958 - - :CaseLevel: Integration """ host = target_sat.api.Host(location=smart_proxy_location).create() org = host.organization.read() @@ -1113,8 +920,6 @@ def test_positive_validate_inherited_cv_lce_ansiblerole(session, target_sat, mod :expectedresults: Host's lifecycle environment, content view and ansible role match the ones specified in hostgroup. 
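# The inheritance test below first syncs the Ansible role to the internal
# proxy, then bakes the content view and lifecycle environment into a host
# group via the CLI factory. A minimal sketch of that setup, trimmed to the
# fields shown here and assuming `role_name`, `cv`, and `lce` exist as in the
# test body (the helper name is illustrative):
def _hostgroup_with_role(target_sat, role_name, cv, lce):
    target_sat.cli.Ansible.roles_sync(
        {'role-names': role_name, 'proxy-id': target_sat.nailgun_smart_proxy.id}
    )
    return target_sat.cli_factory.hostgroup(
        {'content-view-id': cv.id, 'lifecycle-environment-id': lce.id}
    )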
- :CaseLevel: Integration - :customerscenario: true :BZ: 1391656, 2094912 @@ -1138,7 +943,7 @@ def test_positive_validate_inherited_cv_lce_ansiblerole(session, target_sat, mod target_sat.cli.Ansible.roles_sync( {'role-names': SELECTED_ROLE, 'proxy-id': target_sat.nailgun_smart_proxy.id} ) - hostgroup = target_sat.cli_factory.make_hostgroup( + hostgroup = target_sat.cli_factory.hostgroup( { 'content-view-id': cv.id, 'lifecycle-environment-id': lce.id, @@ -1176,580 +981,6 @@ def test_positive_validate_inherited_cv_lce_ansiblerole(session, target_sat, mod assert host.name in [host.name for host in matching_hosts] -@pytest.mark.tier2 -def test_positive_global_registration_form( - session, module_activation_key, module_org, smart_proxy_location, default_os, target_sat -): - """Host registration form produces a correct curl command for various inputs - - :id: f81c2ec4-85b1-4372-8e63-464ddbf70296 - - :customerscenario: true - - :expectedresults: The curl command contains all required parameters - - :CaseLevel: Integration - """ - # rex and insights parameters are only specified in curl when differing from - # inerited parameters - result = ( - target_sat.api.CommonParameter() - .search(query={'search': 'name=host_registration_remote_execution'})[0] - .read() - ) - rex_value = not result.value - result = ( - target_sat.api.CommonParameter() - .search(query={'search': 'name=host_registration_insights'})[0] - .read() - ) - insights_value = not result.value - hostgroup = target_sat.api.HostGroup( - organization=[module_org], location=[smart_proxy_location] - ).create() - iface = 'eth0' - with session: - cmd = session.host.get_register_command( - { - 'advanced.setup_insights': 'Yes (override)' if insights_value else 'No (override)', - 'advanced.setup_rex': 'Yes (override)' if rex_value else 'No (override)', - 'general.insecure': True, - 'general.host_group': hostgroup.name, - 'general.operating_system': default_os.title, - 'general.activation_keys': module_activation_key.name, - 'advanced.update_packages': True, - 'advanced.rex_interface': iface, - } - ) - expected_pairs = [ - f'organization_id={module_org.id}', - f'activation_keys={module_activation_key.name}', - f'hostgroup_id={hostgroup.id}', - f'location_id={smart_proxy_location.id}', - f'operatingsystem_id={default_os.id}', - f'remote_execution_interface={iface}', - f'setup_insights={"true" if insights_value else "false"}', - f'setup_remote_execution={"true" if rex_value else "false"}', - f'{target_sat.hostname}', - 'insecure', - 'update_packages=true', - ] - for pair in expected_pairs: - assert pair in cmd - - -@pytest.mark.e2e -@pytest.mark.no_containers -@pytest.mark.tier3 -@pytest.mark.rhel_ver_match('[^6]') -def test_positive_global_registration_end_to_end( - session, - module_activation_key, - module_org, - smart_proxy_location, - default_os, - default_smart_proxy, - rhel_contenthost, - target_sat, -): - """Host registration form produces a correct registration command and host is - registered successfully with it, remote execution and insights are set up - - :id: a02658bf-097e-47a8-8472-5d9f649ba07a - - :customerscenario: true - - :BZ: 1993874 - - :expectedresults: Host is successfully registered, remote execution and insights - client work out of the box - - :parametrized: yes - - :CaseLevel: Integration - """ - # make sure global parameters for rex and insights are set to true - insights_cp = ( - target_sat.api.CommonParameter() - .search(query={'search': 'name=host_registration_insights'})[0] - .read() - ) - rex_cp = ( - 
target_sat.api.CommonParameter() - .search(query={'search': 'name=host_registration_remote_execution'})[0] - .read() - ) - - if not insights_cp.value: - target_sat.api.CommonParameter(id=insights_cp.id, value=1).update(['value']) - if not rex_cp.value: - target_sat.api.CommonParameter(id=rex_cp.id, value=1).update(['value']) - - # rex interface - iface = 'eth0' - # fill in the global registration form - with session: - cmd = session.host.get_register_command( - { - 'general.operating_system': default_os.title, - 'general.activation_keys': module_activation_key.name, - 'advanced.update_packages': True, - 'advanced.rex_interface': iface, - 'general.insecure': True, - } - ) - expected_pairs = [ - f'organization_id={module_org.id}', - f'activation_keys={module_activation_key.name}', - f'location_id={smart_proxy_location.id}', - f'operatingsystem_id={default_os.id}', - f'{default_smart_proxy.name}', - 'insecure', - 'update_packages=true', - ] - for pair in expected_pairs: - assert pair in cmd - # rhel repo required for insights client installation, - # syncing it to the satellite would take too long - rhelver = rhel_contenthost.os_version.major - if rhelver > 7: - rhel_contenthost.create_custom_repos(**settings.repos[f'rhel{rhelver}_os']) - else: - rhel_contenthost.create_custom_repos( - **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']} - ) - # make sure there will be package availabe for update - if rhel_contenthost.os_version.major == '6': - package = FAKE_1_CUSTOM_PACKAGE - repo_url = settings.repos.yum_1['url'] - else: - package = FAKE_7_CUSTOM_PACKAGE - repo_url = settings.repos.yum_3['url'] - rhel_contenthost.create_custom_repos(fake_yum=repo_url) - rhel_contenthost.execute(f"yum install -y {package}") - # run curl - result = rhel_contenthost.execute(cmd) - assert result.status == 0 - result = rhel_contenthost.execute('subscription-manager identity') - assert result.status == 0 - # Assert that a yum update was made this day ("Update" or "I, U" in history) - timezone_offset = rhel_contenthost.execute('date +"%:z"').stdout.strip() - tzinfo = datetime.strptime(timezone_offset, '%z').tzinfo - result = rhel_contenthost.execute('yum history | grep U') - assert result.status == 0 - assert datetime.now(tzinfo).strftime('%Y-%m-%d') in result.stdout - # Set "Connect to host using IP address" - target_sat.api.Parameter( - host=rhel_contenthost.hostname, - name='remote_execution_connect_by_ip', - parameter_type='boolean', - value='True', - ).create() - # run insights-client via REX - command = "insights-client --status" - invocation_command = target_sat.cli_factory.make_job_invocation( - { - 'job-template': 'Run Command - Script Default', - 'inputs': f'command={command}', - 'search-query': f"name ~ {rhel_contenthost.hostname}", - } - ) - # results provide all info but job invocation might not be finished yet - result = ( - target_sat.api.JobInvocation() - .search( - query={'search': f'id={invocation_command["id"]} and host={rhel_contenthost.hostname}'} - )[0] - .read() - ) - # make sure that task is finished - task_result = target_sat.wait_for_tasks( - search_query=(f'id = {result.task.id}'), search_rate=2, max_tries=60 - ) - assert task_result[0].result == 'success' - host = ( - target_sat.api.Host() - .search(query={'search': f'name={rhel_contenthost.hostname}'})[0] - .read() - ) - for interface in host.interface: - interface_result = target_sat.api.Interface(host=host.id).search( - query={'search': f'{interface.id}'} - )[0] - # more interfaces can be inside the host - if 
interface_result.identifier == iface: - assert interface_result.execution - - -@pytest.mark.tier2 -def test_global_registration_form_populate( - module_org, - session, - module_ak_with_cv, - module_lce, - module_promoted_cv, - default_architecture, - default_os, - target_sat, -): - """Host registration form should be populated automatically based on the host-group - - :id: b949e010-36b8-48b8-9907-36138342c72b - - :expectedresults: Some of the fields in the form should be populated based on host-group - e.g. activation key, operating system, life-cycle environment, host parameters for - remote-execution, insights setup. - - :CaseLevel: Integration - - :steps: - 1. create and sync repository - 2. create the content view and activation-key - 3. create the host-group with activation key, operating system, host-parameters - 4. Open the global registration form and select the same host-group - 5. check host registration form should be populated automatically based on the host-group - - :BZ: 2056469 - - :CaseAutomation: Automated - """ - hg_name = gen_string('alpha') - iface = gen_string('alpha') - group_params = {'name': 'host_packages', 'value': constants.FAKE_0_CUSTOM_PACKAGE} - target_sat.api.HostGroup( - name=hg_name, - organization=[module_org], - lifecycle_environment=module_lce, - architecture=default_architecture, - operatingsystem=default_os, - content_view=module_promoted_cv, - group_parameters_attributes=[group_params], - ).create() - with session: - session.hostgroup.update( - hg_name, - { - 'activation_keys.activation_keys': module_ak_with_cv.name, - }, - ) - cmd = session.host.get_register_command( - { - 'general.host_group': hg_name, - 'advanced.rex_interface': iface, - 'general.insecure': True, - }, - full_read=True, - ) - - assert hg_name in cmd['general']['host_group'] - assert module_ak_with_cv.name in cmd['general']['activation_key_helper'] - assert module_lce.name in cmd['advanced']['life_cycle_env_helper'] - assert constants.FAKE_0_CUSTOM_PACKAGE in cmd['advanced']['install_packages_helper'] - - -@pytest.mark.tier2 -def test_global_registration_with_capsule_host( - session, - capsule_configured, - rhel7_contenthost, - module_org, - module_location, - module_product, - default_os, - module_lce_library, - target_sat, -): - """Host registration form produces a correct registration command and host is - registered successfully with selected capsule from form. - - :id: 6356c6d0-ee45-4ad7-8a0e-484d3490bc58 - - :expectedresults: Host is successfully registered with capsule host, - remote execution and insights client work out of the box - - :CaseLevel: Integration - - :steps: - 1. create and sync repository - 2. create the content view and activation-key - 3. integrate capsule and sync content - 4. open the global registration form and select the same capsule - 5. 
check host is registered successfully with selected capsule - - :parametrized: yes - - :CaseAutomation: Automated - """ - client = rhel7_contenthost - repo = target_sat.api.Repository( - url=settings.repos.yum_1.url, - content_type=REPO_TYPE['yum'], - product=module_product, - ).create() - # Sync all repositories in the product - module_product.sync() - capsule = target_sat.api.Capsule(id=capsule_configured.nailgun_capsule.id).search( - query={'search': f'name={capsule_configured.hostname}'} - )[0] - module_org = target_sat.api.Organization(id=module_org.id).read() - module_org.smart_proxy.append(capsule) - module_location = target_sat.api.Location(id=module_location.id).read() - module_location.smart_proxy.append(capsule) - module_org.update(['smart_proxy']) - module_location.update(['smart_proxy']) - - # Associate the lifecycle environment with the capsule - capsule.content_add_lifecycle_environment(data={'environment_id': module_lce_library.id}) - result = capsule.content_lifecycle_environments() - # TODO result is not used, please add assert once you fix the test - - # Create a content view with the repository - cv = target_sat.api.ContentView(organization=module_org, repository=[repo]).create() - - # Publish new version of the content view - cv.publish() - cv = cv.read() - - assert len(cv.version) == 1 - - activation_key = target_sat.api.ActivationKey( - content_view=cv, environment=module_lce_library, organization=module_org - ).create() - - # Assert that a task to sync lifecycle environment to the capsule - # is started (or finished already) - sync_status = capsule.content_get_sync() - assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time'] - - # Wait till capsule sync finishes - for task in sync_status['active_sync_tasks']: - target_sat.api.ForemanTask(id=task['id']).poll() - with session: - session.organization.select(org_name=module_org.name) - session.location.select(loc_name=module_location.name) - cmd = session.host.get_register_command( - { - 'general.orgnization': module_org.name, - 'general.location': module_location.name, - 'general.operating_system': default_os.title, - 'general.capsule': capsule_configured.hostname, - 'general.activation_keys': activation_key.name, - 'general.insecure': True, - } - ) - client.create_custom_repos(rhel7=settings.repos.rhel7_os) - # run curl - client.execute(cmd) - result = client.execute('subscription-manager identity') - assert result.status == 0 - assert module_lce_library.name in result.stdout - assert module_org.name in result.stdout - - -@pytest.mark.tier2 -@pytest.mark.usefixtures('enable_capsule_for_registration') -@pytest.mark.no_containers -def test_global_registration_with_gpg_repo_and_default_package( - session, module_activation_key, default_os, default_smart_proxy, rhel7_contenthost -): - """Host registration form produces a correct registration command and host is - registered successfully with gpg repo enabled and have default package - installed. - - :id: b5738b20-e281-4d0b-ac78-dcdc177b8c9f - - :expectedresults: Host is successfully registered, gpg repo is enabled - and default package is installed. - - :CaseLevel: Integration - - :steps: - 1. create and sync repository - 2. create the content view and activation-key - 3. update the 'host_packages' parameter in organization with package name e.g. vim - 4. open the global registration form and update the gpg repo and key - 5. check host is registered successfully with installed same package - 6. 
check gpg repo is exist in registered host - - :parametrized: yes - """ - client = rhel7_contenthost - repo_name = 'foreman_register' - repo_url = settings.repos.gr_yum_repo.url - repo_gpg_url = settings.repos.gr_yum_repo.gpg_url - with session: - cmd = session.host.get_register_command( - { - 'general.operating_system': default_os.title, - 'general.capsule': default_smart_proxy.name, - 'general.activation_keys': module_activation_key.name, - 'general.insecure': True, - 'advanced.force': True, - 'advanced.install_packages': 'mlocate vim', - 'advanced.repository': repo_url, - 'advanced.repository_gpg_key_url': repo_gpg_url, - } - ) - - # rhel repo required for insights client installation, - # syncing it to the satellite would take too long - client.create_custom_repos(rhel7=settings.repos.rhel7_os) - # run curl - result = client.execute(cmd) - assert result.status == 0 - result = client.execute('yum list installed | grep mlocate') - assert result.status == 0 - assert 'mlocate' in result.stdout - result = client.execute(f'yum -v repolist {repo_name}') - assert result.status == 0 - assert repo_url in result.stdout - - -@pytest.mark.tier2 -@pytest.mark.rhel_ver_match('[^6].*') -def test_global_registration_upgrade_subscription_manager( - session, module_activation_key, module_os, rhel_contenthost -): - """Host registration form produces a correct registration command and - subscription-manager can be updated from a custom repository before - registration is completed. - - :id: b7a44f32-90b2-4fd6-b65b-5a3d2a5c5deb - - :customerscenario: true - - :expectedresults: Host is successfully registered, repo is enabled - on advanced tab and subscription-manager is updated. - - :CaseLevel: Integration - - :steps: - 1. Create activation-key - 2. Open the global registration form, add repo and activation key - 3. Add 'subscription-manager' to install packages field - 4. Check subscription-manager was installed from repo_name - - :parametrized: yes - - :BZ: 1923320 - """ - client = rhel_contenthost - repo_name = 'foreman_register' - rhel_ver = rhel_contenthost.os_version.major - repo_url = settings.repos.get(f'rhel{rhel_ver}_os') - if isinstance(repo_url, dict): - repo_url = repo_url['baseos'] - # Ensure subs-man is installed from repo_name by removing existing package. - result = client.execute('rpm --erase --nodeps subscription-manager') - assert result.status == 0 - with session: - cmd = session.host.get_register_command( - { - 'general.operating_system': module_os.title, - 'general.activation_keys': module_activation_key.name, - 'general.insecure': True, - 'advanced.force': True, - 'advanced.install_packages': 'subscription-manager', - 'advanced.repository': repo_url, - } - ) - - # run curl - result = client.execute(cmd) - assert result.status == 0 - result = client.execute('yum repolist') - assert repo_name in result.stdout - assert result.status == 0 - - -@pytest.mark.tier3 -@pytest.mark.usefixtures('enable_capsule_for_registration') -def test_global_re_registration_host_with_force_ignore_error_options( - session, module_activation_key, default_os, default_smart_proxy, rhel7_contenthost -): - """If the ignore_error and force checkbox is checked then registered host can - get re-registered without any error. - - :id: 8f0ecc13-5d18-4adb-acf5-3f3276dccbb7 - - :expectedresults: Verify the force and ignore checkbox options - - :CaseLevel: Integration - - :steps: - 1. create and sync repository - 2. create the content view and activation-key - 3. 
open the global registration form and select --force and --Ignore Errors option - 4. registered the host with generated curl command - 5. re-register the same host again and check it is getting registered - - :parametrized: yes - """ - client = rhel7_contenthost - with session: - cmd = session.host.get_register_command( - { - 'general.operating_system': default_os.title, - 'general.capsule': default_smart_proxy.name, - 'general.activation_keys': module_activation_key.name, - 'general.insecure': True, - 'advanced.force': True, - 'advanced.ignore_error': True, - } - ) - client.execute(cmd) - result = client.execute('subscription-manager identity') - assert result.status == 0 - # rerun the register command - client.execute(cmd) - result = client.execute('subscription-manager identity') - assert result.status == 0 - - -@pytest.mark.tier2 -@pytest.mark.usefixtures('enable_capsule_for_registration') -def test_global_registration_token_restriction( - session, module_activation_key, rhel7_contenthost, default_os, default_smart_proxy, target_sat -): - """Global registration token should be only used for registration call, it - should be restricted for any other api calls. - - :id: 4528b5c6-0a6d-40cd-857a-68b76db2179b - - :expectedresults: global registration token should be restricted for any api calls - other than the registration - - :CaseLevel: Integration - - :steps: - 1. open the global registration form and generate the curl token - 2. use that curl token to execute other api calls e.g. GET /hosts, /users - - :parametrized: yes - """ - client = rhel7_contenthost - with session: - cmd = session.host.get_register_command( - { - 'general.operating_system': default_os.title, - 'general.capsule': default_smart_proxy.name, - 'general.activation_keys': module_activation_key.name, - 'general.insecure': True, - } - ) - - pattern = re.compile("Authorization.*(?=')") - auth_header = re.search(pattern, cmd).group() - - # build curl - curl_users = f'curl -X GET -k -H {auth_header} -i {target_sat.url}/api/users/' - curl_hosts = f'curl -X GET -k -H {auth_header} -i {target_sat.url}/api/hosts/' - for curl_cmd in (curl_users, curl_hosts): - result = client.execute(curl_cmd) - assert result.status == 0 - 'Unable to authenticate user' in result.stdout - - @pytest.mark.tier4 @pytest.mark.upgrade def test_positive_bulk_delete_host(session, smart_proxy_location, target_sat, function_org): @@ -1760,8 +991,6 @@ def test_positive_bulk_delete_host(session, smart_proxy_location, target_sat, fu :expectedresults: All selected hosts should be deleted successfully :BZ: 1368026 - - :CaseLevel: System """ host_template = target_sat.api.Host(organization=function_org, location=smart_proxy_location) host_template.create_missing() @@ -1789,115 +1018,6 @@ def test_positive_bulk_delete_host(session, smart_proxy_location, target_sat, fu assert not values['table'] -@pytest.mark.on_premises_provisioning -@pytest.mark.tier4 -def test_positive_provision_end_to_end( - session, - module_org, - smart_proxy_location, - module_libvirt_domain, - module_libvirt_hostgroup, - module_libvirt_resource, - target_sat, -): - """Provision Host on libvirt compute resource - - :id: 2678f95f-0c0e-4b46-a3c1-3f9a954d3bde - - :expectedresults: Host is provisioned successfully - - :CaseLevel: System - """ - hostname = gen_string('alpha').lower() - root_pwd = gen_string('alpha', 15) - with session: - session.host.create( - { - 'host.name': hostname, - 'host.organization': module_org.name, - 'host.location': smart_proxy_location.name, - 'host.hostgroup': 
module_libvirt_hostgroup.name, - 'host.inherit_deploy_option': False, - 'host.deploy': module_libvirt_resource, - 'provider_content.virtual_machine.memory': '2 GB', - 'operating_system.root_password': root_pwd, - 'interfaces.interface.network_type': 'Physical (Bridge)', - 'interfaces.interface.network': settings.vlan_networking.bridge, - 'additional_information.comment': 'Libvirt provision using valid data', - } - ) - name = f'{hostname}.{module_libvirt_domain.name}' - assert session.host.search(name)[0]['Name'] == name - wait_for( - lambda: session.host.get_details(name)['properties']['properties_table']['Build'] - != 'Pending installation', - timeout=1800, - delay=30, - fail_func=session.browser.refresh, - silent_failure=True, - handle_exception=True, - ) - target_sat.api.Host( - id=target_sat.api.Host().search(query={'search': f'name={name}'})[0].id - ).delete() - assert ( - session.host.get_details(name)['properties']['properties_table']['Build'] == 'Installed' - ) - - -@pytest.mark.on_premises_provisioning -@pytest.mark.run_in_one_thread -@pytest.mark.tier4 -@pytest.mark.parametrize('setting_update', ['destroy_vm_on_host_delete'], indirect=True) -def test_positive_delete_libvirt( - session, - module_org, - smart_proxy_location, - module_libvirt_domain, - module_libvirt_hostgroup, - module_libvirt_resource, - setting_update, - remove_vm_on_delete, - target_sat, -): - """Create a new Host on libvirt compute resource and delete it - afterwards - - :id: 6a9175e7-bb96-4de3-bc45-ba6c10dd14a4 - - :customerscenario: true - - :expectedresults: Proper warning message is displayed on delete attempt - and host deleted successfully afterwards - - :BZ: 1243223 - - :CaseLevel: System - """ - hostname = gen_string('alpha').lower() - root_pwd = gen_string('alpha', 15) - with session: - session.host.create( - { - 'host.name': hostname, - 'host.organization': module_org.name, - 'host.location': smart_proxy_location.name, - 'host.hostgroup': module_libvirt_hostgroup.name, - 'host.inherit_deploy_option': False, - 'host.deploy': module_libvirt_resource, - 'provider_content.virtual_machine.memory': '1 GB', - 'operating_system.root_password': root_pwd, - 'interfaces.interface.network_type': 'Physical (Bridge)', - 'interfaces.interface.network': settings.vlan_networking.bridge, - 'additional_information.comment': 'Delete host that provisioned on Libvirt', - } - ) - name = f'{hostname}.{module_libvirt_domain.name}' - assert session.host.search(name)[0]['Name'] == name - session.host.delete(name) - assert not target_sat.api.Host().search(query={'search': f'name="{hostname}"'}) - - # ------------------------------ NEW HOST UI DETAILS ---------------------------- @pytest.mark.tier4 def test_positive_read_details_page_from_new_ui(session, host_ui_options): @@ -1906,8 +1026,6 @@ def test_positive_read_details_page_from_new_ui(session, host_ui_options): :id: ef0c5942-9049-11ec-8029-98fa9b6ecd5a :expectedresults: Host is created and has expected content - - :CaseLevel: System """ with session: api_values, host_name = host_ui_options @@ -1925,45 +1043,6 @@ def test_positive_read_details_page_from_new_ui(session, host_ui_options): assert values['overview']['details']['details']['comment'] == 'Host with fake data' -@pytest.mark.tier4 -@pytest.mark.rhel_ver_match('8') -def test_rex_new_ui(session, target_sat, rex_contenthost): - """Run remote execution using the new host details page - - :id: ee625595-4995-43b2-9e6d-633c9b33ff93 - - :steps: - 1. Navigate to Overview tab - 2. Schedule a job - 3. 
Wait for the job to finish - 4. Job is visible in Recent jobs card - - :expectedresults: Remote execution succeeded and the job is visible on Recent jobs card on - Overview tab - - :CaseLevel: System - """ - hostname = rex_contenthost.hostname - job_args = { - 'job_category': 'Commands', - 'job_template': 'Run Command - Script Default', - 'template_content.command': 'ls', - } - with session: - session.location.select(loc_name=DEFAULT_LOC) - session.host_new.schedule_job(hostname, job_args) - task_result = target_sat.wait_for_tasks( - search_query=(f'Remote action: Run ls on {hostname}'), - search_rate=2, - max_tries=30, - ) - task_status = target_sat.api.ForemanTask(id=task_result[0].id).poll() - assert task_status['result'] == 'success' - recent_jobs = session.host_new.get_details(hostname, "overview.recent_jobs")['overview'] - assert "Run ls" == recent_jobs['recent_jobs']['finished']['table'][0]['column0'] - assert "succeeded" == recent_jobs['recent_jobs']['finished']['table'][0]['column2'] - - @pytest.mark.tier4 def test_positive_manage_table_columns(session, current_sat_org, current_sat_location): """Set custom columns of the hosts table. @@ -1978,8 +1057,6 @@ def test_positive_manage_table_columns(session, current_sat_org, current_sat_loc :expectedresults: Check if the custom columns were set properly, i.e., are displayed or not displayed in the table. - :CaseLevel: System - :BZ: 1813274 :customerscenario: true @@ -1989,6 +1066,7 @@ def test_positive_manage_table_columns(session, current_sat_org, current_sat_loc 'Last report': False, 'Comment': False, 'Installable updates': True, + 'RHEL Lifecycle status': False, 'Registered': True, 'Last checkin': True, 'IPv4': True, @@ -2028,8 +1106,6 @@ def test_positive_host_details_read_templates( :BZ: 2128038 :customerscenario: true - - :CaseLevel: System """ host = target_sat.api.Host().search(query={'search': f'name={target_sat.hostname}'})[0] api_templates = [template['name'] for template in host.list_provisioning_templates()] @@ -2080,30 +1156,27 @@ def test_positive_update_delete_package( 9. 
Delete the package :expectedresults: The package is updated and deleted - """ client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) with session: session.location.select(loc_name=DEFAULT_LOC) if not is_open('BZ:2132680'): product_name = module_repos_collection_with_setup.custom_product.name repos = session.host_new.get_repo_sets(client.hostname, product_name) - assert 'Enabled' == repos[0].status + assert repos[0].status == 'Enabled' session.host_new.override_repo_sets( client.hostname, product_name, "Override to disabled" ) - assert 'Disabled' == repos[0].status + assert repos[0].status == 'Disabled' session.host_new.install_package(client.hostname, FAKE_8_CUSTOM_PACKAGE_NAME) result = client.run(f'yum install -y {FAKE_7_CUSTOM_PACKAGE}') assert result.status != 0 session.host_new.override_repo_sets( client.hostname, product_name, "Override to enabled" ) - assert 'Enabled' == repos[0].status + assert repos[0].status == 'Enabled' # refresh repos on system client.run('subscription-manager repos') # install package @@ -2160,7 +1233,7 @@ def test_positive_update_delete_package( task_status = target_sat.api.ForemanTask(id=task_result[0].id).poll() assert task_status['result'] == 'success' packages = session.host_new.get_packages(client.hostname, FAKE_8_CUSTOM_PACKAGE_NAME) - assert 'table' not in packages.keys() + assert 'table' not in packages result = client.run(f'rpm -q {FAKE_8_CUSTOM_PACKAGE}') assert result.status != 0 @@ -2200,14 +1273,11 @@ def test_positive_apply_erratum( 5. Select errata and apply via rex. :expectedresults: The erratum is applied - """ # install package client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) errata_id = settings.repos.yum_3.errata[25] client.run(f'yum install -y {FAKE_7_CUSTOM_PACKAGE}') result = client.run(f'rpm -q {FAKE_7_CUSTOM_PACKAGE}') @@ -2240,7 +1310,7 @@ def test_positive_apply_erratum( assert task_status['result'] == 'success' # verify values = session.host_new.get_details(client.hostname, widget_names='content.errata') - assert 'table' not in values['content']['errata'].keys() + assert 'table' not in values['content']['errata'] result = client.run( 'yum update --assumeno --security | grep "No packages needed for security"' ) @@ -2283,14 +1353,11 @@ def test_positive_crud_module_streams( 5. Reset the Module stream :expectedresults: Module streams can be enabled, installed, removed and reset using the new UI. 
- """ module_name = 'duck' client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) with session: session.location.select(loc_name=DEFAULT_LOC) streams = session.host_new.get_module_streams(client.hostname, module_name) @@ -2375,8 +1442,6 @@ def test_positive_inherit_puppet_env_from_host_group_when_action( :expectedresults: Expected puppet environment is inherited to the host :BZ: 1414914 - - :CaseLevel: System """ host = session_puppet_enabled_sat.api.Host( organization=module_puppet_org, location=module_puppet_loc @@ -2429,8 +1494,6 @@ def test_positive_create_with_puppet_class( :id: d883f169-1105-435c-8422-a7160055734a :expectedresults: Host is created and contains correct puppet class - - :CaseLevel: System """ host_template = session_puppet_enabled_sat.api.Host( @@ -2490,8 +1553,6 @@ def test_positive_inherit_puppet_env_from_host_group_when_create( :expectedresults: Expected puppet environment is inherited to the form :BZ: 1414914 - - :CaseLevel: Integration """ hg_name = gen_string('alpha') @@ -2543,8 +1604,6 @@ def test_positive_set_multi_line_and_with_spaces_parameter_value( 2. host parameter value is the same when restored from yaml format :BZ: 1315282 - - :CaseLevel: System """ host_template = session_puppet_enabled_sat.api.Host( organization=module_puppet_org, location=module_puppet_loc @@ -2599,13 +1658,11 @@ def test_positive_tracer_enable_reload(tracer_install_host, target_sat): :id: c9ebd4a8-6db3-4d0e-92a2-14951c26769b - :caseComponent: katello-tracer - - :Team: Phoenix + :CaseComponent: katello-tracer - :CaseLevel: System + :Team: Phoenix-subscriptions - :Steps: + :steps: 1. Register a RHEL host to Satellite. 2. Prepare katello-tracer to be installed 3. 
Navigate to the Traces tab in New Host UI @@ -2613,7 +1670,6 @@ def test_positive_tracer_enable_reload(tracer_install_host, target_sat): :expectedresults: The Tracer tab message updates accordingly during the process, and displays the correct title for the state - """ host = ( target_sat.api.Host().search(query={'search': tracer_install_host.hostname})[0].read_json() @@ -2635,3 +1691,203 @@ def test_positive_tracer_enable_reload(tracer_install_host, target_sat): ) tracer = session.host_new.get_tracer(tracer_install_host.hostname) assert tracer['title'] == "No applications to restart" + + +@pytest.mark.tier2 +def test_all_hosts_delete(target_sat, function_org, function_location, new_host_ui): + """Create a host and delete it through All Hosts UI + + :id: 42b4560c-bb57-4c58-928e-e5fd5046b93f + + :expectedresults: Successful deletion of a host through the table dropdown + + :CaseComponent: Hosts-Content + + :Team: Phoenix-subscriptions + """ + host = target_sat.api.Host(organization=function_org, location=function_location).create() + with target_sat.ui_session() as session: + session.organization.select(function_org.name) + session.location.select(function_location.name) + assert session.all_hosts.delete(host.name) + + +@pytest.mark.tier2 +def test_all_hosts_bulk_delete(target_sat, function_org, function_location, new_host_ui): + """Create several hosts, and delete them via Bulk Actions in All Hosts UI + + :id: af1b4a66-dd83-47c3-904b-e8627119cc53 + + :expectedresults: Successful deletion of multiple hosts at once through Bulk Action + + :CaseComponent: Hosts-Content + + :Team: Phoenix-subscriptions + """ + for _ in range(10): + target_sat.api.Host(organization=function_org, location=function_location).create() + with target_sat.ui_session() as session: + session.organization.select(function_org.name) + session.location.select(function_location.name) + assert session.all_hosts.bulk_delete_all() + + +@pytest.fixture(scope='module') +def change_content_source_prep( + module_target_sat, + module_sca_manifest_org, + module_capsule_configured, + module_location, +): + """ + This fixture sets up all the necessary entities for tests + exercising the change of the host's content source. + + It creates a new product in the organization, + creates a new repository in the product, + creates a new lce, + creates a new CV in the organization, adds the repository to the CV, + publishes the CV, and promotes the published version to the lifecycle environment, + creates a new activation key for the CV in the lce, + registers the RHEL content host with the activation key, + updates the capsule's taxonomies, + and adds the lifecycle environment to the capsule's content.
+ + Fixture returns module_target_sat, org, lce, capsule, content_view, loc, ak + """ + product_name, lce_name = (gen_string('alpha') for _ in range(2)) + + org = module_sca_manifest_org + loc = module_location + + product = module_target_sat.api.Product( + name=product_name, + organization=org.id, + ).create() + + repository = module_target_sat.api.Repository( + product=product, + content_type=REPO_TYPE['file'], + url=CUSTOM_FILE_REPO, + ).create() + + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'name': lce_name, 'organization-id': org.id} + ) + + # Create CV + content_view = module_target_sat.api.ContentView(organization=org.id).create() + # Add repos to CV + content_view.repository = [repository] + content_view = content_view.update(['repository']) + # Publish that CV and promote it + content_view.publish() + content_view.read().version[0].promote(data={'environment_ids': lce.id}) + + ak = module_target_sat.api.ActivationKey( + content_view=content_view, organization=org.id, environment=lce.id + ).create() + + # Edit capsule's taxonomies + capsule = module_target_sat.cli.Capsule.update( + { + 'name': module_capsule_configured.hostname, + 'organization-ids': org.id, + 'location-ids': loc.id, + } + ) + + module_target_sat.cli.Capsule.content_add_lifecycle_environment( + { + 'id': module_capsule_configured.nailgun_capsule.id, + 'organization-id': org.id, + 'lifecycle-environment': lce.name, + } + ) + + return module_target_sat, org, lce, capsule, content_view, loc, ak + + +@pytest.mark.no_containers +@pytest.mark.rhel_ver_match('[78]') +def test_change_content_source(session, change_content_source_prep, rhel_contenthost): + """ + This test exercises different ways to change a host's content source + + :id: 5add68c3-16b1-496d-9b24-f5388013351d + + :expectedresults: Job invocation page should be correctly generated + by the change content source action; the generated script should also be correct + + :CaseComponent: Hosts-Content + + :Team: Phoenix-content + """ + + module_target_sat, org, lce, capsule, content_view, loc, ak = change_content_source_prep + + rhel_contenthost.register(org, loc, ak.name, module_target_sat) + + with module_target_sat.ui_session() as session: + session.organization.select(org_name=org.name) + session.location.select(loc_name=ANY_CONTEXT['location']) + + # STEP 1: Test the part where you use "Update hosts manually" button + # Set the content source to the checked-out capsule + # Check that generated script contains correct name of new content source + rhel_contenthost_pre_values = rhel_contenthost.nailgun_host.content_facet_attributes + generated_script = session.host.change_content_source_get_script( + entities_list=[ + rhel_contenthost.hostname, + ], + content_source=capsule[0]['name'], + lce=lce.name, + content_view=content_view.name, + ) + rhel_contenthost_post_values = rhel_contenthost.nailgun_host.content_facet_attributes + content_source_from_script = re.search(r'--server.hostname=\"(.*?)\"', generated_script) + + assert content_source_from_script.group(1) == capsule[0]['name'] + assert rhel_contenthost_post_values['content_source']['name'] == capsule[0]['name'] + assert rhel_contenthost_post_values['content_view']['name'] == content_view.name + assert rhel_contenthost_post_values['lifecycle_environment']['name'] == lce.name + + session.browser.refresh() + + # Step 2: Test the part where you use "Run job invocation" button + # Change the rhel_contenthost's content source back to what it was before STEP 1 + # Check the prefilled job
invocation page + session.host.change_content_source( + entities_list=[ + rhel_contenthost.hostname, + ], + content_source=rhel_contenthost_pre_values['content_source']['name'], + lce=rhel_contenthost_pre_values['lifecycle_environment']['name'], + content_view=rhel_contenthost_pre_values['content_view']['name'], + run_job_invocation=True, + ) + # Getting the data from the prefilled job invocation form + selected_category_and_template = session.jobinvocation.get_job_category_and_template() + selected_targeted_hosts = session.jobinvocation.get_targeted_hosts() + + assert selected_category_and_template['job_category'] == 'Katello' + assert ( + selected_category_and_template['job_template'] + == 'Configure host for new content source' + ) + assert selected_targeted_hosts['selected_hosts'] == [rhel_contenthost.hostname] + + session.jobinvocation.submit_prefilled_view() + rhel_contenthost_post_values = rhel_contenthost.nailgun_host.content_facet_attributes + assert ( + rhel_contenthost_post_values['content_source']['name'] + == rhel_contenthost_pre_values['content_source']['name'] + ) + assert ( + rhel_contenthost_post_values['content_view']['name'] + == rhel_contenthost_pre_values['content_view']['name'] + ) + assert ( + rhel_contenthost_post_values['lifecycle_environment']['name'] + == rhel_contenthost_pre_values['lifecycle_environment']['name'] + ) diff --git a/tests/foreman/ui/test_hostcollection.py b/tests/foreman/ui/test_hostcollection.py index b9b75313e3c..06af17c4a5a 100644 --- a/tests/foreman/ui/test_hostcollection.py +++ b/tests/foreman/ui/test_hostcollection.py @@ -4,23 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: HostCollections :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import time -import pytest from broker import Broker -from nailgun import entities +from manifester import Manifester +import pytest from robottelo import constants from robottelo.config import settings @@ -29,25 +24,34 @@ @pytest.fixture(scope='module') -def module_org(): - org = entities.Organization().create() +def module_manifest(): + with Manifester(manifest_category=settings.manifest.entitlement) as manifest: + yield manifest + + +@pytest.fixture(scope='module') +def module_org_with_parameter(module_target_sat, module_manifest): # adding remote_execution_connect_by_ip=Yes at org level - entities.Parameter( + org = module_target_sat.api.Organization(simple_content_access=False).create() + module_target_sat.api.Parameter( name='remote_execution_connect_by_ip', parameter_type='boolean', value='Yes', organization=org.id, ).create() + module_target_sat.upload_manifest(org.id, module_manifest.content) return org @pytest.fixture(scope='module') -def module_lce(module_org): - return entities.LifecycleEnvironment(organization=module_org).create() +def module_lce(module_target_sat, module_org_with_parameter): + return module_target_sat.api.LifecycleEnvironment( + organization=module_org_with_parameter + ).create() @pytest.fixture(scope='module') -def module_repos_collection(module_org, module_lce, module_target_sat): +def module_repos_collection(module_org_with_parameter, module_lce, module_target_sat): repos_collection = module_target_sat.cli_factory.RepositoryCollection( distro=constants.DISTRO_DEFAULT, repositories=[ @@ -56,7 +60,7 @@ def module_repos_collection(module_org, module_lce, module_target_sat): module_target_sat.cli_factory.YumRepository(url=settings.repos.yum_6.url), ], ) - repos_collection.setup_content(module_org.id,
module_lce.id, upload_manifest=True) + repos_collection.setup_content(module_org_with_parameter.id, module_lce.id) return repos_collection @@ -86,23 +90,27 @@ def vm_content_hosts_module_stream( @pytest.fixture -def vm_host_collection(module_org, vm_content_hosts): +def vm_host_collection(module_target_sat, module_org_with_parameter, vm_content_hosts): host_ids = [ - entities.Host().search(query={'search': f'name={host.hostname}'})[0].id + module_target_sat.api.Host().search(query={'search': f'name={host.hostname}'})[0].id for host in vm_content_hosts ] - host_collection = entities.HostCollection(host=host_ids, organization=module_org).create() - return host_collection + return module_target_sat.api.HostCollection( + host=host_ids, organization=module_org_with_parameter + ).create() @pytest.fixture -def vm_host_collection_module_stream(module_org, vm_content_hosts_module_stream): +def vm_host_collection_module_stream( + module_target_sat, module_org_with_parameter, vm_content_hosts_module_stream +): host_ids = [ - entities.Host().search(query={'search': f'name={host.hostname}'})[0].id + module_target_sat.api.Host().search(query={'search': f'name={host.hostname}'})[0].id for host in vm_content_hosts_module_stream ] - host_collection = entities.HostCollection(host=host_ids, organization=module_org).create() - return host_collection + return module_target_sat.api.HostCollection( + host=host_ids, organization=module_org_with_parameter + ).create() def _run_remote_command_on_content_hosts(command, vm_clients): @@ -128,7 +136,7 @@ def _is_package_installed( if result.status == 0 and expect_installed: installed += 1 break - elif result.status != 0 and not expect_installed: + if result.status != 0 and not expect_installed: installed -= 1 break if ind < retries - 1: @@ -138,8 +146,7 @@ def _is_package_installed( if expect_installed: return installed == len(vm_clients) - else: - return bool(installed) + return bool(installed) def _install_package_with_assertion(vm_clients, package_name): @@ -199,21 +206,23 @@ def _get_content_repository_urls(repos_collection, lce, content_view, module_tar @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_end_to_end(session, module_org, smart_proxy_location): +def test_positive_end_to_end( + session, module_target_sat, module_org_with_parameter, smart_proxy_location +): """Perform end to end testing for host collection component :id: 1d40bc74-8e05-42fa-b6e3-2999dc3b730d :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ hc_name = gen_string('alpha') new_name = gen_string('alpha') description = gen_string('alpha') - host = entities.Host(organization=module_org, location=smart_proxy_location).create() + host = module_target_sat.api.Host( + organization=module_org_with_parameter, location=smart_proxy_location + ).create() with session: session.location.select(smart_proxy_location.name) # Create new host collection @@ -244,7 +253,9 @@ def test_positive_end_to_end(session, module_org, smart_proxy_location): @pytest.mark.tier2 -def test_negative_install_via_remote_execution(session, module_org, smart_proxy_location): +def test_negative_install_via_remote_execution( + session, module_target_sat, module_org_with_parameter, smart_proxy_location +): """Test basic functionality of the Hosts collection UI install package via remote execution. 
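For reviewers: the `_is_package_installed` hunks above show only the two rewritten branches of the helper. The refactor is purely structural, since the first branch always breaks, `elif` was equivalent to `if`, and the early `return` makes the trailing `else` redundant. A minimal sketch of the resulting control flow, in which the signature, the `rpm -q` probe, and the retry delay are assumptions reconstructed from the visible context rather than verbatim source, would be:

import time

def _is_package_installed(vm_clients, package_name, expect_installed=True, retries=10):
    # Count hosts whose package state matches the expectation.
    installed = 0
    for client in vm_clients:
        for ind in range(retries):
            result = client.run(f'rpm -q {package_name}')  # assumed probe command
            if result.status == 0 and expect_installed:
                installed += 1
                break
            # The branch above breaks, so a plain `if` behaves like the old `elif`.
            if result.status != 0 and not expect_installed:
                installed -= 1
                break
            if ind < retries - 1:
                time.sleep(5)  # assumed delay between retries
    if expect_installed:
        return installed == len(vm_clients)
    # The early return above makes a trailing `else` unnecessary.
    return bool(installed)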
@@ -254,14 +265,16 @@ def test_negative_install_via_remote_execution(session, module_org, smart_proxy_ :expectedresults: The package is not installed, and the job invocation status contains some expected values: hosts information, job status. - - :CaseLevel: Integration """ hosts = [] for _ in range(2): - hosts.append(entities.Host(organization=module_org, location=smart_proxy_location).create()) - host_collection = entities.HostCollection( - host=[host.id for host in hosts], organization=module_org + hosts.append( + module_target_sat.api.Host( + organization=module_org_with_parameter, location=smart_proxy_location + ).create() + ) + host_collection = module_target_sat.api.HostCollection( + host=[host.id for host in hosts], organization=module_org_with_parameter ).create() with session: session.location.select(smart_proxy_location.name) @@ -278,7 +291,9 @@ def test_negative_install_via_remote_execution(session, module_org, smart_proxy_ @pytest.mark.tier2 -def test_negative_install_via_custom_remote_execution(session, module_org, smart_proxy_location): +def test_negative_install_via_custom_remote_execution( + session, module_target_sat, module_org_with_parameter, smart_proxy_location +): """Test basic functionality of the Hosts collection UI install package via remote execution - customize first. @@ -288,14 +303,16 @@ def test_negative_install_via_custom_remote_execution(session, module_org, smart :expectedresults: The package is not installed, and the job invocation status contains some expected values: hosts information, job status. - - :CaseLevel: Integration """ hosts = [] for _ in range(2): - hosts.append(entities.Host(organization=module_org, location=smart_proxy_location).create()) - host_collection = entities.HostCollection( - host=[host.id for host in hosts], organization=module_org + hosts.append( + module_target_sat.api.Host( + organization=module_org_with_parameter, location=smart_proxy_location + ).create() + ) + host_collection = module_target_sat.api.HostCollection( + host=[host.id for host in hosts], organization=module_org_with_parameter ).create() with session: session.location.select(smart_proxy_location.name) @@ -313,23 +330,21 @@ def test_negative_install_via_custom_remote_execution(session, module_org, smart @pytest.mark.upgrade @pytest.mark.tier3 -def test_positive_add_host(session): +def test_positive_add_host(session, module_target_sat): """Check if host can be added to Host Collection :id: 80824c9f-15a1-4f76-b7ac-7d9ca9f6ed9e :expectedresults: Host is added to Host Collection successfully - - :CaseLevel: System """ hc_name = gen_string('alpha') - org = entities.Organization().create() - loc = entities.Location().create() - cv = entities.ContentView(organization=org).create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = module_target_sat.api.Organization().create() + loc = module_target_sat.api.Location().create() + cv = module_target_sat.api.ContentView(organization=org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() cv.publish() cv.read().version[0].promote(data={'environment_ids': lce.id}) - host = entities.Host( + host = module_target_sat.api.Host( organization=org, location=loc, content_facet_attributes={'content_view_id': cv.id, 'lifecycle_environment_id': lce.id}, @@ -347,7 +362,7 @@ def test_positive_add_host(session): @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_install_package( - session,
module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Install a package to hosts inside host collection remotely @@ -355,11 +370,9 @@ def test_positive_install_package( :expectedresults: Package was successfully installed on all the hosts in host collection - - :CaseLevel: System """ with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) session.hostcollection.manage_packages( vm_host_collection.name, @@ -373,7 +386,7 @@ def test_positive_install_package( @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_remove_package( - session, module_org, smart_proxy_location, vm_content_hosts, vm_host_collection + session, module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Remove a package from hosts inside host collection remotely @@ -381,12 +394,10 @@ def test_positive_remove_package( :expectedresults: Package was successfully removed from all the hosts in host collection - - :CaseLevel: System """ _install_package_with_assertion(vm_content_hosts, constants.FAKE_0_CUSTOM_PACKAGE) with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) session.hostcollection.manage_packages( vm_host_collection.name, @@ -401,7 +412,7 @@ def test_positive_remove_package( @pytest.mark.tier3 def test_positive_upgrade_package( - session, module_org, smart_proxy_location, vm_content_hosts, vm_host_collection + session, module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Upgrade a package on hosts inside host collection remotely @@ -409,12 +420,10 @@ def test_positive_upgrade_package( :expectedresults: Package was successfully upgraded on all the hosts in host collection - - :CaseLevel: System """ _install_package_with_assertion(vm_content_hosts, constants.FAKE_1_CUSTOM_PACKAGE) with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) session.hostcollection.manage_packages( vm_host_collection.name, @@ -428,7 +437,7 @@ def test_positive_upgrade_package( @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_install_package_group( - session, module_org, smart_proxy_location, vm_content_hosts, vm_host_collection + session, module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Install a package group to hosts inside host collection remotely @@ -436,11 +445,9 @@ def test_positive_install_package_group( :expectedresults: Package group was successfully installed on all the hosts in host collection - - :CaseLevel: System """ with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) session.hostcollection.manage_packages( vm_host_collection.name, @@ -455,7 +462,7 @@ def test_positive_install_package_group( @pytest.mark.tier3 def test_positive_remove_package_group( - session, module_org, smart_proxy_location, vm_content_hosts, vm_host_collection + session, module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Remove a package group from hosts inside host collection remotely @@ -463,8 +470,6 @@ def 
test_positive_remove_package_group( :expectedresults: Package group was successfully removed on all the hosts in host collection - - :CaseLevel: System """ for client in vm_content_hosts: result = client.run(f'yum groups install -y {constants.FAKE_0_CUSTOM_PACKAGE_GROUP_NAME}') @@ -472,7 +477,7 @@ def test_positive_remove_package_group( for package in constants.FAKE_0_CUSTOM_PACKAGE_GROUP: assert _is_package_installed(vm_content_hosts, package) with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) session.hostcollection.manage_packages( vm_host_collection.name, @@ -488,7 +493,7 @@ def test_positive_remove_package_group( @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_install_errata( - session, module_org, smart_proxy_location, vm_content_hosts, vm_host_collection + session, module_org_with_parameter, smart_proxy_location, vm_content_hosts, vm_host_collection ): """Install an errata to the hosts inside host collection remotely @@ -496,12 +501,10 @@ def test_positive_install_errata( :expectedresults: Errata was successfully installed in all the hosts in host collection - - :CaseLevel: System """ _install_package_with_assertion(vm_content_hosts, constants.FAKE_1_CUSTOM_PACKAGE) with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) result = session.hostcollection.install_errata( vm_host_collection.name, @@ -518,7 +521,7 @@ def test_positive_install_errata( @pytest.mark.tier3 def test_positive_change_assigned_content( session, - module_org, + module_org_with_parameter, smart_proxy_location, module_lce, vm_content_hosts, @@ -566,13 +569,11 @@ def test_positive_change_assigned_content( names :BZ: 1315280 - - :CaseLevel: System """ new_lce_name = gen_string('alpha') new_cv_name = gen_string('alpha') new_lce = module_target_sat.api.LifecycleEnvironment( - name=new_lce_name, organization=module_org + name=new_lce_name, organization=module_org_with_parameter ).create() content_view = module_target_sat.api.ContentView( id=module_repos_collection.setup_content_data['content_view']['id'] @@ -603,7 +604,7 @@ def test_positive_change_assigned_content( assert len(client_repo_urls) assert set(expected_repo_urls) == set(client_repo_urls) with session: - session.organization.select(org_name=module_org.name) + session.organization.select(org_name=module_org_with_parameter.name) session.location.select(smart_proxy_location.name) task_values = session.hostcollection.change_assigned_content( vm_host_collection.name, new_lce.name, new_content_view.name @@ -628,12 +629,14 @@ def test_positive_change_assigned_content( @pytest.mark.tier3 -def test_negative_hosts_limit(session, module_org, smart_proxy_location): +def test_negative_hosts_limit( + session, module_target_sat, module_org_with_parameter, smart_proxy_location +): """Check that Host limit actually limits usage :id: 57b70977-2110-47d9-be3b-461ad15c70c7 - :Steps: + :steps: 1. Create Host Collection entity that can contain only one Host (using Host Limit field) 2. Create Host and add it to Host Collection. 
Check that it was @@ -643,20 +646,18 @@ def test_negative_hosts_limit(session, module_org, smart_proxy_location): :expectedresults: Second host is not added to Host Collection and appropriate error is shown - - :CaseLevel: System """ hc_name = gen_string('alpha') - org = entities.Organization().create() - cv = entities.ContentView(organization=org).create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = module_target_sat.api.Organization().create() + cv = module_target_sat.api.ContentView(organization=org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() cv.publish() cv.read().version[0].promote(data={'environment_ids': lce.id}) hosts = [] for _ in range(2): hosts.append( - entities.Host( - organization=module_org, + module_target_sat.api.Host( + organization=module_org_with_parameter, location=smart_proxy_location, content_facet_attributes={ 'content_view_id': cv.id, @@ -672,9 +673,10 @@ def test_negative_hosts_limit(session, module_org, smart_proxy_location): session.hostcollection.associate_host(hc_name, hosts[0].name) with pytest.raises(AssertionError) as context: session.hostcollection.associate_host(hc_name, hosts[1].name) - assert "cannot have more than 1 host(s) associated with host collection '{}'".format( - hc_name - ) in str(context.value) + assert ( + f"cannot have more than 1 host(s) associated with host collection '{hc_name}'" + in str(context.value) + ) @pytest.mark.tier3 @@ -698,7 +700,7 @@ def test_positive_install_module_stream( :id: e5d882e0-3520-4cb6-8629-ef4c18692868 - :Steps: + :steps: 1. Run dnf upload profile to sync module streams from hosts to Satellite 2. Navigate to host_collection 3. Install the module stream duck @@ -707,8 +709,6 @@ def test_positive_install_module_stream( :expectedresults: Module-Stream should get installed on all the hosts in host collection - - :CaseLevel: System """ _run_remote_command_on_content_hosts('dnf -y upload-profile', vm_content_hosts_module_stream) with session: @@ -748,7 +748,7 @@ def test_positive_install_modular_errata( :id: 8d6fb447-af86-4084-a147-7910f0cecdef - :Steps: + :steps: 1. Generate modular errata by installing older version of module stream 2. Run dnf upload-profile 3. Install the modular errata by 'remote execution' @@ -756,8 +756,6 @@ def test_positive_install_modular_errata( :expectedresults: Modular Errata should get installed on all hosts in host collection. - - :CaseLevel: System """ stream = "0" version = "20180704111719" diff --git a/tests/foreman/ui/test_hostgroup.py b/tests/foreman/ui/test_hostgroup.py index 39878c78c94..718b5091fca 100644 --- a/tests/foreman/ui/test_hostgroup.py +++ b/tests/foreman/ui/test_hostgroup.py @@ -6,23 +6,17 @@ :CaseComponent: HostGroup -:CaseLevel: Integration - :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT +from robottelo.constants import DEFAULT_CV, ENVIRONMENT @pytest.mark.e2e @@ -180,7 +174,7 @@ def test_positive_create_new_host(): :id: 49704437-5ca1-46cb-b74e-de58396add37 - :Steps: + :steps: 1. Create hostgroup with the Content Source field populated. 2. Create host from Hosts > Create Host, selecting the hostgroup in the Host Group field. 
@@ -203,7 +197,7 @@ def test_positive_nested_host_groups( :id: 547f8e72-df65-48eb-aeb1-6b5fd3cbf4e5 - :Steps: + :steps: 1. Create the parent host-group. 2. Create, Update and Delete the nested host-group. @@ -277,7 +271,7 @@ def test_positive_clone_host_groups( :id: 9f02dcc5-98aa-48bd-8114-edd3a0be65c1 - :Steps: + :steps: 1. Create the host-group. 2. Clone the host-group created in step 1 3. Update and Delete the cloned host-group. diff --git a/tests/foreman/ui/test_http_proxy.py b/tests/foreman/ui/test_http_proxy.py index 50fb7909ebe..10d2249be30 100644 --- a/tests/foreman/ui/test_http_proxy.py +++ b/tests/foreman/ui/test_http_proxy.py @@ -2,28 +2,32 @@ :Requirement: HttpProxy -:CaseLevel: Acceptance - -:CaseComponent: Repositories +:CaseComponent: HTTPProxy :team: Phoenix-content -:TestType: Functional - :CaseImportance: High :CaseAutomation: Automated -:Upstream: No """ +from box import Box +from fauxfactory import gen_integer, gen_string, gen_url import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string -from fauxfactory import gen_url from robottelo.config import settings -from robottelo.constants import DOCKER_REPO_UPSTREAM_NAME -from robottelo.constants import REPO_TYPE +from robottelo.constants import DOCKER_REPO_UPSTREAM_NAME, REPO_TYPE, REPOS +from robottelo.hosts import ProxyHostError + + +@pytest.fixture +def function_spec_char_user(target_sat, session_auth_proxy): + """Creates a user with special character password on the auth HTTP proxy""" + name = gen_string('alpha').lower() # lower! + passwd = gen_string('punctuation').replace("'", '') + session_auth_proxy.add_user(name, passwd) + yield Box(name=name, passwd=passwd) + session_auth_proxy.remove_user(name) @pytest.mark.tier2 @@ -34,10 +38,6 @@ def test_positive_create_update_delete(module_org, module_location, target_sat): :id: 0c7cdf3d-778f-427a-9a2f-42ad7c23aa15 :expectedresults: All expected CRUD actions finished successfully - - :CaseLevel: Integration - - :CaseImportance: High """ http_proxy_name = gen_string('alpha', 15) updated_proxy_name = gen_string('alpha', 15) @@ -63,8 +63,8 @@ def test_positive_create_update_delete(module_org, module_location, target_sat): assert http_proxy_values['http_proxy']['name'] == http_proxy_name assert http_proxy_values['http_proxy']['url'] == http_proxy_url assert http_proxy_values['http_proxy']['username'] == username - assert http_proxy_values['locations']['resources']['assigned'][0] == module_location.name - assert http_proxy_values['organizations']['resources']['assigned'][0] == module_org.name + assert module_location.name in http_proxy_values['locations']['resources']['assigned'] + assert module_org.name in http_proxy_values['organizations']['resources']['assigned'] # Update http_proxy with new name session.http_proxy.update(http_proxy_name, {'http_proxy.name': updated_proxy_name}) assert session.http_proxy.search(updated_proxy_name)[0]['Name'] == updated_proxy_name @@ -83,12 +83,18 @@ def test_positive_assign_http_proxy_to_products_repositories( :id: 2b803f9c-8d5d-4467-8eba-18244ebc0201 - :expectedresults: HTTP Proxy is assigned to all repos present - in Products. + :setup: + 1. Create an Organization and Location. - :CaseImportance: Critical + :steps: + 1. Create two HTTP proxies. + 2. Create two products and two repos in each product with various HTTP proxy policies. + 3. Set the HTTP proxy through bulk action for both products. + + :expectedresults: + 1. HTTP Proxy is assigned to all repos present in Products. 
""" - # create HTTP proxies + # Create two HTTP proxies http_proxy_a = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.un_auth_proxy_url, @@ -103,16 +109,18 @@ def test_positive_assign_http_proxy_to_products_repositories( organization=[module_org.id], location=[module_location.id], ).create() - # Create products + # Create two products product_a = target_sat.api.Product( organization=module_org.id, ).create() product_b = target_sat.api.Product( organization=module_org.id, ).create() - # Create repositories from UI. + # Create two repositories in each product from UI with target_sat.ui_session() as session: repo_a1_name = gen_string('alpha') + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.repository.create( product_a.name, { @@ -160,7 +168,7 @@ def test_positive_assign_http_proxy_to_products_repositories( 'repo_content.http_proxy_policy': 'No HTTP Proxy', }, ) - # Add http_proxy to products + # Set the HTTP proxy through bulk action for both products session.product.search('') session.product.manage_http_proxy( [product_a.name, product_b.name], @@ -197,10 +205,10 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar :id: e93733e1-5c05-4b7f-89e4-253b9ce55a5a - :Steps: + :steps: 1. Navigate to Infrastructure > Http Proxies 2. Create a Http Proxy - 3. GoTo to Administer > Settings > content tab + 3. Go to Administer > Settings > Content tab 4. Update the "Default HTTP Proxy" with created above. 5. Update "Default HTTP Proxy" to "no global default". @@ -211,10 +219,7 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar :expectedresults: Setting "Default HTTP Proxy" to "no global default" result in success.''' :CaseImportance: Medium - - :CaseLevel: Acceptance """ - property_name = setting_update.name http_proxy_a = target_sat.api.HTTPProxy( @@ -225,6 +230,8 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar ).create() with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.settings.update( f'name = {property_name}', f'{http_proxy_a.name} ({http_proxy_a.url})' ) @@ -242,32 +249,30 @@ def test_set_default_http_proxy(module_org, module_location, setting_update, tar def test_check_http_proxy_value_repository_details( function_org, function_location, function_product, setting_update, target_sat ): - - """Deleted Global Http Proxy is reflected in repository details page". + """Global Http Proxy is reflected in repository details page". :id: 3f64255a-ef6c-4acb-b99b-e5579133b564 - :Steps: + :steps: 1. Create Http Proxy (Go to Infrastructure > Http Proxies > New Http Proxy) - 2. GoTo to Administer > Settings > content tab + 2. Go to Administer > Settings > Content tab 3. Update the "Default HTTP Proxy" with created above. - 4. Create repository with Global Default Http Proxy. - 5. Delete the Http Proxy + 4. Create repository, check the Global Default Http Proxy is used. + 5. Delete the Http Proxy. + 6. Check it no longer appears on the Settings and repository page. :BZ: 1820193 :parametrized: yes :expectedresults: - 1. After deletion of "Default Http Proxy" its field on settings page should be - set to no global defult - 2. "HTTP Proxy" field in repository details page should be set to Global Default (None). + 1. Repository is automatically created with relevant Global Default Http Proxy. + 2. 
After Http Proxy deletion + - its field on Settings page should be set to Empty. + - "HTTP Proxy" field in repository details page should be set to Global Default (None). :CaseImportance: Medium - - :CaseLevel: Acceptance """ - property_name = setting_update.name repo_name = gen_string('alpha') http_proxy_a = target_sat.api.HTTPProxy( @@ -291,45 +296,102 @@ 'repo_content.upstream_url': settings.repos.yum_0.url, }, ) + repo_values = session.repository.read(function_product.name, repo_name) + assert ( + repo_values['repo_content']['http_proxy_policy'] + == f'Global Default ({http_proxy_a.name})' + ) + session.http_proxy.delete(http_proxy_a.name) result = session.settings.read(f'name = {property_name}') assert result['table'][0]['Value'] == "Empty" - session.repository.search(function_product.name, repo_name)[0]['Name'] repo_values = session.repository.read(function_product.name, repo_name) assert repo_values['repo_content']['http_proxy_policy'] == 'Global Default (None)' @pytest.mark.tier3 @pytest.mark.run_in_one_thread -@pytest.mark.stubbed -def test_http_proxy_containing_special_characters(): +def test_http_proxy_containing_special_characters( + request, + target_sat, + session_auth_proxy, + function_spec_char_user, + module_sca_manifest_org, + default_location, +): """Test Manifest refresh and redhat repository sync with http proxy special characters in password. :id: 16082c6a-9320-4a9a-bd6c-5687b099c940 - :customerscenario: true - - :Steps: - 1. Navigate to Infrastructure > Http Proxies - 2. Create HTTP Proxy with special characters in password. - 3. Go To to Administer > Settings > content tab - 4. Fill the details related to HTTP Proxy and click on "Test connection" button. - 5. Update the "Default HTTP Proxy" with created above. - 6. Refresh manifest. - 7. Enable and sync any redhat repositories. + :setup: + 1. Have an authenticated HTTP proxy. + 2. At the Proxy side create a user with special characters in password + (via function_spec_char_user fixture), let's call him the spec-char user. - :BZ: 1844840 + :steps: + 1. Check that no logs exist for the spec-char user at the proxy side yet. + 2. Create a proxy via UI using the spec-char user. + 3. Update settings to use the proxy for the content ops. + 4. Refresh the manifest, check it went through the proxy. + 5. Enable and sync some RH repository, check it went through the proxy. :expectedresults: - 1. "Test connection" button workes as expected. - 2. Manifest refresh, repository enable/disable and repository sync operation - finished successfully. + 1. HTTP proxy can be created via UI using the spec-char user. + 2. Manifest refresh, repository enable and sync succeed and are performed + through the HTTP proxy. - :CaseAutomation: NotAutomated + :BZ: 1844840 - :CaseImportance: High + :customerscenario: true """ + # Check that no logs exist for the spec-char user at the proxy side yet. + with pytest.raises(ProxyHostError): + session_auth_proxy.get_log(tail=100, grep=function_spec_char_user.name) + + # Create a proxy via UI using the spec-char user.
+ proxy_name = gen_string('alpha') + with target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + session.http_proxy.create( + { + 'http_proxy.name': proxy_name, + 'http_proxy.url': settings.http_proxy.auth_proxy_url, + 'http_proxy.username': function_spec_char_user.name, + 'http_proxy.password': function_spec_char_user.passwd, + 'locations.resources.assigned': [default_location.name], + 'organizations.resources.assigned': [module_sca_manifest_org.name], + } + ) + request.addfinalizer( + lambda: target_sat.api.HTTPProxy() + .search(query={'search': f'name={proxy_name}'})[0] + .delete() + ) + + # Update settings to use the proxy for the content ops. + session.settings.update( + 'name = content_default_http_proxy', + f'{proxy_name} ({settings.http_proxy.auth_proxy_url})', + ) + + # Refresh the manifest, check it went through the proxy. + target_sat.cli.Subscription.refresh_manifest( + {'organization-id': module_sca_manifest_org.id} + ) + assert session_auth_proxy.get_log( + tail=100, grep=f'CONNECT subscription.rhsm.redhat.com.*{function_spec_char_user.name}' + ), 'RHSM connection not found in proxy log' + + # Enable and sync some RH repository, check it went through the proxy. + repo_id = target_sat.api_factory.enable_sync_redhat_repo( + REPOS['rhae2'], module_sca_manifest_org.id + ) + repo = target_sat.api.Repository(id=repo_id).read() + assert session_auth_proxy.get_log( + tail=100, grep=f'CONNECT cdn.redhat.com.*{function_spec_char_user.name}' + ), 'CDN connection not found in proxy log' + assert repo.content_counts['rpm'] > 0, 'Where is my content?!' @pytest.mark.tier2 @@ -350,8 +412,6 @@ def test_positive_repo_discovery(setup_http_proxy, module_target_sat, module_org :expectedresults: Repository is discovered and created. - :team: Phoenix-content - :BZ: 2011303, 2042473 :parametrized: yes diff --git a/tests/foreman/ui/test_jobinvocation.py b/tests/foreman/ui/test_jobinvocation.py deleted file mode 100644 index 2dbf8f0cf4a..00000000000 --- a/tests/foreman/ui/test_jobinvocation.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Test class for Job Invocation procedure - -:Requirement: JobInvocation - -:CaseAutomation: Automated - -:CaseLevel: Acceptance - -:CaseComponent: RemoteExecution - -:Team: Endeavour - -:TestType: Functional - -:CaseImportance: High - -:Upstream: No -""" -import pytest -from inflection import camelize - -from robottelo.utils.datafactory import gen_string - - -@pytest.fixture -def module_rhel_client_by_ip(module_org, smart_proxy_location, rhel7_contenthost, target_sat): - """Setup a broker rhel client to be used in remote execution by ip""" - rhel7_contenthost.configure_rex(satellite=target_sat, org=module_org) - target_sat.api_factory.update_vm_host_location( - rhel7_contenthost, location_id=smart_proxy_location.id - ) - yield rhel7_contenthost - - -@pytest.mark.tier4 -def test_positive_run_default_job_template_by_ip( - session, module_org, smart_proxy_location, module_rhel_client_by_ip -): - """Run a job template on a host connected by ip - - :id: 9a90aa9a-00b4-460e-b7e6-250360ee8e4d - - :Setup: Use pre-defined job template. - - :Steps: - - 1. Set remote_execution_connect_by_ip on host to true - 2. Navigate to an individual host and click Run Job - 3. Select the job and appropriate template - 4. 
Run the job - - :expectedresults: Verify the job was successfully ran against the host - - :parametrized: yes - - :CaseLevel: System - """ - hostname = module_rhel_client_by_ip.hostname - with session: - session.organization.select(module_org.name) - session.location.select(smart_proxy_location.name) - assert session.host.search(hostname)[0]['Name'] == hostname - session.jobinvocation.run( - { - 'job_category': 'Commands', - 'job_template': 'Run Command - Script Default', - 'search_query': f'name ^ {hostname}', - 'template_content.command': 'ls', - } - ) - session.jobinvocation.wait_job_invocation_state(entity_name='Run ls', host_name=hostname) - status = session.jobinvocation.read(entity_name='Run ls', host_name=hostname) - assert status['overview']['hosts_table'][0]['Status'] == 'success' - - -@pytest.mark.tier4 -def test_positive_run_custom_job_template_by_ip( - session, module_org, smart_proxy_location, module_rhel_client_by_ip -): - """Run a job template on a host connected by ip - - :id: e283ae09-8b14-4ce1-9a76-c1bbd511d58c - - :Setup: Create a working job template. - - :Steps: - - 1. Set remote_execution_connect_by_ip on host to true - 2. Navigate to an individual host and click Run Job - 3. Select the job and appropriate template - 4. Run the job - - :expectedresults: Verify the job was successfully ran against the host - - :parametrized: yes - - :CaseLevel: System - """ - hostname = module_rhel_client_by_ip.hostname - job_template_name = gen_string('alpha') - with session: - session.organization.select(module_org.name) - session.location.select(smart_proxy_location.name) - assert session.host.search(hostname)[0]['Name'] == hostname - session.jobtemplate.create( - { - 'template.name': job_template_name, - 'template.template_editor.rendering_options': 'Editor', - 'template.template_editor.editor': '<%= input("command") %>', - 'job.provider_type': 'Script', - 'inputs': [{'name': 'command', 'required': True, 'input_type': 'User input'}], - } - ) - assert session.jobtemplate.search(job_template_name)[0]['Name'] == job_template_name - session.jobinvocation.run( - { - 'job_category': 'Miscellaneous', - 'job_template': job_template_name, - 'search_query': f'name ^ {hostname}', - 'template_content.command': 'ls', - } - ) - job_description = f'{camelize(job_template_name.lower())} with inputs command="ls"' - session.jobinvocation.wait_job_invocation_state( - entity_name=job_description, host_name=hostname - ) - status = session.jobinvocation.read(entity_name=job_description, host_name=hostname) - assert status['overview']['hosts_table'][0]['Status'] == 'success' - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_schedule_recurring_host_job(self): - """Using the new Host UI, schedule a recurring job on a Host - - :id: 5052be04-28ab-4349-8bee-851ef76e4ffa - - :caseComponent: Ansible - - :Team: Rocket - - :Steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign a role to host. - 4. Navigate to the new UI for the given Host. - 5. Select the Jobs subtab. - 6. Click the Schedule Recurring Job button, and using the popup, schedule a - recurring Job. - 7. Navigate to Job Invocations. 
- - :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed - time - - """ - - -@pytest.mark.stubbed -@pytest.mark.tier2 -def test_positive_schedule_recurring_hostgroup_job(self): - """Using the new recurring job scheduler, schedule a recurring job on a Hostgroup - - :id: c65db99b-11fe-4a32-89d0-0a4692b07efe - - :caseComponent: Ansible - - :Team: Rocket - - :Steps: - 1. Register a RHEL host to Satellite. - 2. Import all roles available by default. - 3. Assign a role to host. - 4. Navigate to the Host Group page. - 5. Select the "Configure Ansible Job" action. - 6. Click the Schedule Recurring Job button, and using the popup, schedule a - recurring Job. - 7. Navigate to Job Invocations. - - :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed - time - - """ diff --git a/tests/foreman/ui/test_jobtemplate.py b/tests/foreman/ui/test_jobtemplate.py index 494e9ce5d54..e2be74da640 100644 --- a/tests/foreman/ui/test_jobtemplate.py +++ b/tests/foreman/ui/test_jobtemplate.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest @pytest.mark.e2e @@ -29,8 +24,6 @@ def test_positive_end_to_end(session, module_org, module_location, target_sat): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ template_name = gen_string('alpha') @@ -208,32 +201,3 @@ def test_positive_end_to_end(session, module_org, module_location, target_sat): for name in (template_new_name, template_clone_name): session.jobtemplate.delete(name) assert not session.jobtemplate.search(name) - - -@pytest.mark.skip_if_open('BZ:1705866') -@pytest.mark.tier2 -def test_positive_clone_job_template_with_foreign_input_sets(session): - """Clone job template with foreign input sets - - :id: 7f502750-b8a2-4223-8d3c-47be95781e34 - - :expectedresults: Job template can be cloned with foreign input sets and - new template contain foreign input sets from parent - - :BZ: 1705866 - """ - child_name = gen_string('alpha') - parent_name = 'Install Group - Katello Script Default' - with session: - parent = session.jobtemplate.read(parent_name, widget_names='job')['job'][ - 'foreign_input_sets' - ] - session.jobtemplate.clone(parent_name, {'template.name': child_name}) - child = session.jobtemplate.read(child_name, widget_names='job')['job'][ - 'foreign_input_sets' - ] - assert len(parent) == len(child) - assert parent[0]['target_template'] == child[0]['target_template'] - assert parent[0]['include_all'] == child[0]['include_all'] - assert parent[0]['include'] == child[0]['include'] - assert parent[0]['exclude'] == child[0]['exclude'] diff --git a/tests/foreman/ui/test_ldap_authentication.py b/tests/foreman/ui/test_ldap_authentication.py index 01ac2987fe4..a6e3af6d612 100644 --- a/tests/foreman/ui/test_ldap_authentication.py +++ b/tests/foreman/ui/test_ldap_authentication.py @@ -4,35 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Integration - -:CaseComponent: LDAP +:CaseComponent: Authentication :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import os -import pyotp -import pytest -from airgun.session import Session from fauxfactory import gen_url -from nailgun import entities from navmazing import NavigationTriesExceeded +import pyotp +import pytest from robottelo.config import settings -from 
robottelo.constants import ANY_CONTEXT -from robottelo.constants import CERT_PATH -from robottelo.constants import LDAP_ATTR -from robottelo.constants import PERMISSIONS +from robottelo.constants import ANY_CONTEXT, CERT_PATH, LDAP_ATTR, PERMISSIONS from robottelo.utils.datafactory import gen_string - pytestmark = [pytest.mark.run_in_one_thread] EXTERNAL_GROUP_NAME = 'foobargroup' @@ -63,49 +52,49 @@ def set_certificate_in_satellite(server_type, target_sat, hostname=None): raise AssertionError(f'Failed to restart the httpd after applying {server_type} cert') -@pytest.fixture() -def ldap_usergroup_name(): +@pytest.fixture +def ldap_usergroup_name(target_sat): """Return some random usergroup name, and attempt to delete such usergroup when test finishes. """ usergroup_name = gen_string('alphanumeric') yield usergroup_name - user_groups = entities.UserGroup().search(query={'search': f'name="{usergroup_name}"'}) + user_groups = target_sat.api.UserGroup().search(query={'search': f'name="{usergroup_name}"'}) if user_groups: user_groups[0].delete() -@pytest.fixture() -def ldap_tear_down(): +@pytest.fixture +def ldap_tear_down(target_sat): """Teardown the all ldap settings user, usergroup and ldap delete""" yield - ldap_auth_sources = entities.AuthSourceLDAP().search() + ldap_auth_sources = target_sat.api.AuthSourceLDAP().search() for ldap_auth in ldap_auth_sources: - users = entities.User(auth_source=ldap_auth).search() + users = target_sat.api.User(auth_source=ldap_auth).search() for user in users: user.delete() ldap_auth.delete() -@pytest.fixture() -def external_user_count(): +@pytest.fixture +def external_user_count(target_sat): """return the external auth source user count""" - users = entities.User().search() - yield len([user for user in users if user.auth_source_name == 'External']) + users = target_sat.api.User().search() + return len([user for user in users if user.auth_source_name == 'External']) -@pytest.fixture() -def groups_teardown(): +@pytest.fixture +def groups_teardown(target_sat): """teardown for groups created for external/remote groups""" yield # tier down groups for group_name in ('sat_users', 'sat_admins', EXTERNAL_GROUP_NAME): - user_groups = entities.UserGroup().search(query={'search': f'name="{group_name}"'}) + user_groups = target_sat.api.UserGroup().search(query={'search': f'name="{group_name}"'}) if user_groups: user_groups[0].delete() -@pytest.fixture() +@pytest.fixture def rhsso_groups_teardown(default_sso_host): """Teardown the rhsso groups""" yield @@ -113,7 +102,7 @@ def rhsso_groups_teardown(default_sso_host): default_sso_host.delete_rhsso_group(group_name) -@pytest.fixture() +@pytest.fixture def multigroup_setting_cleanup(default_ipa_host): """Adding and removing the user to/from ipa group""" sat_users = settings.ipa.groups @@ -123,7 +112,7 @@ def multigroup_setting_cleanup(default_ipa_host): default_ipa_host.remove_user_from_usergroup(idm_users[1], sat_users[0]) -@pytest.fixture() +@pytest.fixture def ipa_add_user(default_ipa_host): """Create an IPA user and delete it""" test_user = gen_string('alpha') @@ -148,8 +137,6 @@ def test_positive_end_to_end(session, ldap_auth_source, ldap_tear_down): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High :parametrized: yes @@ -183,7 +170,7 @@ def test_positive_end_to_end(session, ldap_auth_source, ldap_tear_down): @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA', 'OPENLDAP'], indirect=True) @pytest.mark.tier2 @pytest.mark.upgrade -def 
test_positive_create_org_and_loc(session, ldap_auth_source, ldap_tear_down): +def test_positive_create_org_and_loc(session, ldap_auth_source, ldap_tear_down, target_sat): """Create LDAP auth_source with org and loc assigned. :id: 4f595af4-fc01-44c6-a614-a9ec827e3c3c @@ -201,8 +188,8 @@ def test_positive_create_org_and_loc(session, ldap_auth_source, ldap_tear_down): :parametrized: yes """ ldap_data, auth_source = ldap_auth_source - org = entities.Organization().create() - loc = entities.Location().create() + org = target_sat.api.Organization().create() + loc = target_sat.api.Location().create() ldap_auth_name = gen_string('alphanumeric') with session: session.ldapauthentication.create( @@ -251,7 +238,7 @@ def test_positive_add_katello_role( :id: aa5e3bf4-cb42-43a4-93ea-a2eea54b847a - :Steps: + :steps: 1. Create an UserGroup. 2. Assign some foreman roles to UserGroup. 3. Create and associate an External UserGroup. @@ -267,7 +254,7 @@ def test_positive_add_katello_role( auth_source_name = f'LDAP-{auth_source.name}' ak_name = gen_string('alpha') user_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']} - katello_role = entities.Role().create() + katello_role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(katello_role, user_permissions) with session: session.usergroup.create( @@ -280,7 +267,9 @@ def test_positive_add_katello_role( ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name session.usergroup.refresh_external_group(ldap_usergroup_name, EXTERNAL_GROUP_NAME) - with Session(test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd']) as session: + with target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as session: with pytest.raises(NavigationTriesExceeded): session.architecture.search('') session.activationkey.create({'name': ak_name}) @@ -319,8 +308,8 @@ def test_positive_update_external_roles( ak_name = gen_string('alpha') auth_source_name = f'LDAP-{auth_source.name}' location_name = gen_string('alpha') - foreman_role = entities.Role().create() - katello_role = entities.Role().create() + foreman_role = target_sat.api.Role().create() + katello_role = target_sat.api.Role().create() foreman_permissions = {'Location': PERMISSIONS['Location']} katello_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']} target_sat.api_factory.create_role_permissions(foreman_role, foreman_permissions) @@ -335,19 +324,23 @@ def test_positive_update_external_roles( } ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded): ldapsession.architecture.search('') ldapsession.location.create({'name': location_name}) - location = entities.Location().search(query={'search': f'name="{location_name}"'})[0] + location = target_sat.api.Location().search( + query={'search': f'name="{location_name}"'} + )[0] assert location.name == location_name session.usergroup.update( ldap_usergroup_name, {'roles.resources.assigned': [katello_role.name]} ) session.usergroup.refresh_external_group(ldap_usergroup_name, EXTERNAL_GROUP_NAME) - with Session(test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd']) as session: + with target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as session: 
session.activationkey.create({'name': ak_name}) assert session.activationkey.search(ak_name)[0]['Name'] == ak_name current_user = session.activationkey.read(ak_name, 'current_user')['current_user'] @@ -385,7 +378,7 @@ def test_positive_delete_external_roles( ldap_data, auth_source = ldap_auth_source auth_source_name = f'LDAP-{auth_source.name}' location_name = gen_string('alpha') - foreman_role = entities.Role().create() + foreman_role = target_sat.api.Role().create() foreman_permissions = {'Location': PERMISSIONS['Location']} target_sat.api_factory.create_role_permissions(foreman_role, foreman_permissions) with session: @@ -398,22 +391,23 @@ def test_positive_delete_external_roles( } ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded): ldapsession.architecture.search('') ldapsession.location.create({'name': location_name}) - location = entities.Location().search(query={'search': f'name="{location_name}"'})[0] + location = target_sat.api.Location().search( + query={'search': f'name="{location_name}"'} + )[0] assert location.name == location_name session.usergroup.update( ldap_usergroup_name, {'roles.resources.unassigned': [foreman_role.name]} ) - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] - ) as ldapsession: - with pytest.raises(NavigationTriesExceeded): - ldapsession.location.create({'name': gen_string('alpha')}) + ) as ldapsession, pytest.raises(NavigationTriesExceeded): + ldapsession.location.create({'name': gen_string('alpha')}) @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA'], indirect=True) @@ -452,8 +446,8 @@ def test_positive_update_external_user_roles( ak_name = gen_string('alpha') auth_source_name = f'LDAP-{auth_source.name}' location_name = gen_string('alpha') - foreman_role = entities.Role().create() - katello_role = entities.Role().create() + foreman_role = target_sat.api.Role().create() + katello_role = target_sat.api.Role().create() foreman_permissions = {'Location': PERMISSIONS['Location']} katello_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']} target_sat.api_factory.create_role_permissions(foreman_role, foreman_permissions) @@ -468,17 +462,21 @@ def test_positive_update_external_user_roles( } ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: ldapsession.location.create({'name': location_name}) - location = entities.Location().search(query={'search': f'name="{location_name}"'})[0] + location = target_sat.api.Location().search( + query={'search': f'name="{location_name}"'} + )[0] assert location.name == location_name session.location.select(ANY_CONTEXT['location']) session.user.update( ldap_data['ldap_user_name'], {'roles.resources.assigned': [katello_role.name]} ) - with Session(test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd']) as session: + with target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as session: with pytest.raises(NavigationTriesExceeded): ldapsession.architecture.search('') session.activationkey.create({'name': ak_name}) @@ -496,6 +494,7 @@ def test_positive_add_admin_role_with_org_loc( module_org, 
ldap_tear_down, ldap_auth_source, + target_sat, ): """Associate Admin role to User Group with org and loc set. [belonging to external User Group.] @@ -507,7 +506,7 @@ def test_positive_add_admin_role_with_org_loc( :setup: LDAP Auth Source should be created with Org and Location Associated. - :Steps: + :steps: 1. Create an UserGroup. 2. Assign admin role to UserGroup. 3. Create and associate an External UserGroup. @@ -532,7 +531,9 @@ def test_positive_add_admin_role_with_org_loc( } ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name - with Session(test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd']) as session: + with target_sat.ui_session( + test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] + ) as session: session.location.create({'name': location_name}) assert session.location.search(location_name)[0]['Name'] == location_name location = session.location.read(location_name, ['current_user', 'primary']) @@ -567,7 +568,7 @@ def test_positive_add_foreman_role_with_org_loc( :setup: LDAP Auth Source should be created with Org and Location Associated. - :Steps: + :steps: 1. Create an UserGroup. 2. Assign some foreman roles to UserGroup. @@ -587,7 +588,7 @@ def test_positive_add_foreman_role_with_org_loc( 'Location': ['assign_locations'], 'Organization': ['assign_organizations'], } - foreman_role = entities.Role().create() + foreman_role = module_target_sat.api.Role().create() module_target_sat.api_factory.create_role_permissions(foreman_role, user_permissions) with session: session.usergroup.create( @@ -600,7 +601,7 @@ def test_positive_add_foreman_role_with_org_loc( ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name session.usergroup.refresh_external_group(ldap_usergroup_name, EXTERNAL_GROUP_NAME) - with Session( + with module_target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded): @@ -633,7 +634,7 @@ def test_positive_add_katello_role_with_org( :setup: LDAP Auth Source should be created with Organization associated. - :Steps: + :steps: 1. Create an UserGroup. 2. Assign some katello roles to UserGroup. 3. Create and associate an External UserGroup. 
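As a brief sketch of the pattern the hunk below migrates (the file-wide move from bare nailgun `entities` calls to the satellite-scoped `target_sat.api` wrapper), assuming the standard `target_sat` fixture and the PERMISSIONS map imported from robottelo.constants earlier in this file:

    # Sketch only: create a role limited to activation-key permissions and
    # grant them via the api_factory helper these tests already use.
    user_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']}
    katello_role = target_sat.api.Role().create()
    target_sat.api_factory.create_role_permissions(katello_role, user_permissions)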
@@ -652,9 +653,9 @@ def test_positive_add_katello_role_with_org( 'Location': ['assign_locations'], 'Organization': ['assign_organizations'], } - katello_role = entities.Role().create() + katello_role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(katello_role, user_permissions) - different_org = entities.Organization().create() + different_org = target_sat.api.Organization().create() with session: session.usergroup.create( { @@ -666,7 +667,7 @@ def test_positive_add_katello_role_with_org( ) assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name session.usergroup.refresh_external_group(ldap_usergroup_name, EXTERNAL_GROUP_NAME) - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded): @@ -675,9 +676,9 @@ def test_positive_add_katello_role_with_org( results = session.activationkey.search(ak_name) assert results[0]['Name'] == ak_name session.organization.select(different_org.name) - assert not session.activationkey.search(ak_name)[0]['Name'] == ak_name + assert session.activationkey.search(ak_name)[0]['Name'] != ak_name ak = ( - entities.ActivationKey(organization=module_org) + target_sat.api.ActivationKey(organization=module_org) .search(query={'search': f'name={ak_name}'})[0] .read() ) @@ -710,7 +711,7 @@ def test_positive_create_user_in_ldap_mode(session, ldap_auth_source, ldap_tear_ @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA'], indirect=True) @pytest.mark.tier2 -def test_positive_login_user_no_roles(test_name, ldap_tear_down, ldap_auth_source): +def test_positive_login_user_no_roles(test_name, ldap_tear_down, ldap_auth_source, target_sat): """Login with LDAP Auth for user with no roles/rights :id: 7dc8d9a7-ff08-4d8e-a842-d370ffd69741 @@ -727,7 +728,7 @@ def test_positive_login_user_no_roles(test_name, ldap_tear_down, ldap_auth_sourc :parametrized: yes """ ldap_data, auth_source = ldap_auth_source - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: ldapsession.task.read_all() @@ -754,17 +755,16 @@ def test_positive_login_user_basic_roles( """ ldap_data, auth_source = ldap_auth_source name = gen_string('alpha') - role = entities.Role().create() + role = target_sat.api.Role().create() permissions = {'Architecture': PERMISSIONS['Architecture']} target_sat.api_factory.create_role_permissions(role, permissions) - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] - ) as ldapsession: - with pytest.raises(NavigationTriesExceeded): - ldapsession.usergroup.search('') + ) as ldapsession, pytest.raises(NavigationTriesExceeded): + ldapsession.usergroup.search('') with session: session.user.update(ldap_data['ldap_user_name'], {'roles.resources.assigned': [role.name]}) - with Session( + with target_sat.ui_session( test_name, ldap_data['ldap_user_name'], ldap_data['ldap_user_passwd'] ) as ldapsession: ldapsession.architecture.create({'name': name}) @@ -774,7 +774,7 @@ def test_positive_login_user_basic_roles( @pytest.mark.upgrade @pytest.mark.tier2 def test_positive_login_user_password_otp( - auth_source_ipa, default_ipa_host, test_name, ldap_tear_down + auth_source_ipa, default_ipa_host, test_name, ldap_tear_down, target_sat ): """Login with password with time based OTP @@ -792,16 +792,19 @@ def test_positive_login_user_password_otp( otp_pass = ( 
f"{default_ipa_host.ldap_user_passwd}{generate_otp(default_ipa_host.time_based_secret)}" ) - with Session(test_name, default_ipa_host.ipa_otp_username, otp_pass) as ldapsession: - with pytest.raises(NavigationTriesExceeded): - ldapsession.user.search('') - users = entities.User().search(query={'search': f'login="{default_ipa_host.ipa_otp_username}"'}) + with target_sat.ui_session( + test_name, default_ipa_host.ipa_otp_username, otp_pass + ) as ldapsession, pytest.raises(NavigationTriesExceeded): + ldapsession.user.search('') + users = target_sat.api.User().search( + query={'search': f'login="{default_ipa_host.ipa_otp_username}"'} + ) assert users[0].login == default_ipa_host.ipa_otp_username @pytest.mark.tier2 def test_negative_login_user_with_invalid_password_otp( - auth_source_ipa, default_ipa_host, test_name, ldap_tear_down + auth_source_ipa, default_ipa_host, test_name, ldap_tear_down, target_sat ): """Login with password with time based OTP @@ -819,7 +822,9 @@ def test_negative_login_user_with_invalid_password_otp( password_with_otp = ( f"{default_ipa_host.ldap_user_passwd}{gen_string(str_type='numeric', length=6)}" ) - with Session(test_name, default_ipa_host.ipa_otp_username, password_with_otp) as ldapsession: + with target_sat.ui_session( + test_name, default_ipa_host.ipa_otp_username, password_with_otp + ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: ldapsession.user.search('') assert error.typename == 'NavigationTriesExceeded' @@ -847,7 +852,7 @@ def test_positive_test_connection_functionality(session, ldap_auth_source): @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA', 'OPENLDAP'], indirect=True) @pytest.mark.tier2 -def test_negative_login_with_incorrect_password(test_name, ldap_auth_source): +def test_negative_login_with_incorrect_password(test_name, ldap_auth_source, target_sat): """Attempt to login in Satellite an user with the wrong password :id: 3f09de90-a656-11ea-aa43-4ceb42ab8dbc @@ -864,7 +869,7 @@ def test_negative_login_with_incorrect_password(test_name, ldap_auth_source): """ ldap_data, auth_source = ldap_auth_source incorrect_password = gen_string('alphanumeric') - with Session( + with target_sat.ui_session( test_name, user=ldap_data['ldap_user_name'], password=incorrect_password ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: @@ -873,7 +878,9 @@ def test_negative_login_with_incorrect_password(test_name, ldap_auth_source): @pytest.mark.tier2 -def test_negative_login_with_disable_user(default_ipa_host, auth_source_ipa, ldap_tear_down): +def test_negative_login_with_disable_user( + default_ipa_host, auth_source_ipa, ldap_tear_down, target_sat +): """Disabled IDM user cannot login :id: 49f28006-aa1f-11ea-90d3-4ceb42ab8dbc @@ -884,7 +891,7 @@ def test_negative_login_with_disable_user(default_ipa_host, auth_source_ipa, lda :expectedresults: Login fails """ - with Session( + with target_sat.ui_session( user=default_ipa_host.disabled_user_ipa, password=default_ipa_host.ldap_user_passwd ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: @@ -894,7 +901,7 @@ def test_negative_login_with_disable_user(default_ipa_host, auth_source_ipa, lda @pytest.mark.tier2 def test_email_of_the_user_should_be_copied( - session, default_ipa_host, auth_source_ipa, ldap_tear_down + session, default_ipa_host, auth_source_ipa, ldap_tear_down, target_sat ): """Email of the user created in idm server ( set as external authorization source ) should be copied to the satellite. 
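A condensed sketch of the session refactor these hunks apply: airgun's standalone Session is replaced by the target_sat.ui_session helper, and where the session body only asserts a navigation failure, the nested with blocks are flattened into one combined context manager. Here username and password stand in for the fixture-provided credentials used by the surrounding tests:

    import pytest
    from navmazing import NavigationTriesExceeded

    # Before: standalone Session plus a nested raises block
    with Session(test_name, username, password) as ldapsession:
        with pytest.raises(NavigationTriesExceeded):
            ldapsession.user.search('')

    # After: satellite-scoped session and the raises check in one with-statement
    with target_sat.ui_session(
        test_name, username, password
    ) as ldapsession, pytest.raises(NavigationTriesExceeded):
        ldapsession.user.search('')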
@@ -915,7 +922,7 @@ def test_email_of_the_user_should_be_copied( if 'Email' in line: _, result = line.split(': ', 2) break - with Session( + with target_sat.ui_session( user=default_ipa_host.ldap_user_name, password=default_ipa_host.ldap_user_passwd ) as ldapsession: ldapsession.bookmark.search('controller = hosts') @@ -942,10 +949,10 @@ def test_deleted_idm_user_should_not_be_able_to_login( """ test_user = gen_string('alpha') default_ipa_host.create_user(test_user) - with Session(user=test_user, password=settings.ipa.password) as ldapsession: + with target_sat.ui_session(user=test_user, password=settings.ipa.password) as ldapsession: ldapsession.bookmark.search('controller = hosts') default_ipa_host.delete_user(test_user) - with Session(user=test_user, password=settings.ipa.password) as ldapsession: + with target_sat.ui_session(user=test_user, password=settings.ipa.password) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: ldapsession.user.search('') assert error.typename == 'NavigationTriesExceeded' @@ -953,13 +960,13 @@ def test_deleted_idm_user_should_not_be_able_to_login( @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA', 'OPENLDAP'], indirect=True) @pytest.mark.tier2 -def test_onthefly_functionality(session, ldap_auth_source, ldap_tear_down): +def test_onthefly_functionality(session, ldap_auth_source, ldap_tear_down, target_sat): """User will not be created automatically in Satellite if onthefly is disabled :id: 6998de30-ef77-11ea-a0ce-0c7a158cbff4 - :Steps: + :steps: 1. Create an auth source with onthefly disabled 2. Try login with a user from auth source @@ -988,7 +995,7 @@ def test_onthefly_functionality(session, ldap_auth_source, ldap_tear_down): 'attribute_mappings.mail': LDAP_ATTR['mail'], } ) - with Session( + with target_sat.ui_session( user=ldap_data['ldap_user_name'], password=ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: @@ -1039,7 +1046,9 @@ def test_timeout_and_cac_card_ejection(): @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA', 'OPENLDAP'], indirect=True) @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1670397') -def test_verify_attribute_of_users_are_updated(session, ldap_auth_source, ldap_tear_down): +def test_verify_attribute_of_users_are_updated( + session, ldap_auth_source, ldap_tear_down, target_sat +): """Verify if attributes of LDAP user are updated upon first login when onthefly is disabled @@ -1047,7 +1056,7 @@ def test_verify_attribute_of_users_are_updated(session, ldap_auth_source, ldap_t :customerscenario: true - :Steps: + :steps: 1. Create authsource with onthefly disabled 2. Create a user manually and select the authsource created 3. Attributes of the user (like names and email) should be synced. 
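The same migration standardizes entity lookup and cleanup on the target_sat.api wrapper, as in the ldap_tear_down fixture near the top of this file; a minimal sketch, with some_login as a placeholder value:

    # Find an LDAP-backed user through the satellite-scoped API wrapper
    # and delete it if present, mirroring the teardown fixtures above.
    users = target_sat.api.User().search(query={'search': 'login="some_login"'})
    if users:
        users[0].delete()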
@@ -1087,7 +1096,7 @@ def test_verify_attribute_of_users_are_updated(session, ldap_auth_source, ldap_t 'roles.admin': True, } ) - with Session( + with target_sat.ui_session( user=ldap_data['ldap_user_name'], password=ldap_data['ldap_user_passwd'] ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: @@ -1104,7 +1113,7 @@ def test_verify_attribute_of_users_are_updated(session, ldap_auth_source, ldap_t @pytest.mark.parametrize('ldap_auth_source', ['AD', 'IPA', 'OPENLDAP'], indirect=True) @pytest.mark.tier2 def test_login_failure_if_internal_user_exist( - session, test_name, ldap_auth_source, module_org, module_location, ldap_tear_down + session, test_name, ldap_auth_source, module_org, module_location, ldap_tear_down, target_sat ): """Verify the failure of login for the AD/IPA user in case same username internal user exists @@ -1126,19 +1135,21 @@ def test_login_failure_if_internal_user_exist( try: internal_username = ldap_data['ldap_user_name'] internal_password = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( admin=True, default_organization=module_org, default_location=module_location, login=internal_username, password=internal_password, ).create() - with Session(test_name, internal_username, ldap_data['ldap_user_passwd']) as ldapsession: + with target_sat.ui_session( + test_name, internal_username, ldap_data['ldap_user_passwd'] + ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: ldapsession.user.search('') assert error.typename == 'NavigationTriesExceeded' finally: - entities.User(id=user.id).delete() + target_sat.api.User(id=user.id).delete() @pytest.mark.skip_if_open("BZ:1812688") @@ -1176,7 +1187,7 @@ def test_userlist_with_external_admin( auth_source_name = f'LDAP-{auth_source_ipa.name}' user_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']} - katello_role = entities.Role().create() + katello_role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(katello_role, user_permissions) with session: session.usergroup.create( @@ -1195,16 +1206,17 @@ def test_userlist_with_external_admin( 'external_groups.auth_source': auth_source_name, } ) - with Session(user=idm_user, password=settings.server.ssh_password) as ldapsession: + with target_sat.ui_session(user=idm_user, password=settings.server.ssh_password) as ldapsession: assert idm_user in ldapsession.task.read_all()['current_user'] # verify the users count with local admin and remote/external admin - with Session(user=idm_admin, password=settings.server.ssh_password) as remote_admin_session: - with Session( - user=settings.server.admin_username, password=settings.server.admin_password - ) as local_admin_session: - assert local_admin_session.user.search(idm_user)[0]['Username'] == idm_user - assert remote_admin_session.user.search(idm_user)[0]['Username'] == idm_user + with target_sat.ui_session( + user=idm_admin, password=settings.server.ssh_password + ) as remote_admin_session, target_sat.ui_session( + user=settings.server.admin_username, password=settings.server.admin_password + ) as local_admin_session: + assert local_admin_session.user.search(idm_user)[0]['Username'] == idm_user + assert remote_admin_session.user.search(idm_user)[0]['Username'] == idm_user @pytest.mark.skip_if_open('BZ:1883209') @@ -1224,7 +1236,7 @@ def test_positive_group_sync_open_ldap_authsource( :BZ: 1883209 - :Steps: + :steps: 1. Create an UserGroup. 2. Assign some foreman roles to UserGroup. 3. 
Create and associate an External OpenLDAP UserGroup. @@ -1235,7 +1247,7 @@ def test_positive_group_sync_open_ldap_authsource( ak_name = gen_string('alpha') auth_source_name = f'LDAP-{auth_source_open_ldap.name}' user_permissions = {'Katello::ActivationKey': PERMISSIONS['Katello::ActivationKey']} - katello_role = entities.Role().create() + katello_role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(katello_role, user_permissions) with session: session.usergroup.create( @@ -1249,7 +1261,7 @@ def test_positive_group_sync_open_ldap_authsource( assert session.usergroup.search(ldap_usergroup_name)[0]['Name'] == ldap_usergroup_name session.usergroup.refresh_external_group(ldap_usergroup_name, EXTERNAL_GROUP_NAME) user_name = open_ldap_data.open_ldap_user - with Session(test_name, user_name, open_ldap_data.password) as session: + with target_sat.ui_session(test_name, user_name, open_ldap_data.password) as session: with pytest.raises(NavigationTriesExceeded): session.architecture.search('') session.activationkey.create({'name': ak_name}) @@ -1271,7 +1283,7 @@ def test_verify_group_permissions( :id: 7e2ef59c-0c68-11eb-b6f3-0c7a158cbff4 - :Steps: + :steps: 1. Create two usergroups and link it with external group having a user in common 2. Give those usergroup different permissions @@ -1285,7 +1297,7 @@ def test_verify_group_permissions( idm_users = settings.ipa.group_users auth_source_name = f'LDAP-{auth_source_ipa.name}' user_permissions = {None: ['access_dashboard']} - katello_role = entities.Role().create() + katello_role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(katello_role, user_permissions) with session: session.usergroup.create( @@ -1305,21 +1317,23 @@ def test_verify_group_permissions( } ) location_name = gen_string('alpha') - with Session(user=idm_users[1], password=settings.server.ssh_password) as ldapsession: + with target_sat.ui_session( + user=idm_users[1], password=settings.server.ssh_password + ) as ldapsession: ldapsession.location.create({'name': location_name}) - location = entities.Location().search(query={'search': f'name="{location_name}"'})[0] + location = target_sat.api.Location().search(query={'search': f'name="{location_name}"'})[0] assert location.name == location_name @pytest.mark.tier2 def test_verify_ldap_filters_ipa( - session, ipa_add_user, auth_source_ipa, default_ipa_host, ldap_tear_down + session, ipa_add_user, auth_source_ipa, default_ipa_host, ldap_tear_down, target_sat ): """Verifying ldap filters in authsource to restrict access :id: 0052b272-08b1-11eb-80c6-0c7a158cbff4 - :Steps: + :steps: 1. Create authsource with onthefly enabled and ldap filter 2. Verify login from users according to the filter @@ -1330,7 +1344,9 @@ def test_verify_ldap_filters_ipa( # 'test_user' able to login before the filter is applied. 
test_user = ipa_add_user - with Session(user=test_user, password=default_ipa_host.ldap_user_passwd) as ldapsession: + with target_sat.ui_session( + user=test_user, password=default_ipa_host.ldap_user_passwd + ) as ldapsession: ldapsession.task.read_all() # updating the authsource with filter @@ -1339,7 +1355,9 @@ def test_verify_ldap_filters_ipa( session.ldapauthentication.update(auth_source_ipa.name, {'account.ldap_filter': ldap_data}) # 'test_user' not able login as it gets filtered out - with Session(user=test_user, password=default_ipa_host.ldap_user_passwd) as ldapsession: + with target_sat.ui_session( + user=test_user, password=default_ipa_host.ldap_user_passwd + ) as ldapsession: with pytest.raises(NavigationTriesExceeded) as error: ldapsession.user.search('') assert error.typename == 'NavigationTriesExceeded' diff --git a/tests/foreman/ui/test_lifecycleenvironment.py b/tests/foreman/ui/test_lifecycleenvironment.py index 396662708c8..9c389fdd19d 100644 --- a/tests/foreman/ui/test_lifecycleenvironment.py +++ b/tests/foreman/ui/test_lifecycleenvironment.py @@ -4,30 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: LifecycleEnvironments :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from airgun.session import Session from navmazing import NavigationTriesExceeded +import pytest from robottelo.config import settings -from robottelo.constants import ENVIRONMENT -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE -from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_2_CUSTOM_PACKAGE -from robottelo.constants import FAKE_3_CUSTOM_PACKAGE_NAME +from robottelo.constants import ( + ENVIRONMENT, + FAKE_0_CUSTOM_PACKAGE, + FAKE_0_CUSTOM_PACKAGE_NAME, + FAKE_1_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE_NAME, + FAKE_2_CUSTOM_PACKAGE, + FAKE_3_CUSTOM_PACKAGE_NAME, +) from robottelo.utils.datafactory import gen_string @@ -40,8 +36,6 @@ def test_positive_end_to_end(session): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ lce_name = gen_string('alpha') @@ -77,8 +71,6 @@ def test_positive_create_chain(session): :id: ed3d2c88-ef0a-4a1a-9f11-5bdb2119fc18 :expectedresults: Environment is created - - :CaseLevel: Integration """ lce_path_name = gen_string('alpha') lce_name = gen_string('alpha') @@ -118,8 +110,6 @@ def test_positive_search_lce_content_view_packages_by_full_name(session, module_ :expectedresults: only the searched packages where found :BZ: 1432155 - - :CaseLevel: System """ packages = [ {'name': FAKE_0_CUSTOM_PACKAGE_NAME, 'full_names': [FAKE_0_CUSTOM_PACKAGE]}, @@ -171,8 +161,6 @@ def test_positive_search_lce_content_view_packages_by_name(session, module_org, :expectedresults: only the searched packages where found :BZ: 1432155 - - :CaseLevel: System """ packages = [ {'name': FAKE_0_CUSTOM_PACKAGE_NAME, 'packages_count': 1}, @@ -216,8 +204,6 @@ def test_positive_search_lce_content_view_module_streams_by_name(session, module 5. 
Search by module stream names :expectedresults: only the searched module streams where found - - :CaseLevel: System """ module_streams = [ {'name': FAKE_1_CUSTOM_PACKAGE_NAME, 'streams_count': 2}, @@ -252,7 +238,7 @@ def test_positive_custom_user_view_lce(session, test_name, target_sat): :BZ: 1420511 - :Steps: + :steps: As an admin user: @@ -279,8 +265,6 @@ def test_positive_custom_user_view_lce(session, test_name, target_sat): :expectedresults: The additional lifecycle environment is viewable and accessible by the custom user. - - :CaseLevel: Integration """ role_name = gen_string('alpha') lce_name = gen_string('alpha') @@ -313,7 +297,7 @@ def test_positive_custom_user_view_lce(session, test_name, target_sat): lce_values = session.lifecycleenvironment.read_all() assert lce_name in lce_values['lce'] # ensure the created user also can find the created lifecycle environment link - with Session(test_name, user_login, user_password) as non_admin_session: + with target_sat.ui_session(test_name, user_login, user_password) as non_admin_session: # to ensure that the created user has only the assigned # permissions, check that hosts menu tab does not exist with pytest.raises(NavigationTriesExceeded): diff --git a/tests/foreman/ui/test_location.py b/tests/foreman/ui/test_location.py index 06b51bf2341..98d2d2a8749 100644 --- a/tests/foreman/ui/test_location.py +++ b/tests/foreman/ui/test_location.py @@ -4,27 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: OrganizationsandLocations :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from fauxfactory import gen_ipaddr -from fauxfactory import gen_string +from fauxfactory import gen_ipaddr, gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import ANY_CONTEXT -from robottelo.constants import INSTALL_MEDIUM_URL -from robottelo.constants import LIBVIRT_RESOURCE_URL +from robottelo.constants import ANY_CONTEXT, INSTALL_MEDIUM_URL, LIBVIRT_RESOURCE_URL @pytest.mark.e2e @@ -37,8 +29,6 @@ def test_positive_end_to_end(session): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: Critical """ loc_parent = entities.Location().create() @@ -128,8 +118,6 @@ def test_positive_update_with_all_users(session): was enabled and then disabled afterwards :BZ: 1321543, 1479736, 1479736 - - :CaseLevel: Integration """ user = entities.User().create() loc = entities.Location().create() @@ -164,8 +152,6 @@ def test_positive_add_org_hostgroup_template(session): :expectedresults: organization, hostgroup, provisioning template are added to location - - :CaseLevel: Integration """ org = entities.Organization().create() loc = entities.Location().create() @@ -199,7 +185,6 @@ def test_positive_add_org_hostgroup_template(session): @pytest.mark.skip_if_not_set('libvirt') -@pytest.mark.on_premises_provisioning @pytest.mark.tier2 def test_positive_update_compresource(session): """Add/Remove compute resource from/to location @@ -207,8 +192,6 @@ def test_positive_update_compresource(session): :id: 1d24414a-666d-490d-89b9-cd0704684cdd :expectedresults: compute resource is added and removed from the location - - :CaseLevel: Integration """ url = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname resource = entities.LibvirtComputeResource(url=url).create() diff --git a/tests/foreman/ui/test_media.py b/tests/foreman/ui/test_media.py index 8d944c250bb..75a39f80edb 100644 --- 
a/tests/foreman/ui/test_media.py +++ b/tests/foreman/ui/test_media.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: Low -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.constants import INSTALL_MEDIUM_URL @@ -32,8 +27,6 @@ def test_positive_end_to_end(session, module_org, module_location): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_modulestreams.py b/tests/foreman/ui/test_modulestreams.py index 1bf848a7721..f8a67ed15c6 100644 --- a/tests/foreman/ui/test_modulestreams.py +++ b/tests/foreman/ui/test_modulestreams.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.config import settings @@ -56,8 +51,6 @@ def test_positive_module_stream_details_search_in_repo(session, module_org, modu :expectedresults: Content search functionality works as intended and expected module_streams are present inside of repository - :CaseLevel: Integration - :BZ: 1948758 """ with session: diff --git a/tests/foreman/ui/test_operatingsystem.py b/tests/foreman/ui/test_operatingsystem.py index 7d7272a948b..1324b556230 100644 --- a/tests/foreman/ui/test_operatingsystem.py +++ b/tests/foreman/ui/test_operatingsystem.py @@ -4,62 +4,21 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Provisioning :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from nailgun import entities -from robottelo.constants import DEFAULT_TEMPLATE from robottelo.constants import HASH_TYPE from robottelo.utils.datafactory import gen_string -@pytest.fixture(scope='module') -def module_org(): - return entities.Organization().create() - - @pytest.mark.tier2 -def test_positive_update_with_params(session): - """Set Operating System parameter - - :id: 05b504d8-2518-4359-a53a-f577339f1ebe - - :expectedresults: OS is updated with new parameter - - :CaseLevel: Integration - """ - name = gen_string('alpha') - major_version = gen_string('numeric', 2) - param_name = gen_string('alpha') - param_value = gen_string('alpha') - with session: - session.operatingsystem.create( - {'operating_system.name': name, 'operating_system.major': major_version} - ) - os_full_name = f'{name} {major_version}' - assert session.operatingsystem.search(name)[0]['Title'] == os_full_name - session.operatingsystem.update( - os_full_name, {'parameters.os_params': {'name': param_name, 'value': param_value}} - ) - values = session.operatingsystem.read(os_full_name) - assert len(values['parameters']) == 1 - assert values['parameters']['os_params'][0]['name'] == param_name - assert values['parameters']['os_params'][0]['value'] == param_value - - -@pytest.mark.tier2 -def test_positive_end_to_end(session): +def test_positive_end_to_end(session, module_org, module_location, target_sat): """Create all possible entities that required for operating system and then test all scenarios like create/read/update/delete for it @@ -67,8 +26,6 @@ def test_positive_end_to_end(session): :expectedresults: All scenarios flows work properly - :CaseLevel: Integration - :CaseImportance: Critical """ name = 
gen_string('alpha') @@ -77,18 +34,17 @@ def test_positive_end_to_end(session): description = gen_string('alpha') family = 'Red Hat' hash = HASH_TYPE['md5'] - architecture = entities.Architecture().create() - org = entities.Organization().create() - loc = entities.Location().create() - ptable = entities.PartitionTable( - organization=[org], location=[loc], os_family='Redhat' + architecture = target_sat.api.Architecture().create() + ptable = target_sat.api.PartitionTable( + organization=[module_org], location=[module_location], os_family='Redhat' ).create() - medium = entities.Media(organization=[org], location=[loc]).create() + medium = target_sat.api.Media(organization=[module_org], location=[module_location]).create() param_name = gen_string('alpha') param_value = gen_string('alpha') with session: - session.organization.select(org_name=org.name) - session.location.select(loc_name=loc.name) + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) + # Create session.operatingsystem.create( { 'operating_system.name': name, @@ -113,54 +69,77 @@ def test_positive_end_to_end(session): assert os['operating_system']['password_hash'] == hash assert len(os['operating_system']['architectures']['assigned']) == 1 assert os['operating_system']['architectures']['assigned'][0] == architecture.name + assert os['templates']['resources']['Provisioning template'] == 'Kickstart default' assert ptable.name in os['partition_table']['resources']['assigned'] assert os['installation_media']['resources']['assigned'][0] == medium.name assert len(os['parameters']['os_params']) == 1 assert os['parameters']['os_params'][0]['name'] == param_name assert os['parameters']['os_params'][0]['value'] == param_value + template_name = gen_string('alpha') new_description = gen_string('alpha') + new_name = gen_string('alpha') + new_major_version = gen_string('numeric', 2) + new_minor_version = gen_string('numeric', 2) + new_family = 'Red Hat CoreOS' + new_hash = HASH_TYPE['sha512'] + new_architecture = target_sat.api.Architecture().create() + new_ptable = target_sat.api.PartitionTable( + organization=[module_org], location=[module_location], os_family='Redhat' + ).create() + new_medium = target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() + new_param_value = gen_string('alpha') + session.provisioningtemplate.create( + { + 'template.name': template_name, + 'template.default': True, + 'type.snippet': False, + 'template.template_editor.editor': gen_string('alpha'), + 'type.template_type': 'Provisioning template', + 'association.applicable_os.assigned': [os['operating_system']['description']], + 'organizations.resources.assigned': [module_org.name], + 'locations.resources.assigned': [module_location.name], + } + ) + # Update session.operatingsystem.update( - description, {'operating_system.description': new_description} + description, + { + 'operating_system.name': new_name, + 'templates.resources': {'Provisioning template': template_name}, + 'operating_system.description': new_description, + 'operating_system.major': new_major_version, + 'operating_system.minor': new_minor_version, + 'operating_system.family': new_family, + 'operating_system.password_hash': new_hash, + 'operating_system.architectures.assigned': [new_architecture.name], + 'partition_table.resources.assigned': [new_ptable.name], + 'installation_media.resources.assigned': [new_medium.name], + 'parameters.os_params': {'name': param_name, 'value': new_param_value}, + }, ) - 
assert not session.operatingsystem.search(description) - assert session.operatingsystem.search(new_description)[0]['Title'] == new_description - assert session.partitiontable.search(ptable.name)[0]['Operating Systems'] == new_description + os = session.operatingsystem.read(new_description) + assert os['operating_system']['name'] == new_name + assert os['operating_system']['major'] == new_major_version + assert os['operating_system']['minor'] == new_minor_version + assert os['operating_system']['description'] == new_description + assert os['operating_system']['family'] == new_family + assert os['operating_system']['password_hash'] == new_hash + assert new_architecture.name in os['operating_system']['architectures']['assigned'] + assert new_ptable.name in os['partition_table']['resources']['assigned'] + assert new_medium.name in os['installation_media']['resources']['assigned'] + assert os['templates']['resources']['Provisioning template'] == template_name + assert len(os['parameters']['os_params']) == 1 + assert os['parameters']['os_params'][0]['name'] == param_name + assert os['parameters']['os_params'][0]['value'] == new_param_value + # Delete session.operatingsystem.delete(new_description) assert not session.operatingsystem.search(new_description) @pytest.mark.tier2 -def test_positive_update_template(session, module_org): - """Update operating system with new provisioning template value - - :id: 0b90eb24-8fc9-4e42-8709-6eee8ffbbdb5 - - :expectedresults: OS is updated with new template - - :CaseLevel: Integration - """ - name = gen_string('alpha') - major_version = gen_string('numeric', 2) - os = entities.OperatingSystem(name=name, major=major_version, family='Redhat').create() - template = entities.ProvisioningTemplate( - organization=[module_org], - snippet=False, - template_kind=entities.TemplateKind().search(query={'search': 'name="provision"'})[0], - operatingsystem=[os], - ).create() - with session: - os_full_name = f'{name} {major_version}' - values = session.operatingsystem.read(os_full_name) - assert values['templates']['resources']['Provisioning template'] == DEFAULT_TEMPLATE - session.operatingsystem.update( - os_full_name, {'templates.resources': {'Provisioning template': template.name}} - ) - values = session.operatingsystem.read(os_full_name) - assert values['templates']['resources']['Provisioning template'] == template.name - - -@pytest.mark.tier2 -def test_positive_verify_os_name(session): +def test_positive_verify_os_name(session, target_sat): """Check that the Operating System name is displayed correctly :id: 2cb9cdcf-1723-4317-ab0a-971e7b2dd161 @@ -169,14 +148,12 @@ def test_positive_verify_os_name(session): :BZ: 1778503 - :CaseLevel: Component - :CaseImportance: Low """ name = gen_string('alpha') major_version = gen_string('numeric', 2) os_full_name = f"{name} {major_version}" - entities.OperatingSystem(name=name, major=major_version).create() + target_sat.api.OperatingSystem(name=name, major=major_version).create() with session: values = session.operatingsystem.search(os_full_name) assert values[0]['Title'] == os_full_name diff --git a/tests/foreman/ui/test_organization.py b/tests/foreman/ui/test_organization.py index b9b370f86c8..3aa913484fb 100644 --- a/tests/foreman/ui/test_organization.py +++ b/tests/foreman/ui/test_organization.py @@ -4,34 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: OrganizationsandLocations :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import 
gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import DEFAULT_ORG -from robottelo.constants import INSTALL_MEDIUM_URL -from robottelo.constants import LIBVIRT_RESOURCE_URL +from robottelo.constants import DEFAULT_ORG, INSTALL_MEDIUM_URL, LIBVIRT_RESOURCE_URL from robottelo.logging import logger CUSTOM_REPO_ERRATA_ID = settings.repos.yum_0.errata[0] @pytest.fixture(scope='module') -def module_repos_col(module_org, module_lce, module_target_sat, request): - module_target_sat.upload_manifest(org_id=module_org.id) +def module_repos_col(request, module_entitlement_manifest_org, module_lce, module_target_sat): repos_collection = module_target_sat.cli_factory.RepositoryCollection( repositories=[ # As Satellite Tools may be added as custom repo and to have a "Fully entitled" host, @@ -39,15 +31,15 @@ def module_repos_col(module_org, module_lce, module_target_sat, request): module_target_sat.cli_factory.YumRepository(url=settings.repos.yum_0.url), ], ) - repos_collection.setup_content(module_org.id, module_lce.id) + repos_collection.setup_content(module_entitlement_manifest_org.id, module_lce.id) yield repos_collection @request.addfinalizer def _cleanup(): try: - module_target_sat.api.Subscription(organization=module_org).delete_manifest( - data={'organization_id': module_org.id} - ) + module_target_sat.api.Subscription( + organization=module_entitlement_manifest_org + ).delete_manifest(data={'organization_id': module_entitlement_manifest_org.id}) except Exception: logger.exception('Exception cleaning manifest:') @@ -62,8 +54,6 @@ def test_positive_end_to_end(session): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: Critical """ name = gen_string('alpha') @@ -117,7 +107,12 @@ def test_positive_end_to_end(session): ) assert session.organization.search(new_name) org_values = session.organization.read(new_name, widget_names=widget_list) - assert not session.organization.delete(new_name) + with pytest.raises(AssertionError) as context: + assert not session.organization.delete(new_name) + assert ( + 'The current organization cannot be deleted. Please switch to a ' + 'different organization before deleting.' in str(context.value) + ) assert user.login in org_values['users']['resources']['assigned'] assert media.name in org_values['media']['resources']['assigned'] assert template.name in org_values['provisioning_templates']['resources']['assigned'] @@ -206,8 +201,6 @@ def test_positive_create_with_all_users(session): :expectedresults: Organization and user entities assigned to each other :BZ: 1321543 - - :CaseLevel: Integration """ user = entities.User().create() org = entities.Organization().create() @@ -224,7 +217,6 @@ def test_positive_create_with_all_users(session): @pytest.mark.skip_if_not_set('libvirt') -@pytest.mark.on_premises_provisioning @pytest.mark.tier2 def test_positive_update_compresource(session): """Add/Remove compute resource from/to organization. @@ -232,8 +224,6 @@ def test_positive_update_compresource(session): :id: a49349b9-4637-4ef6-b65b-bd3eccb5a12a :expectedresults: Compute resource is added and then removed. 
- - :CaseLevel: Integration """ url = f'{LIBVIRT_RESOURCE_URL}{settings.libvirt.libvirt_hostname}' resource = entities.LibvirtComputeResource(url=url).create() @@ -256,7 +246,7 @@ def test_positive_update_compresource(session): @pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_with_manifest_lces(session, target_sat): +def test_positive_delete_with_manifest_lces(session, target_sat, function_entitlement_manifest_org): """Create Organization with valid values and upload manifest. Then try to delete that organization. @@ -264,12 +254,9 @@ def test_positive_delete_with_manifest_lces(session, target_sat): :expectedresults: Organization is deleted successfully. - :CaseLevel: Integration - :CaseImportance: Critical """ - org = entities.Organization().create() - target_sat.upload_manifest(org.id) + org = function_entitlement_manifest_org with session: session.organization.select(org.name) session.lifecycleenvironment.create({'name': 'DEV'}) @@ -281,11 +268,11 @@ def test_positive_delete_with_manifest_lces(session, target_sat): assert not session.organization.search(org.name) -@pytest.mark.skip('Skipping due to manifest refresh issues') -@pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_download_debug_cert_after_refresh(session, target_sat): +def test_positive_download_debug_cert_after_refresh( + session, target_sat, function_entitlement_manifest_org +): """Create organization with valid manifest. Download debug certificate for that organization and refresh added manifest for few times in a row @@ -294,13 +281,10 @@ def test_positive_download_debug_cert_after_refresh(session, target_sat): :expectedresults: Scenario passed successfully - :CaseLevel: Integration - :CaseImportance: High """ - org = entities.Organization().create() + org = function_entitlement_manifest_org try: - target_sat.upload_manifest(org.id) with session: session.organization.select(org.name) for _ in range(3): @@ -318,14 +302,12 @@ def test_positive_errata_view_organization_switch( :id: faad9cf3-f8d5-49a6-87d1-431837b67675 - :Steps: Create an Organization having a product synced which contains errata. + :steps: Create an Organization having a product synced which contains errata. :expectedresults: Verify that the errata belonging to one Organization is not showing in the Default organization. :CaseImportance: High - - :CaseLevel: Integration """ rc = module_target_sat.cli_factory.RepositoryCollection( repositories=[module_target_sat.cli_factory.YumRepository(settings.repos.yum_3.url)] @@ -347,7 +329,7 @@ def test_positive_product_view_organization_switch(session, module_org, module_p :id: 50cc459a-3a23-433a-99b9-9f3b929e6d64 - :Steps: + :steps: 1. Create an Organization having a product and verify that product is present in the Organization. 2. Switch the Organization to default and verify that product is not visible in it. @@ -355,11 +337,9 @@ def test_positive_product_view_organization_switch(session, module_org, module_p :expectedresults: Verify that the Product belonging to one Organization is not visible in another organization. 
- :CaseLevel: Integration - :CaseImportance: High """ with session: assert session.product.search(module_product.name) session.organization.select(org_name="Default Organization") - assert not session.product.search(module_product.name) == module_product.name + assert session.product.search(module_product.name) != module_product.name diff --git a/tests/foreman/ui/test_oscapcontent.py b/tests/foreman/ui/test_oscapcontent.py index 6a86a8ce9e1..5669fb9e854 100644 --- a/tests/foreman/ui/test_oscapcontent.py +++ b/tests/foreman/ui/test_oscapcontent.py @@ -4,24 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SCAPPlugin -:Team: Rocket - -:TestType: Functional +:Team: Endeavour :CaseImportance: High -:Upstream: No """ import os import pytest -from robottelo.config import robottelo_tmp_dir -from robottelo.config import settings +from robottelo.config import robottelo_tmp_dir, settings from robottelo.constants import DataFile from robottelo.utils.datafactory import gen_string @@ -46,7 +40,7 @@ def test_positive_end_to_end( :id: 9870555d-0b60-41ab-a481-81d4d3f78fec - :Steps: + :steps: 1. Create an openscap content. 2. Read values from created entity. @@ -54,8 +48,6 @@ def test_positive_end_to_end( 4. Delete openscap content :expectedresults: All expected CRUD actions finished successfully - - :CaseLevel: Integration """ title = gen_string('alpha') new_title = gen_string('alpha') @@ -94,7 +86,7 @@ def test_negative_create_with_same_name(session, oscap_content_path, default_org :id: f5c6491d-b83c-4ca2-afdf-4bb93e6dd92b - :Steps: + :steps: 1. Create an openscap content. 2. Provide all the appropriate parameters. @@ -129,7 +121,7 @@ def test_external_disa_scap_content(session, default_org, default_location): :id: 5f29254e-7c15-45e1-a2ec-4da1d3d8d74d - :Steps: + :steps: 1. Create an openscap content with external DISA SCAP content. 2. Assert that openscap content has been created. diff --git a/tests/foreman/ui/test_oscappolicy.py b/tests/foreman/ui/test_oscappolicy.py index e0d47e152a3..712e6f37481 100644 --- a/tests/foreman/ui/test_oscappolicy.py +++ b/tests/foreman/ui/test_oscappolicy.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SCAPPlugin -:Team: Rocket - -:TestType: Functional +:Team: Endeavour :CaseImportance: High -:Upstream: No """ -import pytest from nailgun import entities +import pytest from robottelo.constants import OSCAP_PROFILE from robottelo.utils.datafactory import gen_string @@ -47,7 +42,7 @@ def test_positive_check_dashboard( :customerscenario: true - :Steps: + :steps: 1. Create new host group 2. 
Create new host using host group from step 1 @@ -58,8 +53,6 @@ def test_positive_check_dashboard( data :BZ: 1424936 - - :CaseLevel: Integration """ name = gen_string('alpha') oscap_content_title = gen_string('alpha') @@ -124,8 +117,6 @@ def test_positive_end_to_end( :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: Critical """ name = '{} {}'.format(gen_string('alpha'), gen_string('alpha')) diff --git a/tests/foreman/ui/test_oscaptailoringfile.py b/tests/foreman/ui/test_oscaptailoringfile.py index 007f0b44557..0f6bdcdfc63 100644 --- a/tests/foreman/ui/test_oscaptailoringfile.py +++ b/tests/foreman/ui/test_oscaptailoringfile.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SCAPPlugin -:Team: Rocket - -:TestType: Functional +:Team: Endeavour :CaseImportance: High -:Upstream: No """ -import pytest from nailgun import entities +import pytest from robottelo.utils.datafactory import gen_string @@ -30,8 +25,6 @@ def test_positive_end_to_end(session, tailoring_file_path, default_org, default_ :id: 9aebccb8-6837-4583-8a8a-8883480ab688 :expectedresults: All expected CRUD actions finished successfully - - :CaseLevel: Integration """ name = gen_string('alpha') new_name = gen_string('alpha') diff --git a/tests/foreman/ui/test_package.py b/tests/foreman/ui/test_package.py index 7d32d67e319..643608811d1 100644 --- a/tests/foreman/ui/test_package.py +++ b/tests/foreman/ui/test_package.py @@ -4,25 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import RPM_TO_UPLOAD +from robottelo.constants import RPM_TO_UPLOAD, DataFile @pytest.fixture(scope='module') @@ -60,12 +54,11 @@ def module_yum_repo2(module_product): @pytest.fixture(scope='module') -def module_rh_repo(module_org, module_target_sat): - module_target_sat.upload_manifest(module_org.id) +def module_rh_repo(module_entitlement_manifest_org, module_target_sat): rhst = module_target_sat.cli_factory.SatelliteToolsRepository(cdn=True) repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch=rhst.data['arch'], - org_id=module_org.id, + org_id=module_entitlement_manifest_org.id, product=rhst.data['product'], repo=rhst.data['repository'], reposet=rhst.data['repository-set'], @@ -84,8 +77,6 @@ def test_positive_search_in_repo(session, module_org, module_yum_repo): :expectedresults: Content search functionality works as intended and expected packages are present inside of repository - - :CaseLevel: Integration """ with session: session.organization.select(org_name=module_org.name) @@ -110,8 +101,6 @@ def test_positive_search_in_multiple_repos(session, module_org, module_yum_repo, expected packages are present inside of repositories :BZ: 1514457 - - :CaseLevel: Integration """ with session: session.organization.select(org_name=module_org.name) @@ -141,8 +130,6 @@ def test_positive_check_package_details(session, module_org, module_yum_repo): :expectedresults: Package is present inside of repository and has all expected values in details section - :CaseLevel: Integration - :customerscenario: true """ with session: @@ -183,8 +170,6 @@ def test_positive_check_custom_package_details(session, module_org, module_yum_r 
:expectedresults: Package is present inside of repository and it possible to view its details - :CaseLevel: Integration - :customerscenario: true :BZ: 1387766, 1394390 @@ -212,8 +197,6 @@ def test_positive_rh_repo_search_and_check_file_list(session, module_org, module :expectedresults: Content search functionality works as intended and package contains expected list of files - - :CaseLevel: System """ with session: session.organization.select(org_name=module_org.name) diff --git a/tests/foreman/ui/test_partitiontable.py b/tests/foreman/ui/test_partitiontable.py index 2b3e2099b15..127dc1ec67f 100644 --- a/tests/foreman/ui/test_partitiontable.py +++ b/tests/foreman/ui/test_partitiontable.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.constants import DataFile @@ -37,8 +32,6 @@ def test_positive_create_default_for_organization(session): :expectedresults: New partition table is created and is present in the list of selected partition tables for any new organization - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -66,8 +59,6 @@ def test_positive_create_custom_organization(session): :expectedresults: New partition table is created and is not present in the list of selected partition tables for any new organization - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -95,8 +86,6 @@ def test_positive_create_default_for_location(session): :expectedresults: New partition table is created and is present in the list of selected partition tables for any new location - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -124,8 +113,6 @@ def test_positive_create_custom_location(session): :expectedresults: New partition table is created and is not present in the list of selected partition tables for any new location - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -152,8 +139,6 @@ def test_positive_delete_with_lock_and_unlock(session): :expectedresults: New partition table is created and not deleted when locked and only deleted after unlock - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -167,7 +152,7 @@ def test_positive_delete_with_lock_and_unlock(session): ) assert session.partitiontable.search(name)[0]['Name'] == name session.partitiontable.lock(name) - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 - TODO determine better exception session.partitiontable.delete(name) session.partitiontable.unlock(name) session.partitiontable.delete(name) @@ -182,8 +167,6 @@ def test_positive_clone(session): :expectedresults: New partition table is created and cloned successfully - :CaseLevel: Integration - :CaseImportance: Medium """ name = gen_string('alpha') @@ -219,8 +202,6 @@ def test_positive_end_to_end(session, module_org, module_location, template_data :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_product.py b/tests/foreman/ui/test_product.py index 2abb16fea74..71a468ccb8e 100644 --- a/tests/foreman/ui/test_product.py +++ b/tests/foreman/ui/test_product.py @@ -4,32 +4,27 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: Repositories :team: 
Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from datetime import timedelta -import pytest from fauxfactory import gen_choice from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import REPO_TYPE -from robottelo.constants import SYNC_INTERVAL -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import parametrized -from robottelo.utils.datafactory import valid_cron_expressions -from robottelo.utils.datafactory import valid_data_list +from robottelo.constants import REPO_TYPE, SYNC_INTERVAL, DataFile +from robottelo.utils.datafactory import ( + gen_string, + parametrized, + valid_cron_expressions, + valid_data_list, +) @pytest.fixture(scope='module') @@ -46,8 +41,6 @@ def test_positive_end_to_end(session, module_org): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: Critical """ product_name = gen_string('alpha') @@ -113,8 +106,6 @@ def test_positive_create_in_different_orgs(session, product_name): :expectedresults: Product is created successfully in both organizations. - - :CaseLevel: Integration """ orgs = [entities.Organization().create() for _ in range(2)] with session: @@ -134,8 +125,6 @@ def test_positive_product_create_with_create_sync_plan(session, module_org): :expectedresults: Ensure sync get created and assigned to Product. - :CaseLevel: Integration - :CaseImportance: Medium """ product_name = gen_string('alpha') @@ -180,7 +169,7 @@ def test_positive_bulk_action_advanced_sync(session, module_org): :customerscenario: true - :Steps: + :steps: 1. Enable or create a repository and sync it. 2. Navigate to Content > Product > click on the product. 3. Click Select Action > Advanced Sync. diff --git a/tests/foreman/ui/test_provisioningtemplate.py b/tests/foreman/ui/test_provisioningtemplate.py index e14ba4a83ef..da2b8446dce 100644 --- a/tests/foreman/ui/test_provisioningtemplate.py +++ b/tests/foreman/ui/test_provisioningtemplate.py @@ -4,23 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ProvisioningTemplates :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from airgun.session import Session -from nailgun import entities -from robottelo.config import settings from robottelo.constants import DataFile from robottelo.utils.datafactory import gen_string @@ -30,13 +22,13 @@ def template_data(): return DataFile.OS_TEMPLATE_DATA_FILE.read_text() -@pytest.fixture() -def clone_setup(module_org, module_location): +@pytest.fixture +def clone_setup(target_sat, module_org, module_location): name = gen_string('alpha') content = gen_string('alpha') - os_list = [entities.OperatingSystem().create().title for _ in range(2)] + os_list = [target_sat.api.OperatingSystem().create().title for _ in range(2)] return { - 'pt': entities.ProvisioningTemplate( + 'pt': target_sat.api.ProvisioningTemplate( name=name, organization=[module_org], location=[module_location], @@ -48,21 +40,21 @@ def clone_setup(module_org, module_location): @pytest.mark.tier2 -def test_positive_clone(session, clone_setup): +def test_positive_clone(module_org, module_location, target_sat, clone_setup): """Assure ability to clone a provisioning template :id: 912f1619-4bb0-4e0f-88ce-88b5726fdbe0 - :Steps: + :steps: 1. Go to Provisioning template UI 2. 
Choose a template and attempt to clone it :expectedresults: The template is cloned - - :CaseLevel: Integration """ clone_name = gen_string('alpha') - with session: + with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.provisioningtemplate.clone( clone_setup['pt'].name, { @@ -70,55 +62,47 @@ def test_positive_clone(session, clone_setup): 'association.applicable_os.assigned': clone_setup['os_list'], }, ) - pt = entities.ProvisioningTemplate().search(query={'search': f'name=={clone_name}'}) + pt = target_sat.api.ProvisioningTemplate().search(query={'search': f'name=={clone_name}'}) assigned_oses = [os.read() for os in pt[0].read().operatingsystem] - assert ( - pt - ), 'Template {} expected to exist but is not included in the search' 'results'.format( - clone_name - ) + assert pt, f'Template {clone_name} expected to exist but is not included in the search' assert set(clone_setup['os_list']) == {f'{os.name} {os.major}' for os in assigned_oses} @pytest.mark.tier2 -def test_positive_clone_locked(session, target_sat): +def test_positive_clone_locked(target_sat): """Assure ability to clone a locked provisioning template :id: 2df8550a-fe7d-405f-ab48-2896554cda12 - :Steps: + :steps: 1. Go to Provisioning template UI 2. Choose a locked provisioning template and attempt to clone it :expectedresults: The template is cloned - - :CaseLevel: Integration """ clone_name = gen_string('alpha') - with session: + with target_sat.ui_session() as session: session.provisioningtemplate.clone( 'Kickstart default', { 'template.name': clone_name, }, ) - pt = target_sat.api.ProvisioningTemplate().search(query={'search': f'name=={clone_name}'}) - assert pt, ( - f'Template {clone_name} expected to exist but is not included in the search' 'results' - ) + assert target_sat.api.ProvisioningTemplate().search( + query={'search': f'name=={clone_name}'} + ), f'Template {clone_name} expected to exist but is not included in the search' @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_end_to_end(session, module_org, module_location, template_data, target_sat): +@pytest.mark.e2e +def test_positive_end_to_end(module_org, module_location, template_data, target_sat): """Perform end to end testing for provisioning template component :id: b44d4cc8-b78e-47cf-9993-0bb871ac2c96 :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') @@ -138,7 +122,9 @@ def test_positive_end_to_end(session, module_org, module_location, template_data 'input_content.description': gen_string('alpha'), } ] - with session: + with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.provisioningtemplate.create( { 'template.name': name, @@ -154,10 +140,9 @@ def test_positive_end_to_end(session, module_org, module_location, template_data 'locations.resources.assigned': [module_location.name], } ) - assert target_sat.api.ProvisioningTemplate().search(query={'search': f'name=={name}'}), ( - 'Provisioning template {} expected to exist but is not included in the search' - 'results'.format(name) - ) + assert target_sat.api.ProvisioningTemplate().search( + query={'search': f'name=={name}'} + ), f'Provisioning template {name} expected to exist but is not included in the search' pt = session.provisioningtemplate.read(name) assert pt['template']['name'] == name 
assert pt['template']['default'] is True
@@ -176,54 +161,51 @@ def test_positive_end_to_end(session, module_org, module_location, template_data
         updated_pt = target_sat.api.ProvisioningTemplate().search(
             query={'search': f'name=={new_name}'}
         )
-        assert updated_pt, (
-            'Provisioning template {} expected to exist but is not included in the search'
-            'results'.format(new_name)
-        )
+        assert (
+            updated_pt
+        ), f'Provisioning template {new_name} expected to exist but is not included in the search'
         updated_pt = updated_pt[0].read()
         assert updated_pt.snippet is True, 'Snippet attribute not updated for Provisioning Template'
-        assert not updated_pt.template_kind, 'Snippet template is {}'.format(
-            updated_pt.template_kind
-        )
+        assert not updated_pt.template_kind, f'Snippet template is {updated_pt.template_kind}'
         session.provisioningtemplate.delete(new_name)
         assert not target_sat.api.ProvisioningTemplate().search(
             query={'search': f'name=={new_name}'}
-        ), (
-            'Provisioning template {} expected to be removed but is included in the search '
-            'results'.format(new_name)
-        )
+        ), f'Provisioning template {new_name} expected to be removed but is included in the search'


-@pytest.mark.skip_if_open("BZ:1767040")
-@pytest.mark.tier3
-def test_negative_template_search_using_url():
-    """Satellite should not show full trace on web_browser after invalid search in url
-
-    :id: aeb365dc-49de-11eb-bf99-d46d6dd3b5b2
-
-    :customerscenario: true
+@pytest.mark.tier2
+def test_positive_verify_supported_templates_rhlogo(target_sat, module_org, module_location):
+    """Verify presence of RH logo on supported provisioning template

-    :expectedresults: Satellite should not show full trace and show correct error message
+    :id: 2df8550a-fe7d-405f-ab48-2896554cda14

-    :CaseLevel: Integration
+    :steps:
+        1. Go to Provisioning template UI
+        2. Choose any provisioning template and check whether it is supported

-    :CaseImportance: High
+    :expectedresults: Supported templates will have the RH logo and unsupported templates will have no logo.
- :BZ: 1767040 + :BZ: 2211210, 2238346 """ - with Session( - url='/templates/provisioning_templates?search=&page=1"sadfasf', login=False - ) as session: - login_details = { - 'username': settings.server.admin_username, - 'password': settings.server.admin_password, - } - session.login.login(login_details) - error_page = session.browser.selenium.page_source - error_helper_message = ( - "Please include in your report the full error log that can be acquired by running" - ) - trace_link_word = "Full trace" - assert error_helper_message in error_page - assert trace_link_word not in error_page - assert "foreman-rake errors:fetch_log request_id=" in error_page + template_name = '"Kickstart default"' + pt = target_sat.api.ProvisioningTemplate().search(query={'search': f'name={template_name}'})[0] + pt_clone = pt.clone(data={'name': f'{pt.name} {gen_string("alpha")}'}) + + random_templates = { + 'ansible_provisioning_callback': {'supported': True, 'locked': True}, + pt.name: {'supported': True, 'locked': True}, + pt_clone['name']: {'supported': False, 'locked': False}, + 'Windows default provision': {'supported': False, 'locked': True}, + } + with target_sat.ui_session() as session: + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) + for template in random_templates: + assert ( + session.provisioningtemplate.is_locked(template) + == random_templates[template]['locked'] + ) + assert ( + session.provisioningtemplate.is_supported(template) + == random_templates[template]['supported'] + ) diff --git a/tests/foreman/ui/test_puppetclass.py b/tests/foreman/ui/test_puppetclass.py index c8f08292249..b44ad73e7a7 100644 --- a/tests/foreman/ui/test_puppetclass.py +++ b/tests/foreman/ui/test_puppetclass.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Low -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest @pytest.mark.tier2 @@ -29,8 +24,6 @@ def test_positive_end_to_end(session_puppet_enabled_sat, module_puppet_org, modu :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_puppetenvironment.py b/tests/foreman/ui/test_puppetenvironment.py index 729eb7b190b..57502a51191 100644 --- a/tests/foreman/ui/test_puppetenvironment.py +++ b/tests/foreman/ui/test_puppetenvironment.py @@ -4,22 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Low -:Upstream: No """ import pytest -from robottelo.constants import DEFAULT_CV -from robottelo.constants import ENVIRONMENT +from robottelo.constants import DEFAULT_CV, ENVIRONMENT from robottelo.utils.datafactory import gen_string @@ -32,8 +26,6 @@ def test_positive_end_to_end(session_puppet_enabled_sat, module_puppet_org, modu :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') @@ -75,8 +67,6 @@ def test_positive_availability_for_host_and_hostgroup_in_multiple_orgs( :BZ: 543178 - :CaseLevel: Integration - :CaseImportance: High """ env_name = gen_string('alpha') diff --git a/tests/foreman/ui/test_registration.py b/tests/foreman/ui/test_registration.py new file mode 100644 index 00000000000..d8b979ba0c2 --- /dev/null +++ b/tests/foreman/ui/test_registration.py @@ -0,0 +1,698 @@ 
+"""Tests for registration.
+
+:Requirement: Registration
+
+:CaseComponent: Registration
+
+:CaseAutomation: Automated
+
+:CaseImportance: Critical
+
+:Team: Rocket
+"""
+from datetime import datetime
+import re
+
+from airgun.exceptions import DisabledWidgetError
+from airgun.session import Session
+from fauxfactory import gen_string
+import pytest
+
+from robottelo import constants
+from robottelo.config import settings
+from robottelo.constants import FAKE_1_CUSTOM_PACKAGE, FAKE_7_CUSTOM_PACKAGE, REPO_TYPE
+
+pytestmark = pytest.mark.tier1
+
+
+def test_positive_verify_default_values_for_global_registration(
+    module_target_sat,
+    default_org,
+):
+    """Check for all the Default values pre-populated in the global registration template
+
+    :id: 34122bf3-ae23-47ca-ba3d-da0653d8fd33
+
+    :expectedresults: Default fields in the form should be auto-populated
+        e.g. organization, location, rex, insights setup, etc
+
+    :steps:
+        1. Check for the default values in the global registration template
+    """
+    module_target_sat.cli_factory.make_activation_key(
+        {'organization-id': default_org.id, 'name': gen_string('alpha')}
+    )
+    with module_target_sat.ui_session() as session:
+        cmd = session.host.get_register_command(
+            full_read=True,
+        )
+        assert cmd['general']['organization'] == 'Default Organization'
+        assert cmd['general']['location'] == 'Default Location'
+        assert cmd['general']['capsule'] == 'Nothing to select.'
+        assert cmd['general']['operating_system'] == ''
+        assert cmd['general']['host_group'] == 'Nothing to select.'
+        assert cmd['general']['insecure'] is False
+        assert cmd['advanced']['setup_rex'] == 'Inherit from host parameter (yes)'
+        assert cmd['advanced']['setup_insights'] == 'Inherit from host parameter (yes)'
+        assert cmd['advanced']['token_life_time'] == '4'
+        assert cmd['advanced']['rex_pull_mode'] == 'Inherit from host parameter (no)'
+        assert cmd['advanced']['update_packages'] is False
+        assert cmd['advanced']['ignore_error'] is False
+        assert cmd['advanced']['force'] is False
+
+
+@pytest.mark.tier2
+def test_positive_org_loc_change_for_registration(
+    module_activation_key,
+    module_entitlement_manifest_org,
+    module_location,
+    target_sat,
+):
+    """Change the organization and location and check that the correct org and loc are updated on the global registration page as well as in the command
+
+    :id: e83ed6bc-ceae-4021-87fe-3ecde1cbf347
+
+    :expectedresults: organization and location are updated correctly on the global registration page as well as in the command.
+
+    :CaseImportance: Medium
+    """
+    org = module_entitlement_manifest_org
+    new_org = target_sat.api.Organization().create()
+    new_loc = target_sat.api.Location().create()
+    target_sat.api.ActivationKey(organization=new_org).create()
+    with target_sat.ui_session() as session:
+        session.organization.select(org_name=org.name)
+        session.location.select(loc_name=module_location.name)
+        cmd = session.host.get_register_command()
+        expected_pairs = [
+            f'organization_id={org.id}',
+            f'location_id={module_location.id}',
+        ]
+        for pair in expected_pairs:
+            assert pair in cmd
+        # change the org and loc to check that the correct org and loc are updated in the registration command
+        session.organization.select(org_name=new_org.name)
+        session.location.select(loc_name=new_loc.name)
+        cmd = session.host.get_register_command()
+        expected_pairs = [
+            f'organization_id={new_org.id}',
+            f'location_id={new_loc.id}',
+        ]
+        for pair in expected_pairs:
+            assert pair in cmd
+
+
+def test_negative_global_registration_without_ak(
+    module_target_sat,
+    module_entitlement_manifest_org,
+    module_location,
+):
+    """Attempt to register a host without ActivationKey
+
+    :id: 34122bf3-ae23-47ca-ba3d-da0653d8fd36
+
+    :expectedresults: Generate command is disabled without ActivationKey
+    """
+    org = module_entitlement_manifest_org
+    with module_target_sat.ui_session() as session:
+        session.organization.select(org_name=org.name)
+        session.location.select(loc_name=module_location.name)
+        with pytest.raises(DisabledWidgetError) as context:
+            session.host.get_register_command()
+        assert 'Generate registration command button is disabled' in str(context.value)
+
+
+@pytest.mark.e2e
+@pytest.mark.no_containers
+@pytest.mark.tier3
+@pytest.mark.rhel_ver_match('[^6]')
+def test_positive_global_registration_end_to_end(
+    session,
+    module_activation_key,
+    module_org,
+    smart_proxy_location,
+    default_os,
+    default_smart_proxy,
+    rhel_contenthost,
+    target_sat,
+):
+    """Host registration form produces a correct registration command and host is
+    registered successfully with it, remote execution and insights are set up
+
+    :id: a02658bf-097e-47a8-8472-5d9f649ba07a
+
+    :customerscenario: true
+
+    :BZ: 1993874
+
+    :expectedresults: Host is successfully registered, remote execution and insights
+        client work out of the box
+
+    :parametrized: yes
+    """
+    # make sure global parameters for rex and insights are set to true
+    insights_cp = (
+        target_sat.api.CommonParameter()
+        .search(query={'search': 'name=host_registration_insights'})[0]
+        .read()
+    )
+    rex_cp = (
+        target_sat.api.CommonParameter()
+        .search(query={'search': 'name=host_registration_remote_execution'})[0]
+        .read()
+    )
+
+    if not insights_cp.value:
+        target_sat.api.CommonParameter(id=insights_cp.id, value=1).update(['value'])
+    if not rex_cp.value:
+        target_sat.api.CommonParameter(id=rex_cp.id, value=1).update(['value'])
+
+    # rex interface
+    iface = 'eth0'
+    # fill in the global registration form
+    with session:
+        cmd = session.host.get_register_command(
+            {
+                'general.operating_system': default_os.title,
+                'general.activation_keys': module_activation_key.name,
+                'advanced.update_packages': True,
+                'advanced.rex_interface': iface,
+                'general.insecure': True,
+            }
+        )
+        expected_pairs = [
+            f'organization_id={module_org.id}',
+            f'activation_keys={module_activation_key.name}',
+            f'location_id={smart_proxy_location.id}',
+            f'operatingsystem_id={default_os.id}',
+            f'{default_smart_proxy.name}',
+            'insecure',
+            'update_packages=true',
+        ]
+        for pair in expected_pairs:
+            assert pair in cmd
+        # rhel repo required for insights client installation,
+        # syncing it to the satellite would take too long
+        rhelver = rhel_contenthost.os_version.major
+        if rhelver > 7:
+            rhel_contenthost.create_custom_repos(**settings.repos[f'rhel{rhelver}_os'])
+        else:
+            rhel_contenthost.create_custom_repos(
+                **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']}
+            )
+        # make sure there will be a package available for update
+        if rhel_contenthost.os_version.major == 6:
+            package = FAKE_1_CUSTOM_PACKAGE
+            repo_url = settings.repos.yum_1['url']
+        else:
+            package = FAKE_7_CUSTOM_PACKAGE
+            repo_url = settings.repos.yum_3['url']
+        rhel_contenthost.create_custom_repos(fake_yum=repo_url)
+        rhel_contenthost.execute(f"yum install -y {package}")
+        # run curl
+        result = rhel_contenthost.execute(cmd)
+        assert result.status == 0
+        result = rhel_contenthost.execute('subscription-manager identity')
+        assert result.status == 0
+        # Assert that a yum update was made this day ("Update" or "I, U" in history)
+        timezone_offset = rhel_contenthost.execute('date +"%:z"').stdout.strip()
+        tzinfo = datetime.strptime(timezone_offset, '%z').tzinfo
+        result = rhel_contenthost.execute('yum history | grep U')
+        assert result.status == 0
+        assert datetime.now(tzinfo).strftime('%Y-%m-%d') in result.stdout
+        # Set "Connect to host using IP address"
+        target_sat.api.Parameter(
+            host=rhel_contenthost.hostname,
+            name='remote_execution_connect_by_ip',
+            parameter_type='boolean',
+            value='True',
+        ).create()
+        # run insights-client via REX
+        command = "insights-client --status"
+        invocation_command = target_sat.cli_factory.job_invocation(
+            {
+                'job-template': 'Run Command - Script Default',
+                'inputs': f'command={command}',
+                'search-query': f"name ~ {rhel_contenthost.hostname}",
+            }
+        )
+        # results provide all info but job invocation might not be finished yet
+        result = (
+            target_sat.api.JobInvocation()
+            .search(
+                query={'search': f'id={invocation_command["id"]} and host={rhel_contenthost.hostname}'}
+            )[0]
+            .read()
+        )
+        # make sure that task is finished
+        task_result = target_sat.wait_for_tasks(
+            search_query=(f'id = {result.task.id}'), search_rate=2, max_tries=60
+        )
+        assert task_result[0].result == 'success'
+        host = (
+            target_sat.api.Host()
+            .search(query={'search': f'name={rhel_contenthost.hostname}'})[0]
+            .read()
+        )
+        for interface in host.interface:
+            interface_result = target_sat.api.Interface(host=host.id).search(
+                query={'search': f'{interface.id}'}
+            )[0]
+            # more interfaces can be inside the host
+            if interface_result.identifier == iface:
+                assert interface_result.execution
+
+
+@pytest.mark.tier2
+def test_global_registration_form_populate(
+    module_org,
+    session,
+    module_ak_with_cv,
+    module_lce,
+    module_promoted_cv,
+    default_architecture,
+    default_os,
+    target_sat,
+):
+    """Host registration form should be populated automatically based on the host-group
+
+    :id: b949e010-36b8-48b8-9907-36138342c72b
+
+    :expectedresults: Some of the fields in the form should be populated based on host-group
+        e.g. activation key, operating system, life-cycle environment, host parameters for
+        remote-execution, insights setup.
+
+    :steps:
+        1. create and sync repository
+        2. create the content view and activation-key
+        3. create the host-group with activation key, operating system, host-parameters
+        4. Open the global registration form and select the same host-group
+        5. check that the host registration form is populated automatically based on the host-group
+
+    :BZ: 2056469, 1994654
+
+    :customerscenario: true
+    """
+    hg_name = gen_string('alpha')
+    iface = gen_string('alpha')
+    group_params = {'name': 'host_packages', 'value': constants.FAKE_0_CUSTOM_PACKAGE}
+    target_sat.api.HostGroup(
+        name=hg_name,
+        organization=[module_org],
+        lifecycle_environment=module_lce,
+        architecture=default_architecture,
+        operatingsystem=default_os,
+        content_view=module_promoted_cv,
+        group_parameters_attributes=[group_params],
+    ).create()
+    new_org = target_sat.api.Organization().create()
+    new_ak = target_sat.api.ActivationKey(organization=new_org).create()
+    with session:
+        session.hostgroup.update(
+            hg_name,
+            {
+                'activation_keys.activation_keys': module_ak_with_cv.name,
+            },
+        )
+        cmd = session.host.get_register_command(
+            {
+                'general.host_group': hg_name,
+                'advanced.rex_interface': iface,
+                'general.insecure': True,
+            },
+            full_read=True,
+        )
+        assert hg_name in cmd['general']['host_group']
+        assert module_ak_with_cv.name in cmd['general']['activation_key_helper']
+        assert constants.FAKE_0_CUSTOM_PACKAGE in cmd['advanced']['install_packages_helper']
+
+        session.organization.select(org_name=new_org.name)
+        cmd = session.host.get_register_command(
+            {
+                'general.organization': new_org.name,
+                'general.operating_system': default_os.title,
+                'general.insecure': True,
+            },
+            full_read=True,
+        )
+        assert new_org.name in cmd['general']['organization']
+        assert new_ak.name in cmd['general']['activation_keys']
+
+
+@pytest.mark.tier2
+@pytest.mark.usefixtures('enable_capsule_for_registration')
+@pytest.mark.no_containers
+def test_global_registration_with_gpg_repo_and_default_package(
+    session, module_activation_key, default_os, default_smart_proxy, rhel8_contenthost
+):
+    """Host registration form produces a correct registration command and host is
+    registered successfully with the gpg repo enabled and the default package
+    installed.
+
+    :id: b5738b20-e281-4d0b-ac78-dcdc177b8c9f
+
+    :expectedresults: Host is successfully registered, gpg repo is enabled
+        and default package is installed.
+
+    :steps:
+        1. create and sync repository
+        2. create the content view and activation-key
+        3. update the 'host_packages' parameter in organization with package name e.g. vim
+        4. open the global registration form and update the gpg repo and key
+        5. check the host is registered successfully with the same package installed
+        6. check that the gpg repo exists on the registered host
+
+    :parametrized: yes
+    """
+    client = rhel8_contenthost
+    repo_name = 'foreman_register'
+    repo_url = settings.repos.gr_yum_repo.url
+    repo_gpg_url = settings.repos.gr_yum_repo.gpg_url
+    with session:
+        cmd = session.host.get_register_command(
+            {
+                'general.operating_system': default_os.title,
+                'general.capsule': default_smart_proxy.name,
+                'general.activation_keys': module_activation_key.name,
+                'general.insecure': True,
+                'advanced.force': True,
+                'advanced.install_packages': 'mlocate vim',
+                'advanced.repository': repo_url,
+                'advanced.repository_gpg_key_url': repo_gpg_url,
+            }
+        )
+
+    # rhel repo required for insights client installation,
+    # syncing it to the satellite would take too long
+    rhelver = client.os_version.major
+    if rhelver > 7:
+        repos = {f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']['baseos']}
+    else:
+        repos = {
+            'rhel7_os': settings.repos['rhel7_os'],
+            'rhel7_extras': settings.repos['rhel7_extras'],
+        }
+    client.create_custom_repos(**repos)
+    # run curl
+    result = client.execute(cmd)
+    assert result.status == 0
+    result = client.execute('yum list installed | grep mlocate')
+    assert result.status == 0
+    assert 'mlocate' in result.stdout
+    result = client.execute(f'yum -v repolist {repo_name}')
+    assert result.status == 0
+    assert repo_url in result.stdout
+
+
+@pytest.mark.tier3
+@pytest.mark.usefixtures('enable_capsule_for_registration')
+def test_global_re_registration_host_with_force_ignore_error_options(
+    session, module_activation_key, default_os, default_smart_proxy, rhel7_contenthost
+):
+    """If the ignore_error and force checkboxes are checked, then a registered host
+    can be re-registered without any error.
+
+    :id: 8f0ecc13-5d18-4adb-acf5-3f3276dccbb7
+
+    :expectedresults: Verify the force and ignore checkbox options
+
+    :steps:
+        1. create and sync repository
+        2. create the content view and activation-key
+        3. open the global registration form and select the --force and --Ignore Errors options
+        4. register the host with the generated curl command
+        5. re-register the same host again and check that it registers successfully
+
+    :parametrized: yes
+    """
+    client = rhel7_contenthost
+    with session:
+        cmd = session.host.get_register_command(
+            {
+                'general.operating_system': default_os.title,
+                'general.capsule': default_smart_proxy.name,
+                'general.activation_keys': module_activation_key.name,
+                'general.insecure': True,
+                'advanced.force': True,
+                'advanced.ignore_error': True,
+            }
+        )
+    client.execute(cmd)
+    result = client.execute('subscription-manager identity')
+    assert result.status == 0
+    # rerun the register command
+    client.execute(cmd)
+    result = client.execute('subscription-manager identity')
+    assert result.status == 0
+
+
+@pytest.mark.tier2
+@pytest.mark.usefixtures('enable_capsule_for_registration')
+def test_global_registration_token_restriction(
+    session, module_activation_key, rhel8_contenthost, default_os, default_smart_proxy, target_sat
+):
+    """The global registration token should be used only for the registration call;
+    it should be restricted from any other api calls.
+
+    :id: 4528b5c6-0a6d-40cd-857a-68b76db2179b
+
+    :expectedresults: global registration token should be restricted from any api calls
+        other than registration
+
+    :steps:
+        1. open the global registration form and generate the curl token
+        2. use that curl token to execute other api calls e.g. GET /hosts, /users
+
+    :parametrized: yes
+    """
+    client = rhel8_contenthost
+    with session:
+        cmd = session.host.get_register_command(
+            {
+                'general.operating_system': default_os.title,
+                'general.capsule': default_smart_proxy.name,
+                'general.activation_keys': module_activation_key.name,
+                'general.insecure': True,
+            }
+        )
+
+    # pull the Authorization header out of the generated registration command
+    pattern = re.compile("Authorization.*(?=')")
+    auth_header = re.search(pattern, cmd).group()
+
+    # build curl
+    curl_users = f'curl -X GET -k -H {auth_header} -i {target_sat.url}/api/users/'
+    curl_hosts = f'curl -X GET -k -H {auth_header} -i {target_sat.url}/api/hosts/'
+    for curl_cmd in (curl_users, curl_hosts):
+        result = client.execute(curl_cmd)
+        assert result.status == 0
+        assert 'Unable to authenticate user' in result.stdout
+
+
+def test_positive_host_registration_with_non_admin_user(
+    test_name,
+    module_entitlement_manifest_org,
+    module_location,
+    target_sat,
+    rhel8_contenthost,
+    module_activation_key,
+):
+    """Register hosts from a non-admin user with only register_hosts, edit_hosts
+    and view_organization permissions
+
+    :id: 35458bbc-4556-41b9-ba26-ae0b15179731
+
+    :expectedresults: User with register hosts permission is able to register hosts.
+    """
+    user_password = gen_string('alpha')
+    org = module_entitlement_manifest_org
+    role = target_sat.api.Role(organization=[org]).create()
+
+    user_permissions = {
+        'Organization': ['view_organizations'],
+        'Host': ['view_hosts'],
+    }
+    target_sat.api_factory.create_role_permissions(role, user_permissions)
+    user = target_sat.api.User(
+        role=[role],
+        admin=False,
+        password=user_password,
+        organization=[org],
+        location=[module_location],
+        default_organization=org,
+        default_location=module_location,
+    ).create()
+    role = target_sat.cli.Role.info({'name': 'Register hosts'})
+    target_sat.cli.User.add_role({'id': user.id, 'role-id': role['id']})
+
+    with Session(test_name, user=user.login, password=user_password) as session:
+
+        cmd = session.host_new.get_register_command(
+            {
+                'general.insecure': True,
+                'general.activation_keys': module_activation_key.name,
+            }
+        )
+
+        result = rhel8_contenthost.execute(cmd)
+        assert result.status == 0, f'Failed to register host: {result.stderr}'
+
+        # Verify server.hostname and server.port from subscription-manager config
+        assert target_sat.hostname == rhel8_contenthost.subscription_config['server']['hostname']
+        assert rhel8_contenthost.subscription_config['server']['port'] == constants.CLIENT_PORT
+
+
+@pytest.mark.tier2
+def test_positive_global_registration_form(
+    session, module_activation_key, module_org, smart_proxy_location, default_os, target_sat
+):
+    """Host registration form produces a correct curl command for various inputs
+
+    :id: f81c2ec4-85b1-4372-8e63-464ddbf70296
+
+    :customerscenario: true
+
+    :expectedresults: The curl command contains all required parameters
+    """
+    # rex and insights parameters are only specified in curl when differing from
+    # inherited parameters
+    result = (
+        target_sat.api.CommonParameter()
+        .search(query={'search': 'name=host_registration_remote_execution'})[0]
+        .read()
+    )
+    rex_value = not result.value
+    result = (
+        target_sat.api.CommonParameter()
+        .search(query={'search': 'name=host_registration_insights'})[0]
+        .read()
+    )
+    insights_value = not result.value
+    hostgroup = target_sat.api.HostGroup(
+        organization=[module_org], location=[smart_proxy_location]
+    ).create()
+    iface = 'eth0'
+    with session:
+        cmd = session.host.get_register_command(
+            {
+                'advanced.setup_insights': 'Yes (override)' if insights_value
else 'No (override)', + 'advanced.setup_rex': 'Yes (override)' if rex_value else 'No (override)', + 'general.insecure': True, + 'general.host_group': hostgroup.name, + 'general.operating_system': default_os.title, + 'general.activation_keys': module_activation_key.name, + 'advanced.update_packages': True, + 'advanced.rex_interface': iface, + } + ) + expected_pairs = [ + f'organization_id={module_org.id}', + f'activation_keys={module_activation_key.name}', + f'hostgroup_id={hostgroup.id}', + f'location_id={smart_proxy_location.id}', + f'operatingsystem_id={default_os.id}', + f'remote_execution_interface={iface}', + f'setup_insights={"true" if insights_value else "false"}', + f'setup_remote_execution={"true" if rex_value else "false"}', + f'{target_sat.hostname}', + 'insecure', + 'update_packages=true', + ] + for pair in expected_pairs: + assert pair in cmd + + +@pytest.mark.tier2 +def test_global_registration_with_capsule_host( + session, + capsule_configured, + rhel8_contenthost, + module_org, + module_location, + module_product, + default_os, + module_lce_library, + target_sat, +): + """Host registration form produces a correct registration command and host is + registered successfully with selected capsule from form. + + :id: 6356c6d0-ee45-4ad7-8a0e-484d3490bc58 + + :expectedresults: Host is successfully registered with capsule host, + remote execution and insights client work out of the box + + :steps: + 1. create and sync repository + 2. create the content view and activation-key + 3. integrate capsule and sync content + 4. open the global registration form and select the same capsule + 5. check host is registered successfully with selected capsule + + :parametrized: yes + + :CaseAutomation: Automated + """ + client = rhel8_contenthost + repo = target_sat.api.Repository( + url=settings.repos.yum_1.url, + content_type=REPO_TYPE['yum'], + product=module_product, + ).create() + # Sync all repositories in the product + module_product.sync() + capsule = target_sat.api.Capsule(id=capsule_configured.nailgun_capsule.id).search( + query={'search': f'name={capsule_configured.hostname}'} + )[0] + module_org = target_sat.api.Organization(id=module_org.id).read() + module_org.smart_proxy.append(capsule) + module_location = target_sat.api.Location(id=module_location.id).read() + module_location.smart_proxy.append(capsule) + module_org.update(['smart_proxy']) + module_location.update(['smart_proxy']) + + # Associate the lifecycle environment with the capsule + capsule.content_add_lifecycle_environment(data={'environment_id': module_lce_library.id}) + result = capsule.content_lifecycle_environments() + # TODO result is not used, please add assert once you fix the test + + # Create a content view with the repository + cv = target_sat.api.ContentView(organization=module_org, repository=[repo]).create() + + # Publish new version of the content view + cv.publish() + cv = cv.read() + + assert len(cv.version) == 1 + + activation_key = target_sat.api.ActivationKey( + content_view=cv, environment=module_lce_library, organization=module_org + ).create() + + # Assert that a task to sync lifecycle environment to the capsule + # is started (or finished already) + sync_status = capsule.content_get_sync() + assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time'] + + # Wait till capsule sync finishes + for task in sync_status['active_sync_tasks']: + target_sat.api.ForemanTask(id=task['id']).poll() + with session: + session.organization.select(org_name=module_org.name) + 
session.location.select(loc_name=module_location.name) + cmd = session.host.get_register_command( + { + 'general.organization': module_org.name, + 'general.location': module_location.name, + 'general.operating_system': default_os.title, + 'general.capsule': capsule_configured.hostname, + 'general.activation_keys': activation_key.name, + 'general.insecure': True, + } + ) + client.create_custom_repos(rhel7=settings.repos.rhel7_os) + # run curl + client.execute(cmd) + result = client.execute('subscription-manager identity') + assert result.status == 0 + assert module_lce_library.name in result.stdout + assert module_org.name in result.stdout diff --git a/tests/foreman/ui/test_remoteexecution.py b/tests/foreman/ui/test_remoteexecution.py index 59fd554c146..021374ffc16 100644 --- a/tests/foreman/ui/test_remoteexecution.py +++ b/tests/foreman/ui/test_remoteexecution.py @@ -1,76 +1,75 @@ -"""Test class for Remote Execution Management UI +"""Test class for Job Invocation procedure :Requirement: Remoteexecution :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import datetime import time +from inflection import camelize import pytest from wait_for import wait_for -from robottelo.utils.datafactory import gen_string +from robottelo.utils.datafactory import ( + gen_string, +) -@pytest.mark.skip_if_open('BZ:2182353') @pytest.mark.rhel_ver_match('8') -@pytest.mark.tier3 -def test_positive_run_default_job_template_by_ip(session, rex_contenthost, module_org): - """Run a job template against a single host by ip +def test_positive_run_default_job_template( + session, + target_sat, + rex_contenthost, + module_org, +): + """Run a job template on a host :id: a21eac46-1a22-472d-b4ce-66097159a868 :Setup: Use pre-defined job template. - :Steps: + :steps: - 1. Navigate to an individual host and click Run Job - 2. Select the job and appropriate template - 3. Run the job + 1. Get contenthost with rex enabled + 2. Navigate to an individual host and click Run Job + 3. Select the job and appropriate template + 4. 
Run the job

-    :expectedresults: Verify the job was successfully ran against the host
+    :expectedresults: Verify the job was successfully run against the host; also check using the job widget on the main dashboard

     :parametrized: yes

-    :bz: 1898656
+    :bz: 1898656, 2182353

     :customerscenario: true
-
-    :CaseLevel: Integration
     """
+    hostname = rex_contenthost.hostname

-    with session:
+
+    with target_sat.ui_session() as session:
         session.organization.select(module_org.name)
-        session.location.select('Default Location')
         assert session.host.search(hostname)[0]['Name'] == hostname
         command = 'ls'
-        job_status = session.host.schedule_remote_job(
-            [hostname],
+        session.jobinvocation.run(
             {
                 'category_and_template.job_category': 'Commands',
                 'category_and_template.job_template': 'Run Command - Script Default',
+                'target_hosts_and_inputs.targetting_type': 'Hosts',
+                'target_hosts_and_inputs.targets': hostname,
                 'target_hosts_and_inputs.command': command,
-                'advanced_fields.execution_order_randomized': True,
-                'schedule.immediate': True,
-            },
+            }
         )
-        assert job_status['overview']['job_status'] == 'Success'
-        assert job_status['overview']['execution_order'] == 'Execution order: randomized'
-        assert job_status['overview']['hosts_table'][0]['Host'] == hostname
-        assert job_status['overview']['hosts_table'][0]['Status'] == 'success'
+        session.jobinvocation.wait_job_invocation_state(entity_name='Run ls', host_name=hostname)
+        status = session.jobinvocation.read(entity_name='Run ls', host_name=hostname)
+        assert status['overview']['hosts_table'][0]['Status'] == 'success'

         # check status also on the job dashboard
         job_name = f'Run {command}'
@@ -80,17 +79,60 @@ def test_positive_run_default_job_template_by_ip(session, rex_contenthost, modul
         assert job_name in [job['Name'] for job in success_jobs]


-@pytest.mark.skip_if_open('BZ:2182353')
+@pytest.mark.tier4
 @pytest.mark.rhel_ver_match('8')
-@pytest.mark.tier3
-def test_positive_run_custom_job_template_by_ip(session, module_org, rex_contenthost):
-    """Run a job template on a host connected by ip
+def test_rex_through_host_details(session, target_sat, rex_contenthost, module_org):
+    """Run remote execution using the new host details page
+
+    :id: ee625595-4995-43b2-9e6d-633c9b33ff93
+
+    :steps:
+        1. Navigate to Overview tab
+        2. Schedule a job
+        3. Wait for the job to finish
+        4.
Job is visible in Recent jobs card + + :expectedresults: Remote execution succeeded and the job is visible on Recent jobs card on + Overview tab + """ + + hostname = rex_contenthost.hostname + + job_args = { + 'category_and_template.job_category': 'Commands', + 'category_and_template.job_template': 'Run Command - Script Default', + 'target_hosts_and_inputs.command': 'ls', + } + with target_sat.ui_session() as session: + session.organization.select(module_org.name) + session.host_new.schedule_job(hostname, job_args) + task_result = target_sat.wait_for_tasks( + search_query=(f'Remote action: Run ls on {hostname}'), + search_rate=2, + max_tries=30, + ) + task_status = target_sat.api.ForemanTask(id=task_result[0].id).poll() + assert task_status['result'] == 'success' + recent_jobs = session.host_new.get_details(hostname, "overview.recent_jobs")['overview'] + assert recent_jobs['recent_jobs']['finished']['table'][0]['column0'] == "Run ls" + assert recent_jobs['recent_jobs']['finished']['table'][0]['column2'] == "succeeded" + + +@pytest.mark.tier4 +@pytest.mark.rhel_ver_match('8') +@pytest.mark.parametrize( + 'ui_user', [{'admin': True}, {'admin': False}], indirect=True, ids=['adminuser', 'nonadminuser'] +) +def test_positive_run_custom_job_template( + session, module_org, default_location, target_sat, ui_user, rex_contenthost +): + """Run a job template on a host :id: 3a59eb15-67c4-46e1-ba5f-203496ec0b0c :Setup: Create a working job template. - :Steps: + :steps: 1. Set remote_execution_connect_by_ip on host to true 2. Navigate to an individual host and click Run Job @@ -101,14 +143,18 @@ def test_positive_run_custom_job_template_by_ip(session, module_org, rex_content :parametrized: yes - :CaseLevel: System + :bz: 2220965 + + :customerscenario: true """ hostname = rex_contenthost.hostname + ui_user.location.append(target_sat.api.Location(id=default_location.id)) + ui_user.update(['location']) job_template_name = gen_string('alpha') - with session: + with target_sat.ui_session() as session: session.organization.select(module_org.name) - session.location.select('Default Location') + assert session.host.search(hostname)[0]['Name'] == hostname session.jobtemplate.create( { 'template.name': job_template_name, @@ -119,35 +165,35 @@ def test_positive_run_custom_job_template_by_ip(session, module_org, rex_content } ) assert session.jobtemplate.search(job_template_name)[0]['Name'] == job_template_name - assert session.host.search(hostname)[0]['Name'] == hostname - job_status = session.host.schedule_remote_job( - [hostname], + session.jobinvocation.run( { 'category_and_template.job_category': 'Miscellaneous', 'category_and_template.job_template': job_template_name, + 'target_hosts_and_inputs.targets': hostname, 'target_hosts_and_inputs.command': 'ls', - 'schedule.immediate': True, - }, + } ) - assert job_status['overview']['job_status'] == 'Success' - assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'success' + job_description = f'{camelize(job_template_name.lower())} with inputs command="ls"' + session.jobinvocation.wait_job_invocation_state( + entity_name=job_description, host_name=hostname + ) + status = session.jobinvocation.read(entity_name=job_description, host_name=hostname) + assert status['overview']['hosts_table'][0]['Status'] == 'success' -@pytest.mark.skip_if_open('BZ:2182353') @pytest.mark.upgrade @pytest.mark.tier3 @pytest.mark.rhel_ver_list([8]) -def test_positive_run_job_template_multiple_hosts_by_ip( - 
session, module_org, target_sat, registered_hosts
+def test_positive_run_job_template_multiple_hosts(
+    session, module_org, target_sat, rex_contenthosts
 ):
-    """Run a job template against multiple hosts by ip
+    """Run a job template against multiple hosts

     :id: c4439ec0-bb80-47f6-bc31-fa7193bfbeeb

     :Setup: Create a working job template.

-    :Steps:
+    :steps:

         1. Set remote_execution_connect_by_ip on hosts to true
         2. Navigate to the hosts page and select at least two hosts
@@ -156,25 +202,25 @@ def test_positive_run_job_template_multiple_hosts_by_ip(
         5. Run the job

     :expectedresults: Verify the job was successfully ran against the hosts
-
-    :CaseLevel: System
     """
+
     host_names = []
-    for vm in registered_hosts:
+    for vm in rex_contenthosts:
         host_names.append(vm.hostname)
         vm.configure_rex(satellite=target_sat, org=module_org)
-    with session:
+    with target_sat.ui_session() as session:
         session.organization.select(module_org.name)
-        session.location.select('Default Location')
-        hosts = session.host.search(' or '.join([f'name="{hostname}"' for hostname in host_names]))
-        assert {host['Name'] for host in hosts} == set(host_names)
+        for host in host_names:
+            assert session.host.search(host)[0]['Name'] == host
+        session.host.reset_search()
         job_status = session.host.schedule_remote_job(
             host_names,
             {
                 'category_and_template.job_category': 'Commands',
                 'category_and_template.job_template': 'Run Command - Script Default',
-                'target_hosts_and_inputs.command': 'ls',
-                'schedule.immediate': True,
+                'target_hosts_and_inputs.command': 'sleep 5',
             },
         )
         assert job_status['overview']['job_status'] == 'Success'
@@ -186,7 +232,6 @@ def test_positive_run_job_template_multiple_hosts_by_ip(
         )


-@pytest.mark.skip_if_open('BZ:2182353')
 @pytest.mark.rhel_ver_match('8')
 @pytest.mark.tier3
 def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_contenthost):
@@ -196,7 +241,7 @@ def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_cont

     :Setup: Use pre-defined job template.

-    :Steps:
+    :steps:

         1. Set remote_execution_connect_by_ip on host to true
         2. Navigate to an individual host and click Run Job
@@ -211,22 +256,21 @@ def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_cont
         2. Verify the job was successfully ran after the designated time

     :parametrized: yes
-
-    :CaseLevel: System
     """
-    job_time = 10 * 60
+    job_time = 6 * 60
     hostname = rex_contenthost.hostname
     with session:
         session.organization.select(module_org.name)
         session.location.select('Default Location')
         assert session.host.search(hostname)[0]['Name'] == hostname
         plan_time = session.browser.get_client_datetime() + datetime.timedelta(seconds=job_time)
+        command_to_run = 'sleep 10'
         job_status = session.host.schedule_remote_job(
             [hostname],
             {
                 'category_and_template.job_category': 'Commands',
                 'category_and_template.job_template': 'Run Command - Script Default',
-                'target_hosts_and_inputs.command': 'ls',
+                'target_hosts_and_inputs.command': command_to_run,
                 'schedule.future': True,
                 'schedule_future_execution.start_at_date': plan_time.strftime("%Y/%m/%d"),
                 'schedule_future_execution.start_at_time': plan_time.strftime("%H:%M"),
             },
         )
@@ -240,34 +284,36 @@ def test_positive_run_scheduled_job_template_by_ip(session, module_org, rex_cont
         # the job_time must be significantly greater than job creation time.
assert job_left_time > 0 assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'N/A' + assert job_status['overview']['hosts_table'][0]['Status'] in ('Awaiting start', 'N/A') # sleep 3/4 of the left time time.sleep(job_left_time * 3 / 4) - job_status = session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table') + job_status = session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + ) assert job_status['overview']['hosts_table'][0]['Host'] == hostname - assert job_status['overview']['hosts_table'][0]['Status'] == 'N/A' + assert job_status['overview']['hosts_table'][0]['Status'] in ('Awaiting start', 'N/A') # recalculate the job left time to be more accurate job_left_time = (plan_time - session.browser.get_client_datetime()).total_seconds() # the last read time should not take more than 1/4 of the last left time assert job_left_time > 0 wait_for( - lambda: session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table')[ - 'overview' - ]['hosts_table'][0]['Status'] + lambda: session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + )['overview']['hosts_table'][0]['Status'] == 'running', timeout=(job_left_time + 30), delay=1, ) # wait the job to change status to "success" wait_for( - lambda: session.jobinvocation.read('Run ls', hostname, 'overview.hosts_table')[ - 'overview' - ]['hosts_table'][0]['Status'] + lambda: session.jobinvocation.read( + f'Run {command_to_run}', hostname, 'overview.hosts_table' + )['overview']['hosts_table'][0]['Status'] == 'success', timeout=30, delay=1, ) - job_status = session.jobinvocation.read('Run ls', hostname, 'overview') + job_status = session.jobinvocation.read(f'Run {command_to_run}', hostname, 'overview') assert job_status['overview']['job_status'] == 'Success' assert job_status['overview']['hosts_table'][0]['Host'] == hostname assert job_status['overview']['hosts_table'][0]['Status'] == 'success' @@ -280,7 +326,7 @@ def test_positive_ansible_job_check_mode(session): :id: 7aeb7253-e555-4e28-977f-71f16d3c32e2 - :Steps: + :steps: 1. Set the value of the ansible_roles_check_mode parameter to true on a host 2. Associate one or more Ansible roles with the host @@ -289,11 +335,9 @@ def test_positive_ansible_job_check_mode(session): :expectedresults: Verify that the roles were run in check mode (i.e. no changes were made on the host) - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-RemoteExecution :Team: Rocket """ @@ -306,7 +350,7 @@ def test_positive_ansible_config_report_failed_tasks_errors(session): :id: 1a91e534-143f-4f35-953a-7ad8b7d2ddf3 - :Steps: + :steps: 1. Import Ansible roles 2. Assign Ansible roles to a host @@ -314,11 +358,9 @@ def test_positive_ansible_config_report_failed_tasks_errors(session): :expectedresults: Verify that any task failures are listed as errors in the config report - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -331,7 +373,7 @@ def test_positive_ansible_config_report_changes_notice(session): :id: 8c90f179-8b70-4932-a477-75dc3566c437 - :Steps: + :steps: 1. Import Ansible Roles 2. 
Assign Ansible roles to a host @@ -340,11 +382,9 @@ def test_positive_ansible_config_report_changes_notice(session): :expectedresults: Verify that any tasks that make changes on the host are listed as notice in the config report - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -357,40 +397,15 @@ def test_positive_ansible_variables_imported_with_roles(session): :id: 107c53e8-5a8a-4291-bbde-fbd66a0bb85e - :Steps: + :steps: 1. Import Ansible roles :expectedresults: Verify that any variables in the role were also imported to Satellite - :CaseLevel: System - - :CaseAutomation: NotAutomated - - :CaseComponent: Ansible - - :Team: Rocket - """ - - -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_roles_import_in_background(session): - """Verify that importing roles does not create a popup that blocks the UI - - :id: 4f1c7b76-9c67-42b2-9a73-980ca1f05abc - - :Steps: - - 1. Import Ansible roles - - :expectedresults: Verify that the UI is accessible while roles are importing - - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -403,18 +418,16 @@ def test_positive_ansible_roles_ignore_list(session): :id: 6fa1d8f0-b583-4a07-88eb-c9ae7fcd0219 - :Steps: + :steps: 1. Add roles to the ignore list in Administer > Settings > Ansible 2. Navigate to Configure > Roles :expectedresults: Verify that any roles on the ignore list are not available for import - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -428,7 +441,7 @@ def test_positive_ansible_variables_installed_with_collection(session): :id: 7ff88022-fe9b-482f-a6bb-3922036a1e1c - :Steps: + :steps: 1. Install an Ansible collection 2. Navigate to Configure > Variables @@ -436,11 +449,9 @@ def test_positive_ansible_variables_installed_with_collection(session): :expectedresults: Verify that any variables associated with the collection are present on Configure > Variables - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -453,7 +464,7 @@ def test_positive_install_ansible_collection_via_job_invocation(session): :id: d4096aef-f6fc-41b6-ae56-d19b1f49cd42 - :Steps: + :steps: 1. Enable a host for remote execution 2. Navigate to Hosts > Schedule Remote Job @@ -464,11 +475,9 @@ def test_positive_install_ansible_collection_via_job_invocation(session): :expectedresults: The Ansible collection is successfully installed on the host - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-RemoteExecution :Team: Rocket """ @@ -481,7 +490,7 @@ def test_positive_set_ansible_role_order_per_host(session): :id: 24fbcd60-7cd1-46ff-86ac-16d6b436202c - :Steps: + :steps: 1. Enable a host for remote execution 2. Navigate to Hosts > All Hosts > $hostname > Edit > Ansible Roles @@ -491,11 +500,9 @@ def test_positive_set_ansible_role_order_per_host(session): :expectedresults: The roles are run in the specified order - :CaseLevel: System - :CaseAutomation: NotAutomated - :CaseComponent: Ansible + :CaseComponent: Ansible-ConfigurationManagement :Team: Rocket """ @@ -508,7 +515,7 @@ def test_positive_set_ansible_role_order_per_hostgroup(session): :id: 9eb5bc8e-081a-45b9-8751-f4220c944da6 - :Steps: + :steps: 1. 
Enable a host for remote execution
         2. Create a host group
@@ -520,39 +527,61 @@

     :expectedresults: The roles are run in the specified order

-    :CaseLevel: System
-
     :CaseAutomation: NotAutomated

-    :CaseComponent: Ansible
+    :CaseComponent: Ansible-ConfigurationManagement

     :Team: Rocket
     """

 @pytest.mark.stubbed
-@pytest.mark.tier3
-def test_positive_matcher_field_highlight(session):
-    """Verify that Ansible variable matcher fields change color when modified
+@pytest.mark.tier2
+def test_positive_schedule_recurring_host_job():
+    """Using the new Host UI, schedule a recurring job on a Host

-    :id: 67b45cfe-31bb-41a8-b88e-27917c68f33e
+    :id: 5052be04-28ab-4349-8bee-851ef76e4ffa

-    :Steps:
+    :CaseComponent: Ansible-RemoteExecution

-        1. Navigate to Configure > Variables > $variablename
-        2. Select the "Override" checkbox in the "Default Behavior" section
-        3. Click "+Add Matcher" in the "Specify Matcher" section
-        4. Select an option from the "Attribute type" dropdown
-        5. Add text to the attribute type input field
-        6. Add text to the "Value" input field
+    :Team: Rocket

-    :expectedresults: The background of each field turns yellow when a change is made
+    :steps:
+        1. Register a RHEL host to Satellite.
+        2. Import all roles available by default.
+        3. Assign a role to the host.
+        4. Navigate to the new UI for the given Host.
+        5. Select the Jobs subtab.
+        6. Click the Schedule Recurring Job button, and using the popup, schedule a
+            recurring Job.
+        7. Navigate to Job Invocations.
+
+    :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed
+        time
+
     """

-    :CaseLevel: System
-
-    :CaseAutomation: NotAutomated
+@pytest.mark.stubbed
+@pytest.mark.tier2
+def test_positive_schedule_recurring_hostgroup_job():
+    """Using the new recurring job scheduler, schedule a recurring job on a Hostgroup

-    :CaseComponent: Ansible
+    :id: c65db99b-11fe-4a32-89d0-0a4692b07efe
+
+    :CaseComponent: Ansible-RemoteExecution

     :Team: Rocket
+
+    :steps:
+        1. Register a RHEL host to Satellite.
+        2. Import all roles available by default.
+        3. Assign a role to the host.
+        4. Navigate to the Host Group page.
+        5. Select the "Configure Ansible Job" action.
+        6. Click the Schedule Recurring Job button, and using the popup, schedule a
+            recurring Job.
+        7. Navigate to Job Invocations.
+
+    :expectedresults: The scheduled Job appears in the Job Invocation list at the appointed
+        time
    """
diff --git a/tests/foreman/ui/test_reporttemplates.py b/tests/foreman/ui/test_reporttemplates.py
index ae8f6bae77b..f8fbfff52d9 100644
--- a/tests/foreman/ui/test_reporttemplates.py
+++ b/tests/foreman/ui/test_reporttemplates.py
@@ -4,38 +4,32 @@

 :CaseAutomation: Automated

-:CaseLevel: Integration
-
 :CaseComponent: Reporting

 :team: Phoenix-subscriptions

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import csv
 import json
 import os
-from pathlib import Path
-from pathlib import PurePath
+from pathlib import Path, PurePath

-import pytest
-import yaml
 from lxml import etree
 from nailgun import entities
+import pytest
+import yaml

-from robottelo.config import robottelo_tmp_dir
-from robottelo.config import settings
-from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
-from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME
-from robottelo.constants import FAKE_1_CUSTOM_PACKAGE
-from robottelo.constants import PRDS
-from robottelo.constants import REPOS
-from robottelo.constants import REPOSET
-from robottelo.ui.utils import create_fake_host
+from robottelo.config import robottelo_tmp_dir, settings
+from robottelo.constants import (
+    DEFAULT_SUBSCRIPTION_NAME,
+    FAKE_0_CUSTOM_PACKAGE_NAME,
+    FAKE_1_CUSTOM_PACKAGE,
+    PRDS,
+    REPOS,
+    REPOSET,
+)
 from robottelo.utils.datafactory import gen_string
@@ -146,8 +140,6 @@ def test_positive_end_to_end(session, module_org, module_location):

     :expectedresults: All expected CRUD actions finished successfully

-    :CaseLevel: Integration
-
     :CaseImportance: Critical
     """
     name = gen_string('alpha')
@@ -238,7 +230,7 @@

 @pytest.mark.upgrade
 @pytest.mark.tier2
-def test_positive_generate_registered_hosts_report(session, module_org, module_location):
+def test_positive_generate_registered_hosts_report(target_sat, module_org, module_location):
     """Use provided Host - Registered Content Hosts report for testing

     :id: b44d4cd8-a78e-47cf-9993-0bb871ac2c96

     :expectedresults: The Host - Registered Content Hosts report is generated (with host filter)
         and it contains created host with correct data

-    :CaseLevel: Integration
-
     :CaseImportance: High
     """
     # generate Host Status report
     os_name = 'comma,' + gen_string('alpha')
-    os = entities.OperatingSystem(name=os_name).create()
+    os = target_sat.api.OperatingSystem(name=os_name).create()
     host_cnt = 3
     host_templates = [
-        entities.Host(organization=module_org, location=module_location, operatingsystem=os)
+        target_sat.api.Host(organization=module_org, location=module_location, operatingsystem=os)
         for i in range(host_cnt)
     ]
     for host_template in host_templates:
         host_template.create_missing()
-    with session:
+    with target_sat.ui_session() as session:
+        session.organization.select(module_org.name)
+        session.location.select(module_location.name)
         # create multiple hosts to test filtering
-        host_names = [create_fake_host(session, host_template) for host_template in host_templates]
+        host_names = [
+            target_sat.ui_factory(session).create_fake_host(host_template)
+            for host_template in host_templates
+        ]
         host_name = host_names[1]  # pick one that is neither first nor last
         file_path = session.reporttemplate.generate(
             'Host - Registered Content Hosts', values={'hosts_filter': host_name}
         )
@@ -296,8 +291,6 @@ def
test_positive_generate_subscriptions_report_json(

     :expectedresults: The Subscriptions report is generated in JSON

-    :CaseLevel: Integration
-
     :CaseImportance: Medium
     """
     # generate Subscriptions report
@@ -384,7 +377,7 @@ def test_positive_autocomplete(session):

 @pytest.mark.tier2
 def test_positive_schedule_generation_and_get_mail(
-    session, module_manifest_org, module_location, target_sat
+    session, module_entitlement_manifest_org, module_location, target_sat
 ):
     """Schedule generating a report. Request the result be sent via e-mail.

@@ -439,7 +432,9 @@ def test_positive_schedule_generation_and_get_mail(
     target_sat.get(remote_path=str(gzip_path), local_path=str(local_gzip_file))
     assert os.system(f'gunzip {local_gzip_file}') == 0
     data = json.loads(local_file.read_text())
-    subscription_search = target_sat.api.Subscription(organization=module_manifest_org).search()
+    subscription_search = target_sat.api.Subscription(
+        organization=module_entitlement_manifest_org
+    ).search()
     assert len(data) >= len(subscription_search) > 0
     keys_expected = [
         'Account number',
@@ -490,6 +485,7 @@ def test_negative_nonauthor_of_report_cant_download_it(session):
         4. Wait for dynflow
         5. As a different user, try to download the generated report
     :expectedresults: Report can't be downloaded. Error.
+
     :CaseImportance: High
     """
diff --git a/tests/foreman/ui/test_repositories.py b/tests/foreman/ui/test_repositories.py
new file mode 100644
index 00000000000..55f69a165d2
--- /dev/null
+++ b/tests/foreman/ui/test_repositories.py
@@ -0,0 +1,98 @@
+"""Test module for Repositories UI.
+
+:Requirement: Repository
+
+:CaseAutomation: Automated
+
+:CaseComponent: Repositories
+
+:team: Phoenix-content
+
+:CaseImportance: Critical
+
+"""
+import pytest
+
+
+@pytest.mark.rhel_ver_match('[^6]')
+def test_positive_custom_products_disabled_by_default(
+    session,
+    default_location,
+    setup_content,
+    rhel_contenthost,
+    target_sat,
+):
+    """Verify that custom products are disabled by default for content hosts
+    and activation keys
+
+    :id: 05bdf790-a7a1-48b1-bbae-dc25b6ee7d58
+
+    :steps:
+        1. Create custom product and upload repository
+        2. Attach to activation key
+        3. Register Host
+        4. Assert that custom products are disabled by default
+
+    :expectedresults: Custom products should be disabled by default.
+
+    :BZ: 1265120
+
+    :customerscenario: true
+    """
+    ak, org, custom_repo = setup_content
+    rhel_contenthost.register(org, default_location, ak.name, target_sat)
+    assert rhel_contenthost.subscribed
+    with session:
+        session.organization.select(org.name)
+        session.location.select(default_location.name)
+        ak_details = session.activationkey.read(ak.name, widget_names='repository sets')[
+            'repository sets'
+        ]['table'][0]
+        assert 'Disabled' in ak_details['Status']
+        repos = session.host_new.get_repo_sets(rhel_contenthost.hostname, custom_repo.name)
+        assert repos[0]['Repository'] == custom_repo.name
+        assert repos[0]['Status'] == 'Disabled'
+        assert repos[0]['Repository type'] == 'Custom'
+
+
+@pytest.mark.rhel_ver_match('[^6]')
+def test_positive_override_custom_products_on_existing_host(
+    session,
+    default_location,
+    setup_content,
+    rhel_contenthost,
+    target_sat,
+):
+    """Verify that custom products can be easily enabled/disabled on an existing host
+    using the "select all" method
+
+    :id: a3c9a8c8-a40f-448b-961a-7eb888883ba9
+
+    :steps:
+        1. Create custom product and upload repository
+        2. Attach to activation key
+        3. Register Host
+        4. Assert that custom products are disabled by default
+        5.
Override custom products to enabled using the new "Override to enabled" bulk action
+        6. Assert custom products are now enabled
+
+    :expectedresults: Custom products should be easily enabled.
+
+    :parametrized: yes
+    """
+    ak, org, custom_repo = setup_content
+    client = rhel_contenthost
+    client.register(org, default_location, ak.name, target_sat)
+    assert client.subscribed
+    with session:
+        session.organization.select(org.name)
+        session.location.select(default_location.name)
+        repo = session.host_new.get_repo_sets(rhel_contenthost.hostname, custom_repo.name)
+        assert repo[0]['Repository'] == custom_repo.name
+        assert repo[0]['Status'] == 'Disabled'
+        session.host_new.bulk_override_repo_sets(
+            rhel_contenthost.hostname, 'Custom', "Override to enabled"
+        )
+        repo = session.host_new.get_repo_sets(rhel_contenthost.hostname, custom_repo.name)
+        assert repo[0]['Repository'] == custom_repo.name
+        assert repo[0]['Status'] == 'Enabled'
diff --git a/tests/foreman/ui/test_repository.py b/tests/foreman/ui/test_repository.py
index a141a32df4b..1d9d40e10bb 100644
--- a/tests/foreman/ui/test_repository.py
+++ b/tests/foreman/ui/test_repository.py
@@ -4,72 +4,64 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: Repositories

 :team: Phoenix-content

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-from datetime import datetime
-from datetime import timedelta
-from random import randint
-from random import shuffle
+from datetime import datetime, timedelta
+from random import randint, shuffle

-import pytest
-from airgun.session import Session
-from nailgun import entities
 from navmazing import NavigationTriesExceeded
+import pytest

 from robottelo import constants
 from robottelo.config import settings
-from robottelo.constants import CONTAINER_REGISTRY_HUB
-from robottelo.constants import DataFile
-from robottelo.constants import DOWNLOAD_POLICIES
-from robottelo.constants import INVALID_URL
-from robottelo.constants import PRDS
-from robottelo.constants import REPO_TYPE
-from robottelo.constants import REPOS
-from robottelo.constants import REPOSET
-from robottelo.constants.repos import ANSIBLE_GALAXY
-from robottelo.constants.repos import CUSTOM_3RD_PARTY_REPO
-from robottelo.constants.repos import CUSTOM_RPM_SHA
+from robottelo.constants import (
+    CONTAINER_REGISTRY_HUB,
+    DOWNLOAD_POLICIES,
+    INVALID_URL,
+    PRDS,
+    REPO_TYPE,
+    REPOS,
+    REPOSET,
+    DataFile,
+)
+from robottelo.constants.repos import (
+    ANSIBLE_GALAXY,
+    CUSTOM_RPM_SHA,
+)
 from robottelo.hosts import get_sat_version
 from robottelo.utils.datafactory import gen_string

 @pytest.fixture(scope='module')
-def module_org():
-    return entities.Organization().create()
+def module_org(module_target_sat):
+    return module_target_sat.api.Organization().create()

 @pytest.fixture(scope='module')
-def module_prod(module_org):
-    return entities.Product(organization=module_org).create()
+def module_prod(module_org, module_target_sat):
+    return module_target_sat.api.Product(organization=module_org).create()

 @pytest.mark.tier2
 @pytest.mark.upgrade
 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
-def test_positive_create_in_different_orgs(session, module_org):
+def test_positive_create_in_different_orgs(session, module_org, module_target_sat):
     """Create repository in two different orgs with same name

     :id: 019c2242-8802-4bae-82c5-accf8f793dbc

     :expectedresults: Repository is created successfully for both organizations
-
-    :CaseLevel: Integration
     """
     repo_name = gen_string('alpha')
     org2 =
entities.Organization().create() - prod1 = entities.Product(organization=module_org).create() - prod2 = entities.Product(organization=org2).create() + org2 = module_target_sat.api.Organization().create() + prod1 = module_target_sat.api.Product(organization=module_org).create() + prod2 = module_target_sat.api.Product(organization=org2).create() with session: for org, prod in [[module_org, prod1], [org2, prod2]]: session.organization.select(org_name=org.name) @@ -98,8 +90,6 @@ def test_positive_create_as_non_admin_user(module_org, test_name, target_sat): :expectedresults: Repository successfully created :BZ: 1426393 - - :CaseLevel: Integration """ user_login = gen_string('alpha') user_password = gen_string('alphanumeric') @@ -114,9 +104,9 @@ def test_positive_create_as_non_admin_user(module_org, test_name, target_sat): 'sync_products', ], } - role = entities.Role().create() + role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(role, user_permissions) - entities.User( + target_sat.api.User( login=user_login, password=user_password, role=[role], @@ -124,8 +114,8 @@ def test_positive_create_as_non_admin_user(module_org, test_name, target_sat): default_organization=module_org, organization=[module_org], ).create() - product = entities.Product(organization=module_org).create() - with Session(test_name, user=user_login, password=user_password) as session: + product = target_sat.api.Product(organization=module_org).create() + with target_sat.ui_session(test_name, user=user_login, password=user_password) as session: # ensure that the created user is not a global admin user # check administer->organizations page with pytest.raises(NavigationTriesExceeded): @@ -144,22 +134,24 @@ def test_positive_create_as_non_admin_user(module_org, test_name, target_sat): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_create_yum_repo_same_url_different_orgs(session, module_prod): +def test_positive_create_yum_repo_same_url_different_orgs(session, module_prod, module_target_sat): """Create two repos with the same URL in two different organizations. :id: f4cb00ed-6faf-4c79-9f66-76cd333299cb :expectedresults: Repositories are created and have equal number of packages. 
- - :CaseLevel: Integration """ # Create first repository - repo = entities.Repository(product=module_prod, url=settings.repos.yum_0.url).create() + repo = module_target_sat.api.Repository( + product=module_prod, url=settings.repos.yum_0.url + ).create() repo.sync() # Create second repository - org = entities.Organization().create() - product = entities.Product(organization=org).create() - new_repo = entities.Repository(product=product, url=settings.repos.yum_0.url).create() + org = module_target_sat.api.Organization().create() + product = module_target_sat.api.Product(organization=org).create() + new_repo = module_target_sat.api.Repository( + product=product, url=settings.repos.yum_0.url + ).create() new_repo.sync() with session: # Check packages number in first repository @@ -189,8 +181,6 @@ def test_positive_create_as_non_admin_user_with_cv_published(module_org, test_na :expectedresults: New repository successfully created by non admin user :BZ: 1447829 - - :CaseLevel: Integration """ user_login = gen_string('alpha') user_password = gen_string('alphanumeric') @@ -205,9 +195,9 @@ def test_positive_create_as_non_admin_user_with_cv_published(module_org, test_na 'sync_products', ], } - role = entities.Role().create() + role = target_sat.api.Role().create() target_sat.api_factory.create_role_permissions(role, user_permissions) - entities.User( + target_sat.api.User( login=user_login, password=user_password, role=[role], @@ -215,18 +205,18 @@ def test_positive_create_as_non_admin_user_with_cv_published(module_org, test_na default_organization=module_org, organization=[module_org], ).create() - prod = entities.Product(organization=module_org).create() - repo = entities.Repository(product=prod, url=settings.repos.yum_2.url).create() + prod = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=prod, url=settings.repos.yum_2.url).create() repo.sync() - content_view = entities.ContentView(organization=module_org).create() + content_view = target_sat.api.ContentView(organization=module_org).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() - with Session(test_name, user_login, user_password) as session: + with target_sat.ui_session(test_name, user_login, user_password) as session: # ensure that the created user is not a global admin user # check administer->users page + pswd = gen_string('alphanumeric') with pytest.raises(NavigationTriesExceeded): - pswd = gen_string('alphanumeric') session.user.create( { 'user.login': gen_string('alphanumeric'), @@ -254,17 +244,15 @@ def test_positive_create_as_non_admin_user_with_cv_published(module_org, test_na @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') @pytest.mark.usefixtures('allow_repo_discovery') -def test_positive_discover_repo_via_existing_product(session, module_org): +def test_positive_discover_repo_via_existing_product(session, module_org, module_target_sat): """Create repository via repo-discovery under existing product :id: 9181950c-a756-456f-a46a-059e7a2add3c :expectedresults: Repository is discovered and created - - :CaseLevel: Integration """ repo_name = 'fakerepo01' - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() with session: session.organization.select(org_name=module_org.name) session.product.discover_repo( @@ -288,8 +276,6 @@ def 
test_positive_discover_repo_via_new_product(session, module_org):

     :id: dc5281f8-1a8a-4a17-b746-728f344a1504

     :expectedresults: Repository is discovered and created
-
-    :CaseLevel: Integration
     """
     product_name = gen_string('alpha')
     repo_name = 'fakerepo01'
@@ -312,16 +298,16 @@ def test_positive_discover_repo_via_new_product(session, module_org):
 @pytest.mark.upgrade
 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
 @pytest.mark.usefixtures('allow_repo_discovery')
-def test_positive_discover_module_stream_repo_via_existing_product(session, module_org):
+def test_positive_discover_module_stream_repo_via_existing_product(
+    session, module_org, module_target_sat
+):
     """Create repository with module streams via repo-discovery under an existing product.

     :id: e7b9e2c4-7ecd-4cde-8f74-961fbac8919c

-    :CaseLevel: Integration
-
     :BZ: 1676642

-    :Steps:
+    :steps:
         1. Create a product.
         2. From Content > Products, click on the Repo Discovery button.
         3. Enter a url containing a yum repository with module streams, e.g.,
@@ -332,7 +318,7 @@ def test_positive_discover_module_stream_repo_via_existing_product(session, modu
     """
     repo_name = gen_string('alpha')
     repo_label = gen_string('alpha')
-    product = entities.Product(organization=module_org).create()
+    product = module_target_sat.api.Product(organization=module_org).create()
     with session:
         session.organization.select(org_name=module_org.name)
         session.product.discover_repo(
@@ -353,17 +339,15 @@ def test_positive_discover_module_stream_repo_via_existing_product(session, modu
 @pytest.mark.tier2
 @pytest.mark.upgrade
 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
-def test_positive_sync_custom_repo_yum(session, module_org):
+def test_positive_sync_custom_repo_yum(session, module_org, module_target_sat):
     """Create a custom yum repo and sync it via the repos page.

     :id: afa218f4-e97a-4240-a82a-e69538d837a1

     :expectedresults: Sync procedure for specific yum repository is successful
-
-    :CaseLevel: Integration
     """
-    product = entities.Product(organization=module_org).create()
-    repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create()
+    product = module_target_sat.api.Product(organization=module_org).create()
+    repo = module_target_sat.api.Repository(url=settings.repos.yum_1.url, product=product).create()
     with session:
         result = session.repository.synchronize(product.name, repo.name)
         assert result['result'] == 'success'
@@ -376,18 +360,16 @@ def test_positive_sync_custom_repo_yum(session, module_org):

 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_sync_custom_repo_docker(session, module_org):
+def test_positive_sync_custom_repo_docker(session, module_org, module_target_sat):
     """Create a custom docker repo and sync it via the repos page.
:id: 942e0b4f-3524-4f00-812d-bdad306f81de :expectedresults: Sync procedure for specific docker repository is successful - - :CaseLevel: Integration """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository( url=CONTAINER_REGISTRY_HUB, product=product, content_type=REPO_TYPE['docker'] ).create() with session: @@ -397,7 +379,7 @@ def test_positive_sync_custom_repo_docker(session, module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_resync_custom_repo_after_invalid_update(session, module_org): +def test_positive_resync_custom_repo_after_invalid_update(session, module_org, module_target_sat): """Create Custom yum repo and sync it via the repos page. Then try to change repo url to invalid one and re-sync that repository @@ -409,11 +391,9 @@ def test_positive_resync_custom_repo_after_invalid_update(session, module_org): procedure for specific yum repository is successful :BZ: 1487173, 1262313 - - :CaseLevel: Integration """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository(url=settings.repos.yum_1.url, product=product).create() with session: result = session.repository.synchronize(product.name, repo.name) assert result['result'] == 'success' @@ -431,7 +411,7 @@ def test_positive_resync_custom_repo_after_invalid_update(session, module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_resynchronize_rpm_repo(session, module_prod): +def test_positive_resynchronize_rpm_repo(session, module_prod, module_target_sat): """Check that repository content is resynced after packages were removed from repository @@ -439,11 +419,9 @@ def test_positive_resynchronize_rpm_repo(session, module_prod): :expectedresults: Repository has updated non-zero package count - :CaseLevel: Integration - :BZ: 1318004 """ - repo = entities.Repository( + repo = module_target_sat.api.Repository( url=settings.repos.yum_1.url, content_type=REPO_TYPE['yum'], product=module_prod ).create() with session: @@ -467,26 +445,24 @@ def test_positive_resynchronize_rpm_repo(session, module_prod): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_end_to_end_custom_yum_crud(session, module_org, module_prod): +def test_positive_end_to_end_custom_yum_crud(session, module_org, module_prod, module_target_sat): """Perform end to end testing for custom yum repository :id: 8baf11c9-019e-4625-a549-ec4cd9312f75 :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ repo_name = gen_string('alpha') checksum_type = 'sha256' new_repo_name = gen_string('alphanumeric') new_checksum_type = 'sha1' - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_FILE.read_bytes(), organization=module_org, ).create() - new_gpg_key = entities.GPGKey( + new_gpg_key = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_BETA_FILE.read_bytes(), organization=module_org, ).create() @@ -540,8 +516,6 @@ 
def test_positive_end_to_end_custom_module_streams_crud(session, module_org, mod :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ repo_name = gen_string('alpha') @@ -586,8 +560,6 @@ def test_positive_upstream_with_credentials(session, module_prod): 2. The custom repository upstream credentials are updated. 3. The credentials are cleared. - :CaseLevel: Integration - :CaseImportance: High :BZ: 1433481, 1743271 @@ -646,8 +618,7 @@ def test_positive_upstream_with_credentials(session, module_prod): # # :expectedresults: All expected CRUD actions finished successfully # -# :CaseLevel: Integration -# +# # # :CaseImportance: High # # :BZ: 1467722 @@ -686,10 +657,7 @@ def test_positive_sync_ansible_collection_gallaxy_repo(session, module_prod): :expectedresults: All content synced successfully - :CaseLevel: Integration - :CaseImportance: High - """ repo_name = f'gallaxy-{gen_string("alpha")}' requirements = ''' @@ -705,7 +673,7 @@ def test_positive_sync_ansible_collection_gallaxy_repo(session, module_prod): module_prod.name, { 'name': repo_name, - 'repo_type': REPO_TYPE['ansible_collection'], + 'repo_type': REPO_TYPE['ansible_collection'].replace('_', ' '), 'repo_content.requirements': requirements, 'repo_content.upstream_url': ANSIBLE_GALAXY, }, @@ -726,8 +694,6 @@ def test_positive_no_errors_on_repo_scan(target_sat, function_sca_manifest_org): :customerscenario: True :BZ: 1994212 - - :CaseLevel: Integration """ sat_rpm_extras = target_sat.cli_factory.RHELServerExtras(cdn=True) with target_sat.ui_session() as session: @@ -741,17 +707,14 @@ def test_positive_no_errors_on_repo_scan(target_sat, function_sca_manifest_org): @pytest.mark.tier2 -def test_positive_reposet_disable(session, target_sat): +def test_positive_reposet_disable(session, target_sat, function_entitlement_manifest_org): """Enable RH repo, sync it and then disable :id: de596c56-1327-49e8-86d5-a1ab907f26aa :expectedresults: RH repo was disabled - - :CaseLevel: Integration """ - org = entities.Organization().create() - target_sat.upload_manifest(org.id) + org = function_entitlement_manifest_org sat_tools_repo = target_sat.cli_factory.SatelliteToolsRepository(distro='rhel7', cdn=True) repository_name = sat_tools_repo.data['repository'] with session: @@ -773,7 +736,8 @@ def test_positive_reposet_disable(session, target_sat): ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' for result in results]) session.redhatrepository.disable(repository_name) assert not session.redhatrepository.search( f'name = "{repository_name}"', category='Enabled' @@ -782,7 +746,9 @@ def test_positive_reposet_disable(session, target_sat): @pytest.mark.run_in_one_thread @pytest.mark.tier2 -def test_positive_reposet_disable_after_manifest_deleted(session, target_sat): +def test_positive_reposet_disable_after_manifest_deleted( + session, function_entitlement_manifest_org, target_sat +): """Enable RH repo and sync it. 
Remove manifest and then disable repository @@ -793,12 +759,9 @@ def test_positive_reposet_disable_after_manifest_deleted(session, target_sat): :expectedresults: RH repo was disabled :BZ: 1344391 - - :CaseLevel: Integration """ - org = entities.Organization().create() - target_sat.upload_manifest(org.id) - sub = entities.Subscription(organization=org) + org = function_entitlement_manifest_org + sub = target_sat.api.Subscription(organization=org) sat_tools_repo = target_sat.cli_factory.SatelliteToolsRepository(distro='rhel7', cdn=True) repository_name = sat_tools_repo.data['repository'] repository_name_orphaned = f'{repository_name} (Orphaned)' @@ -823,7 +786,8 @@ def test_positive_reposet_disable_after_manifest_deleted(session, target_sat): ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' for result in results]) # Delete manifest sub.delete_manifest(data={'organization_id': org.id}) # Verify that the displayed repository name is correct @@ -837,7 +801,7 @@ def test_positive_reposet_disable_after_manifest_deleted(session, target_sat): @pytest.mark.tier2 -def test_positive_delete_random_docker_repo(session, module_org): +def test_positive_delete_random_docker_repo(session, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. @@ -845,13 +809,14 @@ def test_positive_delete_random_docker_repo(session, module_org): :expectedresults: Random repository can be deleted from random product without altering the other products. - - :CaseLevel: Integration """ entities_list = [] - products = [entities.Product(organization=module_org).create() for _ in range(randint(2, 5))] + products = [ + module_target_sat.api.Product(organization=module_org).create() + for _ in range(randint(2, 5)) + ] for product in products: - repo = entities.Repository( + repo = module_target_sat.api.Repository( url=CONTAINER_REGISTRY_HUB, product=product, content_type=REPO_TYPE['docker'] ).create() entities_list.append((product.name, repo.name)) @@ -866,7 +831,7 @@ def test_positive_delete_random_docker_repo(session, module_org): @pytest.mark.tier2 -def test_positive_delete_rhel_repo(session, module_org, target_sat): +def test_positive_delete_rhel_repo(session, module_entitlement_manifest_org, target_sat): """Enable and sync a Red Hat Repository, and then delete it :id: e96f369d-3e58-4824-802e-0b7e99d6d207 @@ -875,17 +840,14 @@ def test_positive_delete_rhel_repo(session, module_org, target_sat): :expectedresults: Repository can be successfully deleted - :CaseLevel: Integration - :BZ: 1152672 """ - target_sat.upload_manifest(module_org.id) sat_tools_repo = target_sat.cli_factory.SatelliteToolsRepository(distro='rhel7', cdn=True) repository_name = sat_tools_repo.data['repository'] product_name = sat_tools_repo.data['product'] with session: - session.organization.select(module_org.name) + session.organization.select(module_entitlement_manifest_org.name) session.redhatrepository.enable( sat_tools_repo.data['repository-set'], sat_tools_repo.data['arch'], @@ -903,7 +865,8 @@ def test_positive_delete_rhel_repo(session, module_org, target_sat): ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' 
for result in results])
         session.repository.delete(product_name, repository_name)
         assert not session.redhatrepository.search(
             f'name = "{repository_name}"', category='Enabled'
         )
@@ -914,7 +877,7 @@

 @pytest.mark.tier2
-def test_positive_recommended_repos(session, module_org, target_sat):
+def test_positive_recommended_repos(session, module_entitlement_manifest_org):
     """List recommended repositories using the On/Off 'Recommended Repositories'
     toggle.

     :expectedresults:

         1. Repositories are shown according to the On/Off 'Recommended Repositories' toggle.
         2. Check that the latest Satellite version's Capsule/Tools repos do not exist.

-    :CaseLevel: Integration
-
     :BZ: 1776108
     """
-    target_sat.upload_manifest(module_org.id)
     with session:
-        session.organization.select(module_org.name)
+        session.organization.select(module_entitlement_manifest_org.name)
         rrepos_on = session.redhatrepository.read(recommended_repo='on')
         assert REPOSET['rhel7'] in [repo['name'] for repo in rrepos_on]
         v = get_sat_version()
@@ -960,7 +920,7 @@ def test_positive_upload_resigned_rpm():

     :customerscenario: true

-    :Steps:
+    :steps:
         1. Build or prepare an unsigned rpm.
         2. Create a gpg key.
         3. Use the gpg key to sign the rpm with sha1.
@@ -986,7 +946,7 @@ def test_positive_remove_srpm_change_checksum():

     :BZ: 1850914

-    :Steps:
+    :steps:
         1. Sync a repository that contains rpms and srpms and uses sha1 repodata.
         2. Re-sync the repository after an srpm has been removed and its repodata regenerated
            using sha256.
@@ -1008,7 +968,7 @@ def test_positive_repo_discovery_change_ssl():

     :BZ: 1789848

-    :Steps:
+    :steps:
         1. Navigate to Content > Products > click on 'Repo Discovery'.
         2. Set the repository type to 'Yum Repositories'.
         3. Enter an upstream URL to discover and click on 'Discover'.
@@ -1032,7 +992,7 @@ def test_positive_remove_credentials(session, function_product, function_org, fu

     :customerscenario: true

-    :Steps:
+    :steps:
         1. Create a custom repository, with a repository type of 'yum' and an upstream username
            and password.
         2. Remove the saved credentials by clicking the delete icon next to the 'Upstream
@@ -1076,7 +1036,7 @@ def test_sync_status_persists_after_task_delete(session, module_prod, module_org

     :customerscenario: true

-    :Steps:
+    :steps:
         1. Sync a custom Repo.
         2. Navigate to Content > Sync Status. Assert status is Synced.
         3. Use foreman-rake console to delete the Sync task (sketched below).
@@ -1133,7 +1093,7 @@ def test_positive_sync_status_repo_display():

     :customerscenario: true

-    :Steps:
+    :steps:
         1. Import manifest and enable RHEL 8 repositories.
         2. Navigate to Content > Sync Status.
@@ -1154,7 +1114,7 @@ def test_positive_search_enabled_kickstart_repos():

     :BZ: 1724807, 1829817

-    :Steps:
+    :steps:
         1. Import a manifest
         2. Navigate to Content > Red Hat Repositories, and enable some kickstart repositories.
         3. In the search bar on the right side, select 'Enabled/Both'.
@@ -1177,7 +1137,7 @@ def test_positive_rpm_metadata_display():

     :BZ: 1904369

-    :Steps:
+    :steps:
         1. Enable and sync a repository, e.g.,
           'Red Hat Satellite Tools 6.9 for RHEL 7 Server RPMs x86_64'.
         2. Navigate to Content > Packages > click on a package in the repository (e.g.,
@@ -1207,7 +1167,7 @@ def test_positive_select_org_in_any_context():

     :BZ: 1860957

-    :Steps:
+    :steps:
        1. Set "Any organization" and "Any location" on top
        2. Click on Content -> "Sync Status"
        3. "Select an Organization" page will come up.
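Note: for the stubbed test_sync_status_persists_after_task_delete above, a minimal sketch of steps 1 and 3 could reuse patterns already in this module (the target_sat fixture, settings, repo.sync()); the rake-console one-liner and the task label in it are assumptions for illustration, not a confirmed CLI or API:

def test_sync_status_persists_after_task_delete_sketch(session, module_prod, module_org, target_sat):
    # 1. Create and sync a custom repo, same pattern as the CRUD tests above.
    repo = target_sat.api.Repository(url=settings.repos.yum_1.url, product=module_prod).create()
    repo.sync()
    # 3. Delete the sync task out-of-band via foreman-rake console.
    #    (hypothetical one-liner; the task label would need verifying, and a real
    #    test would look the task up by id instead of destroying by label)
    result = target_sat.run(
        "foreman-rake console <<< "
        "'ForemanTasks::Task.where(label: \"Actions::Katello::Repository::Sync\").destroy_all'"
    )
    assert result.status == 0
    # 2./4. The Sync Status page assertions would go through the UI session here.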
@@ -1224,7 +1184,7 @@ def test_positive_select_org_in_any_context():
 @pytest.mark.tier2
 @pytest.mark.upgrade
 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
-def test_positive_sync_repo_and_verify_checksum(session, module_org):
+def test_positive_sync_repo_and_verify_checksum(session, module_org, module_target_sat):
     """Tests that Verify Content Checksum succeeds when executing from the products page

     :id: 577be1f8-7510-49d2-8b33-600db60bd960

     :BZ: 1951626

-    :Steps:
+    :steps:
         1. Enable and sync repository
         2. Go to Products -> Select Action -> Verify Content Checksum

     :expectedresults: Verify Content Checksum task succeeds
     """
     repo_name = gen_string('alpha')
-    product = entities.Product(organization=module_org).create()
+    product = module_target_sat.api.Product(organization=module_org).create()
     with session:
         session.repository.create(
             product.name,
@@ -1256,8 +1216,8 @@

 @pytest.mark.tier2
-def test_positive_sync_sha_repo(session, module_org):
-    """Sync 'sha' repo successfully
+def test_positive_sync_sha_repo(session, module_org, module_target_sat):
+    """Sync a repository that uses the 'sha' checksum type (in practice 'sha1')

     :id: 6172035f-96c4-41e4-a79b-acfaa78ad734

     :customerscenario: true

     :BZ: 2024889

-    :SubComponent: Candlepin
-    """
-    repo_name = gen_string('alpha')
-    product = entities.Product(organization=module_org).create()
-    with session:
-        session.repository.create(
-            product.name,
-            {
-                'name': repo_name,
-                'repo_type': REPO_TYPE['yum'],
-                'repo_content.upstream_url': CUSTOM_RPM_SHA,
-            },
-        )
-        result = session.repository.synchronize(product.name, repo_name)
-        assert result['result'] == 'success'
-
-
-@pytest.mark.tier2
-def test_positive_sync_third_party_repo(session, module_org):
-    """Sync third part repo successfully
-
-    :id: 655161e0-aa90-4c7c-9a0d-cb5b9f56eac3
-
-    :customerscenario: true
-
-    :BZ: 1920511
-
     :SubComponent: Pulp
     """
     repo_name = gen_string('alpha')
-    product = entities.Product(organization=module_org).create()
+    product = module_target_sat.api.Product(organization=module_org).create()
     with session:
         session.repository.create(
             product.name,
             {
                 'name': repo_name,
                 'repo_type': REPO_TYPE['yum'],
-                'repo_content.upstream_url': CUSTOM_3RD_PARTY_REPO,
+                'repo_content.upstream_url': CUSTOM_RPM_SHA,
             },
         )
         result = session.repository.synchronize(product.name, repo_name)
diff --git a/tests/foreman/ui/test_rhc.py b/tests/foreman/ui/test_rhc.py
index 23aa43f07f5..df90a5768ca 100644
--- a/tests/foreman/ui/test_rhc.py
+++ b/tests/foreman/ui/test_rhc.py
@@ -4,21 +4,18 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
+:CaseComponent: RHCloud

-:CaseComponent: RHCloud-CloudConnector
-
-:Team: Platform
-
-:TestType: Functional
+:Team: Phoenix-subscriptions

 :CaseImportance: High

-:Upstream: No
 """
-import pytest
+from datetime import datetime, timedelta
+
 from fauxfactory import gen_string
-from wait_for import wait_for
+from manifester import Manifester
+import pytest

 from robottelo import constants
 from robottelo.config import settings
@@ -64,18 +61,23 @@ def module_rhc_org(module_target_sat):
     return org

-@pytest.fixture()
-def fixture_setup_rhc_satellite(request, module_target_sat, module_rhc_org):
+@pytest.fixture
+def fixture_setup_rhc_satellite(
+    request,
+    module_target_sat,
+    module_rhc_org,
+):
    """Create
Organization and activation key after successful test execution""" + if settings.rh_cloud.crc_env == 'prod': + manifester = Manifester( + allocation_name=module_rhc_org.name, + manifest_category=settings.manifest.extra_rhel_entitlement, + simple_content_access="enabled", + ) + rhcloud_manifest = manifester.get_manifest() + module_target_sat.upload_manifest(module_rhc_org.id, rhcloud_manifest.content) yield if request.node.rep_call.passed: - if settings.rh_cloud.crc_env == 'prod': - manifests_path = module_target_sat.download_file( - file_url=settings.fake_manifest.url['default'] - )[0] - module_target_sat.cli.Subscription.upload( - {'file': manifests_path, 'organization-id': module_rhc_org.id} - ) # Enable and sync required repos repo1_id = module_target_sat.api_factory.enable_sync_redhat_repo( constants.REPOS['rhel8_aps'], module_rhc_org.id @@ -83,9 +85,12 @@ def fixture_setup_rhc_satellite(request, module_target_sat, module_rhc_org): repo2_id = module_target_sat.api_factory.enable_sync_redhat_repo( constants.REPOS['rhel7'], module_rhc_org.id ) + repo3_id = module_target_sat.api_factory.enable_sync_redhat_repo( + constants.REPOS['rhel8_bos'], module_rhc_org.id + ) # Add repos to Content view content_view = module_target_sat.api.ContentView( - organization=module_rhc_org, repository=[repo1_id, repo2_id] + organization=module_rhc_org, repository=[repo1_id, repo2_id, repo3_id] ).create() content_view.publish() # Create Activation key @@ -114,7 +119,7 @@ def test_positive_configure_cloud_connector( :id: 67e45cfe-31bb-51a8-b88f-27918c68f32e - :Steps: + :steps: 1. Navigate to Configure > Inventory Upload 2. Click Configure Cloud Connector @@ -122,8 +127,6 @@ def test_positive_configure_cloud_connector( :expectedresults: The Cloud Connector has been installed and the service is running - :CaseLevel: Integration - :CaseImportance: Critical :BZ: 1818076 @@ -145,8 +148,8 @@ def test_positive_configure_cloud_connector( parameters = [{'name': 'source_display_name', 'value': gen_string('alpha')}] host.host_parameters_attributes = parameters host.update(['host_parameters_attributes']) - - with session: + timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M') + with module_target_sat.ui_session() as session: session.organization.select(org_name=module_rhc_org.name) if session.cloudinventory.is_cloud_connector_configured(): pytest.skip( @@ -155,19 +158,19 @@ def test_positive_configure_cloud_connector( 'check if everything is correctly configured from scratch. Skipping.' 
) session.cloudinventory.configure_cloud_connector() - template_name = 'Configure Cloud Connector' + module_target_sat.wait_for_tasks( + search_query=( + f'action = "Run hosts job: {template_name}"' f' and started_at >= "{timestamp}"' + ), + search_rate=15, + max_tries=10, + ) invocation_id = ( module_target_sat.api.JobInvocation() .search(query={'search': f'description="{template_name}"'})[0] .id ) - wait_for( - lambda: module_target_sat.api.JobInvocation(id=invocation_id).read().status_label - in ["succeeded", "failed"], - timeout="1500s", - ) - job_output = module_target_sat.cli.JobInvocation.get_output( {'id': invocation_id, 'host': module_target_sat.hostname} ) diff --git a/tests/foreman/ui/test_rhcloud_insights.py b/tests/foreman/ui/test_rhcloud_insights.py index 7bd8a83759e..3646658c2b0 100644 --- a/tests/foreman/ui/test_rhcloud_insights.py +++ b/tests/foreman/ui/test_rhcloud_insights.py @@ -1,49 +1,51 @@ """Tests for RH Cloud - Inventory -:Requirement: RH Cloud - Inventory +:Requirement: RHCloud :CaseAutomation: Automated -:CaseLevel: System +:CaseComponent: RHCloud -:CaseComponent: RHCloud-Inventory - -:Team: Platform - -:TestType: Functional +:Team: Phoenix-subscriptions :CaseImportance: High -:Upstream: No """ from datetime import datetime import pytest -from airgun.session import Session from wait_for import wait_for from robottelo.config import settings -from robottelo.constants import DEFAULT_LOC +from robottelo.constants import DEFAULT_LOC, DNF_RECOMMENDATION, OPENSSH_RECOMMENDATION + + +def create_insights_vulnerability(insights_vm): + """Function to create vulnerabilities that can be remediated.""" + insights_vm.run( + 'chmod 777 /etc/ssh/sshd_config;dnf update -y dnf;' + 'sed -i -e "/^best/d" /etc/dnf/dnf.conf;insights-client' + ) @pytest.mark.e2e -@pytest.mark.run_in_one_thread @pytest.mark.tier3 -@pytest.mark.rhel_ver_list([8, 9]) +@pytest.mark.no_containers +@pytest.mark.rhel_ver_list([7, 8, 9]) def test_rhcloud_insights_e2e( rhel_insights_vm, - organization_ak_setup, - rhcloud_sat_host, + rhcloud_manifest_org, + module_target_sat, ): """Synchronize hits data from cloud, verify it is displayed in Satellite and run remediation. :id: d952e83c-3faf-4299-a048-2eb6ccb8c9c2 - :Steps: + :steps: 1. Prepare misconfigured machine and upload its data to Insights. - 2. In Satellite UI, Configure -> Insights -> Sync recommendations. - 3. Run remediation for "OpenSSH config permissions" recommendation against rhel8/rhel9 host. - 4. Assert that job completed successfully. + 2. In Satellite UI, go to Configure -> Insights -> Sync recommendations. + 3. Run remediation for "OpenSSH config permissions" recommendation against host. + 4. Verify that the remediation job completed successfully. 5. Sync Insights recommendations. 6. Search for previously remediated issue. 
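Note: the sync-and-poll sequence in the steps above recurs several times in this module; a small helper along these lines (a sketch, not part of this change — every call mirrors code already in this file) could factor out the repeated wait_for blocks:

def wait_for_insights_sync(satellite, timestamp):
    """Poll ForemanTask until the 'Insights full sync' started after `timestamp` succeeds."""
    wait_for(
        lambda: satellite.api.ForemanTask()
        .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0]
        .result
        == 'success',
        timeout=400,
        delay=15,
        silent_failure=True,
        handle_exception=True,
    )

# usage, mirroring the test below:
#     timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
#     session.cloudinsights.sync_hits()
#     wait_for_insights_sync(module_target_sat, timestamp)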
@@ -55,7 +57,7 @@ def test_rhcloud_insights_e2e( :CaseImportance: Critical - :BZ: 1965901, 1962048 + :BZ: 1965901, 1962048, 1976754 :customerscenario: true @@ -63,20 +65,21 @@ def test_rhcloud_insights_e2e( :CaseAutomation: Automated """ - org, ak = organization_ak_setup - # Create a vulnerability which can be remediated - rhel_insights_vm.run('chmod 777 /etc/ssh/sshd_config;insights-client') - query = 'Decreased security: OpenSSH config permissions' job_query = ( f'Remote action: Insights remediations for selected issues on {rhel_insights_vm.hostname}' ) - with Session(hostname=rhcloud_sat_host.hostname) as session: + org = rhcloud_manifest_org + # Prepare misconfigured machine and upload data to Insights + create_insights_vulnerability(rhel_insights_vm) + + with module_target_sat.ui_session() as session: session.organization.select(org_name=org.name) session.location.select(loc_name=DEFAULT_LOC) timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') + # Sync Insights recommendations session.cloudinsights.sync_hits() wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() + lambda: module_target_sat.api.ForemanTask() .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0] .result == 'success', @@ -87,13 +90,17 @@ def test_rhcloud_insights_e2e( ) # Workaround for alert message causing search to fail. See airgun issue 584. session.browser.refresh() - result = session.cloudinsights.search(query)[0] + # Search for Insights recommendation. + result = session.cloudinsights.search( + f'hostname= "{rhel_insights_vm.hostname}" and title = "{OPENSSH_RECOMMENDATION}"' + )[0] assert result['Hostname'] == rhel_insights_vm.hostname - assert result['Recommendation'] == query + assert result['Recommendation'] == OPENSSH_RECOMMENDATION + # Run remediation and verify job completion. timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') - session.cloudinsights.remediate(query) + session.cloudinsights.remediate(OPENSSH_RECOMMENDATION) wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() + lambda: module_target_sat.api.ForemanTask() .search(query={'search': f'{job_query} and started_at >= "{timestamp}"'})[0] .result == 'success', @@ -102,10 +109,11 @@ def test_rhcloud_insights_e2e( silent_failure=True, handle_exception=True, ) + # Search for Insights recommendations again. timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M') session.cloudinsights.sync_hits() wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() + lambda: module_target_sat.api.ForemanTask() .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0] .result == 'success', @@ -116,7 +124,10 @@ def test_rhcloud_insights_e2e( ) # Workaround for alert message causing search to fail. See airgun issue 584. session.browser.refresh() - assert not session.cloudinsights.search(query) + # Verify that the insights recommendation is not listed anymore. + assert not session.cloudinsights.search( + f'hostname= "{rhel_insights_vm.hostname}" and title = "{OPENSSH_RECOMMENDATION}"' + ) @pytest.mark.stubbed @@ -125,7 +136,7 @@ def test_insights_reporting_status(): :id: 75629a08-b585-472b-a295-ce497075e519 - :Steps: + :steps: 1. Register a satellite content host with insights. 2. Change 48 hours of wait time to 4 minutes in insights_client_report_status.rb file. See foreman_rh_cloud PR#596. @@ -151,7 +162,7 @@ def test_recommendation_sync_for_satellite(): :id: ee3feba3-c255-42f1-8293-b04d540dcca5 - :Steps: + :steps: 1. Register Satellite with insights.(satellite-installer --register-with-insights) 2. 
Add RH cloud token in settings.
         3. Go to Configure > Insights > Click on Sync recommendations button.
@@ -171,29 +182,6 @@
     """

-@pytest.mark.stubbed
-def test_allow_auto_insights_sync_setting():
-    """Test "allow_auto_insights_sync" setting.
-
-    :id: ddc4ed5b-43c0-4121-bf2c-b8e040e45379
-
-    :Steps:
-        1. Register few satellite content host with insights.
-        2. Enable "allow_auto_insights_sync" setting.
-        3. Wait for "InsightsScheduledSync" task to run.
-
-    :expectedresults:
-        1. Satellite has "Inventory scheduled sync" recurring logic, which syncs
-            inventory status automatically if "Automatic inventory upload" setting is enabled.
-
-    :CaseImportance: Medium
-
-    :BZ: 1865879
-
-    :CaseAutomation: ManualOnly
-    """
-
-
 @pytest.mark.stubbed
 def test_host_sorting_based_on_recommendation_count():
     """Verify that hosts can be sorted and filtered based on insights

     :id: b1725ec1-60db-422e-809d-f81d99ae156e

-    :Steps:
+    :steps:
         1. Register a few satellite content hosts with insights.
         2. Sync Insights recommendations.
         3. Go to Hosts > All Hosts
@@ -220,267 +208,59 @@

-@pytest.mark.run_in_one_thread
 @pytest.mark.tier2
-@pytest.mark.rhel_ver_list([8])
+@pytest.mark.no_containers
+@pytest.mark.rhel_ver_list([7, 8, 9])
 def test_host_details_page(
     rhel_insights_vm,
-    organization_ak_setup,
-    rhcloud_sat_host,
+    rhcloud_manifest_org,
+    module_target_sat,
 ):
     """Test host details page for host having insights recommendations.

     :id: e079ed10-c9f5-4331-9cb3-70b224b1a584

-    :Steps:
+    :customerscenario: true
+
+    :steps:
         1. Prepare misconfigured machine and upload its data to Insights.
         2. Sync insights recommendations.
         3. Sync RH Cloud inventory status.
         4. Go to Hosts -> All Hosts
-        5. Assert there is "Recommendations" column containing insights recommendation count.
+        5. Verify there is a "Recommendations" column containing insights recommendation count.
         6. Check popover status of host.
-        7. Assert that host properties shows reporting inventory upload status.
-        8. Click on "Recommendations" tab.
+        7. Verify that host properties show "reporting" inventory upload status.
+        8. Read the recommendations listed in the Insights tab on the host details page.
+        9. Click on "Recommendations" tab.
+        10. Try to delete host.

     :expectedresults:
         1. There's Insights column with number of recommendations.
         2. Inventory upload status is displayed in popover status of host.
         3. Insights registration status is displayed in popover status of host.
         4. Inventory upload status is present in host properties table.
-        5. Clicking on "Recommendations" tab takes user to Insights page with insights
+        5. Verify the contents of the Insights tab.
+        6. Clicking on "Recommendations" tab takes user to Insights page with insights
             recommendations selected for that host.
+        7. Host having insights recommendations is deleted from Satellite.
- :BZ: 1974578 - - :parametrized: yes - - :CaseAutomation: Automated - """ - org, ak = organization_ak_setup - # Create a vulnerability which can be remediated - rhel_insights_vm.run('dnf update -y dnf;sed -i -e "/^best/d" /etc/dnf/dnf.conf;insights-client') - # Sync inventory status - inventory_sync = rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_inventory_sync() - wait_for( - lambda: rhcloud_sat_host.api.ForemanTask() - .search(query={'search': f'id = {inventory_sync["task"]["id"]}'})[0] - .result - == 'success', - timeout=400, - delay=15, - silent_failure=True, - handle_exception=True, - ) - with Session(hostname=rhcloud_sat_host.hostname) as session: - session.organization.select(org_name=org.name) - session.location.select(loc_name=DEFAULT_LOC) - # Sync insights recommendations - session.cloudinsights.sync_hits() - result = session.host.host_status(rhel_insights_vm.hostname) - assert 'Insights: Reporting' in result - assert 'Inventory: Successfully uploaded to your RH cloud inventory' in result - result = session.host.search(rhel_insights_vm.hostname)[0] - assert result['Name'] == rhel_insights_vm.hostname - assert int(result['Recommendations']) > 0 - values = session.host.get_details(rhel_insights_vm.hostname) - # Note: Reading host properties adds 'clear' to original value. - assert ( - values['properties']['properties_table']['Inventory'] - == 'Successfully uploaded to your RH cloud inventory clear' - ) - recommendations = session.host.read_insights_recommendations(rhel_insights_vm.hostname) - assert len(recommendations), 'No recommendations were found' - assert recommendations[0]['Hostname'] == rhel_insights_vm.hostname - assert int(result['Recommendations']) == len(recommendations) - - -@pytest.mark.run_in_one_thread -@pytest.mark.tier2 -def test_rh_cloud_insights_clean_statuses( - rhel7_contenthost, - rhel8_contenthost, - organization_ak_setup, - rhcloud_sat_host, -): - """Test rh_cloud_insights:clean_statuses rake command. - - :id: 6416ed31-cafb-4278-b205-bf3da9ab2ee4 - - :Steps: - 1. Prepare misconfigured machine and upload its data to Insights - 2. Go to Hosts -> All Hosts - 3. Assert that host properties shows reporting status for insights. - 4. Run rh_cloud_insights:clean_statuses rake command - 5. Assert that host properties doesn't contain insights status. - - :expectedresults: - 1. rake command deletes insights reporting status of host. 
- - :BZ: 1962930 - - :parametrized: yes - - :CaseAutomation: Automated - """ - org, ak = organization_ak_setup - rhel7_contenthost.configure_rhai_client( - satellite=rhcloud_sat_host, activation_key=ak.name, org=org.label, rhel_distro='rhel7' - ) - rhel8_contenthost.configure_rhai_client( - satellite=rhcloud_sat_host, activation_key=ak.name, org=org.label, rhel_distro='rhel8' - ) - with Session(hostname=rhcloud_sat_host.hostname) as session: - session.organization.select(org_name=org.name) - session.location.select(loc_name=DEFAULT_LOC) - values = session.host.get_details(rhel7_contenthost.hostname) - assert values['properties']['properties_table']['Insights'] == 'Reporting clear' - values = session.host.get_details(rhel8_contenthost.hostname) - assert values['properties']['properties_table']['Insights'] == 'Reporting clear' - # Clean insights status - result = rhcloud_sat_host.run( - f'foreman-rake rh_cloud_insights:clean_statuses SEARCH="{rhel7_contenthost.hostname}"' - ) - assert 'Deleted 1 insights statuses' in result.stdout - assert result.status == 0 - values = session.host.get_details(rhel7_contenthost.hostname) - with pytest.raises(KeyError): - values['properties']['properties_table']['Insights'] - values = session.host.get_details(rhel8_contenthost.hostname) - assert values['properties']['properties_table']['Insights'] == 'Reporting clear' - result = rhel7_contenthost.run('insights-client') - assert result.status == 0 - values = session.host.get_details(rhel7_contenthost.hostname) - assert values['properties']['properties_table']['Insights'] == 'Reporting clear' - - -@pytest.mark.run_in_one_thread -@pytest.mark.tier2 -@pytest.mark.rhel_ver_list([8]) -def test_delete_host_having_insights_recommendation( - rhel_insights_vm, - organization_ak_setup, - rhcloud_sat_host, -): - """Verify that host having insights recommendations can be deleted from Satellite. - - :id: 07914ff7-e230-4416-8664-7d357e9966f3 - - :customerscenario: true - - :Steps: - 1. Prepare misconfigured machine and upload its data to Insights. - 2. Sync insights recommendations. - 3. Sync RH Cloud inventory status. - 4. Go to Hosts -> All Hosts - 5. Assert there is "Recommendations" column containing insights recommendation count. - 6. Try to delete host. - - :expectedresults: - 1. host having insights recommendations is deleted from Satellite. 
-
-    :CaseImportance: Critical
-
-    :BZ: 1860422, 1928652
-
-    :parametrized: yes
-
-    :CaseAutomation: Automated
-    """
-    org, ak = organization_ak_setup
-    # Create a vulnerability which can be remediated
-    rhel_insights_vm.run('dnf update -y dnf;sed -i -e "/^best/d" /etc/dnf/dnf.conf;insights-client')
-    # Sync inventory status
-    inventory_sync = rhcloud_sat_host.api.Organization(id=org.id).rh_cloud_inventory_sync()
-    wait_for(
-        lambda: rhcloud_sat_host.api.ForemanTask()
-        .search(query={'search': f'id = {inventory_sync["task"]["id"]}'})[0]
-        .result
-        == 'success',
-        timeout=400,
-        delay=15,
-        silent_failure=True,
-        handle_exception=True,
-    )
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
-        session.organization.select(org_name=org.name)
-        session.location.select(loc_name=DEFAULT_LOC)
-        # Sync insights recommendations
-        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
-        session.cloudinsights.sync_hits()
-        wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
-            .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0]
-            .result
-            == 'success',
-            timeout=400,
-            delay=15,
-            silent_failure=True,
-            handle_exception=True,
-        )
-        result = session.host.search(rhel_insights_vm.hostname)[0]
-        assert result['Name'] == rhel_insights_vm.hostname
-        assert int(result['Recommendations']) > 0
-        values = session.host.get_details(rhel_insights_vm.hostname)
-        # Note: Reading host properties adds 'clear' to original value.
-        assert (
-            values['properties']['properties_table']['Inventory']
-            == 'Successfully uploaded to your RH cloud inventory clear'
-        )
-        assert values['properties']['properties_table']['Insights'] == 'Reporting clear'
-        # Delete host
-        session.host.delete(rhel_insights_vm.hostname)
-        assert not rhcloud_sat_host.api.Host().search(
-            query={'search': f'name="{rhel_insights_vm.hostname}"'}
-        )
-
-
-@pytest.mark.tier2
-@pytest.mark.rhel_ver_list([8])
-def test_insights_tab_on_host_details_page(
-    rhel_insights_vm,
-    organization_ak_setup,
-    rhcloud_sat_host,
-):
-    """Test recommendations count in hosts index is a link and contents
-    of Insights tab on host details page.
-
-    :id: d969ad38-7ee5-4c57-8538-1cf6c1705707
-
-    :Steps:
-        1. Prepare misconfigured machine and upload its data to Insights.
-        2. In Satellite UI, Configure -> Insights -> Sync now.
-        3. Go to Hosts -> All Hosts.
-        4. Click on recommendation count.
-        5. Assert the contents of Insights tab.
-
-    :expectedresults:
-        1. There's Insights recommendation column with number of recommendations and
-           link to Insights tab on host details page.
-        2. Insights tab shows recommendations for the host.
-
-    :CaseImportance: High
-
-    :BZ: 1865876, 1879448
+    :BZ: 1974578, 1860422, 1928652, 1865876, 1879448

     :parametrized: yes

     :CaseAutomation: Automated
     """
-    org, ak = organization_ak_setup
-    # Create a vulnerability which can be remediated
-    rhel_insights_vm.run('dnf update -y dnf;sed -i -e "/^best/d" /etc/dnf/dnf.conf;insights-client')
-    dnf_issue = (
-        'The dnf installs lower versions of packages when the '
-        '"best" option is not present in the /etc/dnf/dnf.conf'
-    )
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
+    org = rhcloud_manifest_org
+    # Prepare misconfigured machine and upload data to Insights
+    create_insights_vulnerability(rhel_insights_vm)
+    with module_target_sat.ui_session() as session:
         session.organization.select(org_name=org.name)
         session.location.select(loc_name=DEFAULT_LOC)
         # Sync insights recommendations
         timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
         session.cloudinsights.sync_hits()
         wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
+            lambda: module_target_sat.api.ForemanTask()
             .search(query={'search': f'Insights full sync and started_at >= "{timestamp}"'})[0]
             .result
             == 'success',
@@ -489,24 +269,48 @@ def test_insights_tab_on_host_details_page(
             silent_failure=True,
             handle_exception=True,
         )
-        result = session.host.search(rhel_insights_vm.hostname)[0]
+        # Verify Insights status of host.
+        result = session.host_new.get_host_statuses(rhel_insights_vm.hostname)
+        assert result['Insights']['Status'] == 'Reporting'
+        assert result['Inventory']['Status'] == 'Successfully uploaded to your RH cloud inventory'
+        result = session.host_new.search(rhel_insights_vm.hostname)[0]
         assert result['Name'] == rhel_insights_vm.hostname
         assert int(result['Recommendations']) > 0
-        insights_recommendations = session.host.insights_tab(rhel_insights_vm.hostname)
+        values = session.host_new.get_host_statuses(rhel_insights_vm.hostname)
+        assert values['Inventory']['Status'] == 'Successfully uploaded to your RH cloud inventory'
+        # Read the recommendations listed in Insights tab present on host details page
+        insights_recommendations = session.host_new.get_insights(rhel_insights_vm.hostname)[
+            'recommendations_table'
+        ]
         for recommendation in insights_recommendations:
-            if recommendation['name'] == dnf_issue:
-                assert recommendation['label'] == 'Moderate'
-                assert dnf_issue in recommendation['text']
+            if recommendation['Recommendation'] == DNF_RECOMMENDATION:
+                assert recommendation['Total risk'] == 'Moderate'
+                assert DNF_RECOMMENDATION in recommendation['Recommendation']
         assert len(insights_recommendations) == int(result['Recommendations'])
+        # Test Recommendation button present on host details page
+        recommendations = session.host_new.get_insights(rhel_insights_vm.hostname)[
+            'recommendations_table'
+        ]
+        assert len(recommendations), 'No recommendations were found'
+        assert int(result['Recommendations']) == len(recommendations)
+        # Delete host
+        rhel_insights_vm.nailgun_host.delete()
+        assert not rhel_insights_vm.nailgun_host
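# Aside: `create_insights_vulnerability` is a helper this PR introduces outside
# this excerpt. Judging from the inline code it replaces (removed above), a
# sketch of what it plausibly does; the exact implementation may differ:
def create_insights_vulnerability(insights_vm):
    """Create a remediable dnf configuration issue and upload fresh Insights data."""
    insights_vm.run(
        'dnf update -y dnf;'
        'sed -i -e "/^best/d" /etc/dnf/dnf.conf;'  # dropping "best" triggers a known recommendation
        'insights-client'  # re-upload host data so the new hit is recorded
    )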


 @pytest.mark.e2e
 @pytest.mark.no_containers
+@pytest.mark.rhel_ver_list([7, 8, 9])
 def test_insights_registration_with_capsule(
-    rhcloud_capsule, organization_ak_setup, rhcloud_sat_host, rhel7_contenthost, default_os
+    rhcloud_capsule,
+    rhcloud_activation_key,
+    rhcloud_manifest_org,
+    module_target_sat,
+    rhel_contenthost,
+    default_os,
 ):
     """Registering host with insights having traffic going through
-    external capsule.
+    external capsule, and test the rh_cloud_insights:clean_statuses rake command.

     :id: 9db1d307-664c-4d4a-89de-da986224f071

@@ -515,27 +319,39 @@ def test_insights_registration_with_capsule(
     :steps:
         1. Integrate a capsule with satellite.
         2. open the global registration form and select the same capsule.
-        3. Overide Insights and Rex parameters.
+        3. Override Insights and Rex parameters.
         4. check host is registered successfully with selected capsule.
         5. Test insights client connection & reporting status.
+        6. Run rh_cloud_insights:clean_statuses rake command.
+        7. Verify that host properties don't contain insights status.

-    :expectedresults: Host is successfully registered with capsule host,
-        having remote execution and insights.
+    :expectedresults:
+        1. Host is successfully registered with capsule host,
+           having remote execution and insights.
+        2. The rake command deletes the insights reporting status of the host.

-    :BZ: 2110222, 2112386
-    """
-    org, ak = organization_ak_setup
-    capsule = rhcloud_sat_host.api.SmartProxy(name=rhcloud_capsule.hostname).search()[0]
-    org.smart_proxy.append(capsule)
-    org.update(['smart_proxy'])
+    :BZ: 2110222, 2112386, 1962930

-    with Session(hostname=rhcloud_sat_host.hostname) as session:
+    :parametrized: yes
+    """
+    org = rhcloud_manifest_org
+    ak = rhcloud_activation_key
+    # Enable rhel repos and install insights-client
+    rhelver = rhel_contenthost.os_version.major
+    if rhelver > 7:
+        rhel_contenthost.create_custom_repos(**settings.repos[f'rhel{rhelver}_os'])
+    else:
+        rhel_contenthost.create_custom_repos(
+            **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']}
+        )
+    with module_target_sat.ui_session() as session:
         session.organization.select(org_name=org.name)
         session.location.select(loc_name=DEFAULT_LOC)
-        cmd = session.host.get_register_command(
+        # Generate host registration command
+        cmd = session.host_new.get_register_command(
             {
                 'general.operating_system': default_os.title,
-                'general.orgnization': org.name,
+                'general.organization': org.name,
                 'general.capsule': rhcloud_capsule.hostname,
                 'general.activation_keys': ak.name,
                 'general.insecure': True,
@@ -543,11 +359,27 @@
                 'advanced.setup_rex': 'Yes (override)',
             }
         )
-        # TODO: Make this test parametirzed for all rhel versions after verifying BZ:2129254
-        rhel7_contenthost.create_custom_repos(rhel7=settings.repos.rhel7_os)
-        assert rhel7_contenthost.execute('yum install -y insights-client').status == 0
-        rhel7_contenthost.execute(cmd)
-        assert rhel7_contenthost.subscribed
-        assert rhel7_contenthost.execute('insights-client --test-connection').status == 0
-        values = session.host.get_details(rhel7_contenthost.hostname)
-        assert values['properties']['properties_table']['Insights'] == 'Reporting clear'
+        # Register host with Satellite and Insights.
+        rhel_contenthost.execute(cmd)
+        assert rhel_contenthost.subscribed
+        assert rhel_contenthost.execute('insights-client --test-connection').status == 0
+        values = session.host_new.get_host_statuses(rhel_contenthost.hostname)
+        assert values['Insights']['Status'] == 'Reporting'
+        # Clean insights status
+        result = module_target_sat.run(
+            f'foreman-rake rh_cloud_insights:clean_statuses SEARCH="{rhel_contenthost.hostname}"'
+        )
+        assert 'Deleted 1 insights statuses' in result.stdout
+        assert result.status == 0
+        # Workaround for not reading old data.
+        session.browser.refresh()
+        # Verify that Insights status is cleared.
+        values = session.host_new.get_host_statuses(rhel_contenthost.hostname)
+        assert values['Insights']['Status'] == 'N/A'
+        result = rhel_contenthost.run('insights-client')
+        assert result.status == 0
+        # Workaround for not reading old data.
+        session.browser.refresh()
+        # Verify the Insights status again.
+        values = session.host_new.get_host_statuses(rhel_contenthost.hostname)
+        assert values['Insights']['Status'] == 'Reporting'
diff --git a/tests/foreman/ui/test_rhcloud_inventory.py b/tests/foreman/ui/test_rhcloud_inventory.py
index cca98d346d0..9c98ade0205 100644
--- a/tests/foreman/ui/test_rhcloud_inventory.py
+++ b/tests/foreman/ui/test_rhcloud_inventory.py
@@ -1,32 +1,27 @@
 """Tests for RH Cloud - Inventory, also known as Insights Inventory Upload

-:Requirement: RH Cloud - Inventory
+:Requirement: RHCloud

 :CaseAutomation: Automated

-:CaseLevel: System
+:CaseComponent: RHCloud

-:CaseComponent: RHCloud-Inventory
-
-:Team: Platform
-
-:TestType: Functional
+:Team: Phoenix-subscriptions

 :CaseImportance: High

-:Upstream: No
 """
-from datetime import datetime
-from datetime import timedelta
+from datetime import datetime, timedelta

 import pytest
-from airgun.session import Session
 from wait_for import wait_for

 from robottelo.constants import DEFAULT_LOC
-from robottelo.utils.io import get_local_file_data
-from robottelo.utils.io import get_remote_report_checksum
-from robottelo.utils.io import get_report_data
+from robottelo.utils.io import (
+    get_local_file_data,
+    get_remote_report_checksum,
+    get_report_data,
+)


 def common_assertion(report_path, inventory_data, org, satellite):
@@ -59,7 +54,10 @@
 @pytest.mark.run_in_one_thread
 @pytest.mark.tier3
 def test_rhcloud_inventory_e2e(
-    inventory_settings, organization_ak_setup, rhcloud_registered_hosts, rhcloud_sat_host
+    inventory_settings,
+    rhcloud_manifest_org,
+    rhcloud_registered_hosts,
+    module_target_sat,
 ):
     """Generate report and verify its basic properties

@@ -76,21 +74,22 @@
         5. JSON files inside report can be parsed
         6. metadata.json lists all and only slice JSON files in tar
         7. Host counts in metadata matches host counts in slices
-        8. Assert Hostnames, IP addresses, and installed packages are present in report.
+        8. Assert Hostnames, IP addresses, and installed packages are present in the report.

     :CaseImportance: Critical

     :BZ: 1807829, 1926100
     """
-    org, ak = organization_ak_setup
+    org = rhcloud_manifest_org
     virtual_host, baremetal_host = rhcloud_registered_hosts
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
+    with module_target_sat.ui_session() as session:
         session.organization.select(org_name=org.name)
         session.location.select(loc_name=DEFAULT_LOC)
         timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
         session.cloudinventory.generate_report(org.name)
+        # wait_for_tasks report generation task to finish.
         wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
+            lambda: module_target_sat.api.ForemanTask()
            .search(
                 query={
                     'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
                     f'and started_at >= "{timestamp}"'
                 }
             )[0]
             .result
             == 'success',
@@ -106,12 +105,15 @@
         )
         report_path = session.cloudinventory.download_report(org.name)
         inventory_data = session.cloudinventory.read(org.name)
-
-        common_assertion(report_path, inventory_data, org, rhcloud_sat_host)
+        # Verify that generated archive is valid.
+        common_assertion(report_path, inventory_data, org, module_target_sat)
+        # Get report data for assertion
         json_data = get_report_data(report_path)
+        # Verify that hostnames are present in the report.
         hostnames = [host['fqdn'] for host in json_data['hosts']]
         assert virtual_host.hostname in hostnames
         assert baremetal_host.hostname in hostnames
+        # Verify that ip_addresses are present in the report.
         ip_addresses = [
             host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
             for host in json_data['hosts']
@@ -121,6 +123,7 @@
         assert baremetal_host.ip_addr in ip_addresses
         assert virtual_host.ip_addr in ipv4_addresses
         assert baremetal_host.ip_addr in ipv4_addresses
+        # Verify that packages are included in the report.
         all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
         for host_profiles in all_host_profiles:
             assert 'installed_packages' in host_profiles
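# Aside: `get_report_data` (robottelo.utils.io) returns the parsed inventory
# report asserted on above. A rough sketch of the parsing involved, assuming the
# report is a tar archive holding metadata.json plus per-slice JSON files (per
# the metadata/slice checks in this file's docstrings); details may differ:
import json
import tarfile

def parse_inventory_report(report_path):
    hosts = []
    with tarfile.open(report_path) as tar:
        for name in tar.getnames():
            if 'slice' in name:
                payload = json.load(tar.extractfile(name))
                hosts.extend(payload['hosts'])  # each slice holds a list of host records
    return {'hosts': hosts}

# Hostnames and addresses can then be pulled out exactly as the test does:
# hostnames = [host['fqdn'] for host in parse_inventory_report(path)['hosts']]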
@@ -129,41 +132,62 @@

 @pytest.mark.run_in_one_thread
 @pytest.mark.tier3
-def test_obfuscate_host_names(
-    rhcloud_sat_host, inventory_settings, organization_ak_setup, rhcloud_registered_hosts
+def test_rh_cloud_inventory_settings(
+    module_target_sat,
+    inventory_settings,
+    rhcloud_manifest_org,
+    rhcloud_registered_hosts,
 ):
-    """Test whether `Obfuscate host names` setting works as expected.
+    """Test whether `Obfuscate host names`, `Obfuscate host ipv4 addresses`
+    and `Exclude Packages` setting works as expected.

     :id: 3c3a36b6-6566-446b-b803-3f8f9aab2511

-    :Steps:
+    :customerscenario: true
+
+    :steps:

         1. Prepare machine and upload its data to Insights.
         2. Go to Configure > Inventory upload > enable “Obfuscate host names” setting.
-        3. Generate report after enabling the setting.
-        4. Check if host names are obfuscated in generated reports.
-        5. Disable previous setting.
-        6. Go to Administer > Settings > RH Cloud and enable "Obfuscate host names" setting.
-        7. Generate report after enabling the setting.
-        8. Check if host names are obfuscated in generated reports.
+        3. Go to Configure > Inventory upload > enable “Obfuscate host ipv4 addresses” setting.
+        4. Go to Configure > Inventory upload > enable “Exclude Packages” setting.
+        5. Generate report after enabling the settings.
+        6. Check if host names are obfuscated in generated reports.
+        7. Check if hosts ipv4 addresses are obfuscated in generated reports.
+        8. Check if packages are excluded from generated reports.
+        9. Disable previous setting.
+        10. Go to Administer > Settings > RH Cloud and enable "Obfuscate host names" setting.
+        11. Go to Administer > Settings > RH Cloud and enable "Obfuscate IPs" setting.
+        12. Go to Administer > Settings > RH Cloud and enable
+            "Don't upload installed packages" setting.
+        13. Generate report after enabling the setting.
+        14. Check if host names are obfuscated in generated reports.
+        15. Check if hosts ipv4 addresses are obfuscated in generated reports.
+        16. Check if packages are excluded from generated reports.

     :expectedresults:
         1. Obfuscated host names in reports generated.
+        2. Obfuscated host ipv4 addresses in generated reports.
+        3. Packages are excluded from reports generated.
+
+    :BZ: 1852594, 1889690

     :CaseAutomation: Automated
     """
-    org, ak = organization_ak_setup
+    org = rhcloud_manifest_org
     virtual_host, baremetal_host = rhcloud_registered_hosts
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
+    with module_target_sat.ui_session() as session:
         session.organization.select(org_name=org.name)
         session.location.select(loc_name=DEFAULT_LOC)
-        # Enable obfuscate_hostnames setting on inventory page.
+        # Enable settings on inventory page.
         session.cloudinventory.update({'obfuscate_hostnames': True})
+        session.cloudinventory.update({'obfuscate_ips': True})
+        session.cloudinventory.update({'exclude_packages': True})
         timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
         session.cloudinventory.generate_report(org.name)
         # wait_for_tasks report generation task to finish.
         wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
+            lambda: module_target_sat.api.ForemanTask()
             .search(
                 query={
                     'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
                     f'and started_at >= "{timestamp}"'
                 }
             )[0]
             .result
             == 'success',
@@ -179,122 +203,19 @@ def test_obfuscate_host_names(
             timeout=400,
             delay=15,
             silent_failure=True,
             handle_exception=True,
         )
         report_path = session.cloudinventory.download_report(org.name)
         inventory_data = session.cloudinventory.read(org.name)
-
-        # Assert that obfuscate_hostnames is enabled.
+        # Verify settings are enabled.
         assert inventory_data['obfuscate_hostnames'] is True
-        # Assert that generated archive is valid.
-        common_assertion(report_path, inventory_data, org, rhcloud_sat_host)
+        assert inventory_data['obfuscate_ips'] is True
+        assert inventory_data['exclude_packages'] is True
+        # Verify that generated archive is valid.
+        common_assertion(report_path, inventory_data, org, module_target_sat)
         # Get report data for assertion
         json_data = get_report_data(report_path)
+        # Verify that hostnames are obfuscated from the report.
         hostnames = [host['fqdn'] for host in json_data['hosts']]
         assert virtual_host.hostname not in hostnames
         assert baremetal_host.hostname not in hostnames
-        # Assert that host ip_addresses are present in the report.
-        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
-        assert virtual_host.ip_addr in ipv4_addresses
-        assert baremetal_host.ip_addr in ipv4_addresses
-        # Disable obfuscate_hostnames setting on inventory page.
-        session.cloudinventory.update({'obfuscate_hostnames': False})
-
-        # Enable obfuscate_hostnames setting.
-        rhcloud_sat_host.update_setting('obfuscate_inventory_hostnames', True)
-        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
-        session.cloudinventory.generate_report(org.name)
-        # wait_for_tasks report generation task to finish.
-        wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
-            .search(
-                query={
-                    'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
-                    f'and started_at >= "{timestamp}"'
-                }
-            )[0]
-            .result
-            == 'success',
-            timeout=400,
-            delay=15,
-            silent_failure=True,
-            handle_exception=True,
-        )
-        report_path = session.cloudinventory.download_report(org.name)
-        inventory_data = session.cloudinventory.read(org.name)
-
-        assert inventory_data['obfuscate_hostnames'] is True
-        json_data = get_report_data(report_path)
-        hostnames = [host['fqdn'] for host in json_data['hosts']]
-        assert virtual_host.hostname not in hostnames
-        assert baremetal_host.hostname not in hostnames
-        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
-        assert virtual_host.ip_addr in ipv4_addresses
-        assert baremetal_host.ip_addr in ipv4_addresses
-
-
-@pytest.mark.run_in_one_thread
-@pytest.mark.tier3
-def test_obfuscate_host_ipv4_addresses(
-    rhcloud_sat_host, inventory_settings, organization_ak_setup, rhcloud_registered_hosts
-):
-    """Test whether `Obfuscate host ipv4 addresses` setting works as expected.
-
-    :id: c0fc4ee9-a6a1-42c0-83f0-0f131ca9ab41
-
-    :customerscenario: true
-
-    :Steps:
-
-        1. Prepare machine and upload its data to Insights.
-        2. Go to Configure > Inventory upload > enable “Obfuscate host ipv4 addresses” setting.
-        3. Generate report after enabling the setting.
-        4. Check if hosts ipv4 addresses are obfuscated in generated reports.
-        5. Disable previous setting.
-        6. Go to Administer > Settings > RH Cloud and enable "Obfuscate IPs" setting.
-        7. Generate report after enabling the setting.
-        8. Check if hosts ipv4 addresses are obfuscated in generated reports.
-
-    :expectedresults:
-        1. Obfuscated host ipv4 addresses in generated reports.
-
-    :BZ: 1852594, 1889690
-
-    :CaseAutomation: Automated
-    """
-    org, ak = organization_ak_setup
-    virtual_host, baremetal_host = rhcloud_registered_hosts
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
-        session.organization.select(org_name=org.name)
-        session.location.select(loc_name=DEFAULT_LOC)
-        # Enable obfuscate_ips setting on inventory page.
-        session.cloudinventory.update({'obfuscate_ips': True})
-        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
-        session.cloudinventory.generate_report(org.name)
-        # wait_for_tasks report generation task to finish.
-        wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
-            .search(
-                query={
-                    'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
-                    f'and started_at >= "{timestamp}"'
-                }
-            )[0]
-            .result
-            == 'success',
-            timeout=400,
-            delay=15,
-            silent_failure=True,
-            handle_exception=True,
-        )
-        report_path = session.cloudinventory.download_report(org.name)
-        inventory_data = session.cloudinventory.read(org.name)
-        # Assert that obfuscate_ips is enabled.
-        assert inventory_data['obfuscate_ips'] is True
-        # Assert that generated archive is valid.
-        common_assertion(report_path, inventory_data, org, rhcloud_sat_host)
-        # Get report data for assertion
-        json_data = get_report_data(report_path)
-        hostnames = [host['fqdn'] for host in json_data['hosts']]
-        assert virtual_host.hostname in hostnames
-        assert baremetal_host.hostname in hostnames
-        # Assert that ip_addresses are obfuscated from report.
+        # Verify that ip_addresses are obfuscated from the report.
         ip_addresses = [
             host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
             for host in json_data['hosts']
@@ -304,16 +225,23 @@
         assert baremetal_host.ip_addr not in ip_addresses
         assert virtual_host.ip_addr not in ipv4_addresses
         assert baremetal_host.ip_addr not in ipv4_addresses
-        # Disable obfuscate_ips setting on inventory page.
+        # Verify that packages are excluded from the report.
+        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
+        for host_profiles in all_host_profiles:
+            assert 'installed_packages' not in host_profiles
+        # Disable settings on inventory page.
+        session.cloudinventory.update({'obfuscate_hostnames': False})
         session.cloudinventory.update({'obfuscate_ips': False})
-
-        # Enable obfuscate_inventory_ips setting.
-        rhcloud_sat_host.update_setting('obfuscate_inventory_ips', True)
+        session.cloudinventory.update({'exclude_packages': False})
+        # Enable the equivalent settings on the Administer > Settings page.
+        module_target_sat.update_setting('obfuscate_inventory_hostnames', True)
+        module_target_sat.update_setting('obfuscate_inventory_ips', True)
+        module_target_sat.update_setting('exclude_installed_packages', True)
         timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
         session.cloudinventory.generate_report(org.name)
         # wait_for_tasks report generation task to finish.
         wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
+            lambda: module_target_sat.api.ForemanTask()
             .search(
                 query={
                     'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
@@ -329,13 +257,17 @@ def test_obfuscate_host_ipv4_addresses(
                     f'and started_at >= "{timestamp}"'
                 }
             )[0]
             .result
             == 'success',
             timeout=400,
             delay=15,
             silent_failure=True,
             handle_exception=True,
         )
         report_path = session.cloudinventory.download_report(org.name)
         inventory_data = session.cloudinventory.read(org.name)
-
+        # Verify settings are enabled.
+        assert inventory_data['obfuscate_hostnames'] is True
         assert inventory_data['obfuscate_ips'] is True
+        assert inventory_data['exclude_packages'] is True
         # Get report data for assertion
         json_data = get_report_data(report_path)
+        # Verify that hostnames are obfuscated from the report.
         hostnames = [host['fqdn'] for host in json_data['hosts']]
-        assert virtual_host.hostname in hostnames
-        assert baremetal_host.hostname in hostnames
+        assert virtual_host.hostname not in hostnames
+        assert baremetal_host.hostname not in hostnames
+        # Verify that ip_addresses are obfuscated from the report.
         ip_addresses = [
             host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
             for host in json_data['hosts']
@@ -345,106 +277,7 @@
         assert baremetal_host.ip_addr not in ip_addresses
         assert virtual_host.ip_addr not in ipv4_addresses
         assert baremetal_host.ip_addr not in ipv4_addresses
-
-
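# Aside: `update_setting` returns the setting's previous value; the
# show_unsupported_templates test later in this diff relies on that to restore
# state. A small sketch of the resulting save/restore idiom (`sat` is an
# assumed Satellite fixture, the setting name is taken from the code above):
old_value = sat.update_setting('obfuscate_inventory_hostnames', True)
try:
    pass  # exercise behaviour with the setting enabled
finally:
    sat.update_setting('obfuscate_inventory_hostnames', old_value)  # restore original value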
-@pytest.mark.run_in_one_thread
-@pytest.mark.tier3
-def test_exclude_packages_setting(
-    rhcloud_sat_host, inventory_settings, organization_ak_setup, rhcloud_registered_hosts
-):
-    """Test whether `Exclude Packages` setting works as expected.
-
-    :id: 646093fa-fdd6-4f70-82aa-725e31fa3f12
-
-    :customerscenario: true
-
-    :Steps:
-
-        1. Prepare machine and upload its data to Insights
-        2. Go to Configure > Inventory upload > enable “Exclude Packages” setting.
-        3. Generate report after enabling the setting.
-        4. Check if packages are excluded from generated reports.
-        5. Disable previous setting.
-        6. Go to Administer > Settings > RH Cloud and enable
-           "Don't upload installed packages" setting.
-        7. Generate report after enabling the setting.
-        8. Check if packages are excluded from generated reports.
-
-    :expectedresults:
-        1. Packages are excluded from reports generated.
-
-    :BZ: 1852594
-
-    :CaseAutomation: Automated
-    """
-    org, ak = organization_ak_setup
-    virtual_host, baremetal_host = rhcloud_registered_hosts
-    with Session(hostname=rhcloud_sat_host.hostname) as session:
-        session.organization.select(org_name=org.name)
-        session.location.select(loc_name=DEFAULT_LOC)
-        # Enable exclude_packages setting on inventory page.
-        session.cloudinventory.update({'exclude_packages': True})
-        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
-        session.cloudinventory.generate_report(org.name)
-        wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
-            .search(
-                query={
-                    'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
-                    f'and started_at >= "{timestamp}"'
-                }
-            )[0]
-            .result
-            == 'success',
-            timeout=400,
-            delay=15,
-            silent_failure=True,
-            handle_exception=True,
-        )
-        report_path = session.cloudinventory.download_report(org.name)
-        inventory_data = session.cloudinventory.read(org.name)
-        assert inventory_data['exclude_packages'] is True
-        # Disable exclude_packages setting on inventory page.
-        session.cloudinventory.update({'exclude_packages': False})
-        # Assert that generated archive is valid.
-        common_assertion(report_path, inventory_data, org, rhcloud_sat_host)
-        # Get report data for assertion
-        json_data = get_report_data(report_path)
-        # Assert that right hosts are present in report.
-        hostnames = [host['fqdn'] for host in json_data['hosts']]
-        assert virtual_host.hostname in hostnames
-        assert baremetal_host.hostname in hostnames
-        # Assert that packages are excluded from report
-        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
-        for host_profiles in all_host_profiles:
-            assert 'installed_packages' not in host_profiles
-
-        # Enable exclude_installed_packages setting.
-        rhcloud_sat_host.update_setting('exclude_installed_packages', True)
-        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
-        session.cloudinventory.generate_report(org.name)
-        wait_for(
-            lambda: rhcloud_sat_host.api.ForemanTask()
-            .search(
-                query={
-                    'search': f'label = ForemanInventoryUpload::Async::GenerateReportJob '
-                    f'and started_at >= "{timestamp}"'
-                }
-            )[0]
-            .result
-            == 'success',
-            timeout=400,
-            delay=15,
-            silent_failure=True,
-            handle_exception=True,
-        )
-        report_path = session.cloudinventory.download_report(org.name)
-        inventory_data = session.cloudinventory.read(org.name)
-        assert inventory_data['exclude_packages'] is True
-        json_data = get_report_data(report_path)
-        hostnames = [host['fqdn'] for host in json_data['hosts']]
-        assert virtual_host.hostname in hostnames
-        assert baremetal_host.hostname in hostnames
+        # Verify that packages are excluded from the report.
         all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
         for host_profiles in all_host_profiles:
             assert 'installed_packages' not in host_profiles
@@ -456,7 +289,7 @@ def test_failed_inventory_upload():

     :id: 230d3fc3-2810-4385-b07b-30f9bf632488

-    :Steps:
+    :steps:
         1. Register a satellite content host with insights.
         2. Change 'DEST' from /var/lib/foreman/red_hat_inventory/uploads/uploader.sh
            to an invalid url.
@@ -480,7 +313,7 @@ def test_rhcloud_inventory_without_manifest(session, module_org, target_sat):

     :id: 1d90bb24-2380-4653-8ed6-a084fce66d1e

-    :Steps:
+    :steps:
        1. Don't import manifest to satellite.
        3. Go to Configure > Inventory upload > Click on restart button.
@@ -518,54 +351,3 @@ def test_rhcloud_inventory_without_manifest(session, module_org, target_sat):
         f'Skipping organization {module_org.name}, no candlepin certificate defined.'
         in inventory_data['uploading']['terminal']
     )
-
-
-@pytest.mark.stubbed
-def test_automatic_inventory_upload_enabled_setting():
-    """Test "Automatic inventory upload" setting.
-
-    :id: e84790c6-1700-46c4-9bf8-d8f1e63a7f1f
-
-    :Steps:
-        1. Register satellite content host with insights.
-        2. Sync inventory status.
-        3. Wait for "Inventory scheduled sync" task to execute.
-           (Change wait time to 1 minute for testing.)
-        4. Check whether the satellite shows successful inventory upload for the host.
-        5. Disable "Automatic inventory upload" setting.
-        6. Unregister host from insights OR Delete host from cloud.
-        7. Wait for "Inventory scheduled sync" task to execute.
-
-    :expectedresults:
-        1. When "Automatic inventory upload" setting is disabled then
-           "Inventory scheduled sync" task doesn't sync the inventory status.
-
-    :CaseImportance: Medium
-
-    :BZ: 1965239
-
-    :CaseAutomation: ManualOnly
-    """
-
-
-@pytest.mark.stubbed
-def test_automatic_inventory_upload_disabled_setting():
-    """Test "Automatic inventory upload" setting.
-
-    :id: 2c830833-3f92-497c-bbb9-f485a1d8eb47
-
-    :Steps:
-        1. Register few hosts with satellite.
-        2. Enable "Automatic inventory upload" setting.
-        3. Wait for "Inventory scheduled sync" recurring logic to run.
-
-    :expectedresults:
-        1. Satellite has "Inventory scheduled sync" recurring logic, which syncs
-           inventory status automatically if "Automatic inventory upload" setting is enabled.
-
-    :CaseImportance: Medium
-
-    :BZ: 1962695
-
-    :CaseAutomation: ManualOnly
-    """
diff --git a/tests/foreman/ui/test_role.py b/tests/foreman/ui/test_role.py
index c3418a43588..652bdcbd4f6 100644
--- a/tests/foreman/ui/test_role.py
+++ b/tests/foreman/ui/test_role.py
@@ -4,42 +4,32 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: UsersRoles

 :Team: Endeavour

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import random

-import pytest
-from airgun.session import Session
-from nailgun import entities
 from navmazing import NavigationTriesExceeded
+import pytest

-from robottelo.constants import PERMISSIONS_UI
-from robottelo.constants import ROLES
+from robottelo.constants import PERMISSIONS_UI, ROLES
 from robottelo.utils.datafactory import gen_string


 @pytest.mark.e2e
 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_end_to_end(session, module_org, module_location):
+def test_positive_end_to_end(session, module_org, module_location, module_target_sat):
     """Perform end to end testing for role component

     :id: 3284016a-e2df-4a0e-aa24-c95ab132eec1

     :expectedresults: All expected CRUD actions finished successfully

-    :CaseLevel: Integration
-
     :customerscenario: true

     :BZ: 1353788
@@ -53,8 +43,8 @@ def test_positive_end_to_end(session, module_org, module_location):
     cloned_role_name = gen_string('alpha')
     new_role_name = gen_string('alpha')
     new_role_description = gen_string('alpha')
-    new_org = entities.Organization().create()
-    new_loc = entities.Location(organization=[new_org]).create()
+    new_org = module_target_sat.api.Organization().create()
+    new_loc = module_target_sat.api.Location(organization=[new_org]).create()
     with session:
         session.role.create(
             {
@@ -164,7 +154,9 @@ def test_positive_delete_cloned_builtin(session):

 @pytest.mark.tier2
-def test_positive_create_filter_without_override(session, module_org, module_location, test_name):
+def test_positive_create_filter_without_override(
+    session, module_org, module_location, test_name, module_target_sat
+):
     """Create filter in role w/o overriding it

     :id: a7f76f6e-6c13-4b34-b38c-19501b65786f
@@ -186,7 +178,7 @@ def test_positive_create_filter_without_override(
     role_name = gen_string('alpha')
     username = gen_string('alpha')
     password = gen_string('alpha')
-    subnet = entities.Subnet()
+    subnet = module_target_sat.api.Subnet()
     subnet.create_missing()
     subnet_name = subnet.name
     with session:
@@ -230,7 +222,7 @@ def test_positive_create_filter_without_override(
                 'locations.resources.assigned': [module_location.name],
             }
         )
-    with Session(test_name, user=username, password=password) as session:
+    with module_target_sat.ui_session(test_name, user=username, password=password) as session:
         session.subnet.create(
             {
                 'subnet.name': subnet_name,
@@ -247,7 +239,7 @@ def test_positive_create_filter_without_override(

 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_create_non_overridable_filter(session, module_org, module_location, test_name):
+def test_positive_create_non_overridable_filter(
+    session, module_org, module_location, test_name, module_target_sat
+):
     """Create non overridden filter in role

     :id: 5ee281cf-28fa-439d-888d-b1f9aacc6d57
@@ -270,9 +264,9 @@ def test_positive_create_non_overridable_filter(
     username = gen_string('alpha')
     password = gen_string('alpha')
     new_name = gen_string('alpha')
-    user_org = entities.Organization().create()
-    user_loc = entities.Location().create()
-    arch = entities.Architecture().create()
+    user_org = module_target_sat.api.Organization().create()
+    user_loc = module_target_sat.api.Location().create()
+    arch = module_target_sat.api.Architecture().create()
     with session:
         session.role.create(
             {
@@ -301,7 +295,7 @@ def test_positive_create_non_overridable_filter(
                 'locations.resources.assigned': [user_loc.name],
             }
         )
-    with Session(test_name, user=username, password=password) as session:
+    with module_target_sat.ui_session(test_name, user=username, password=password) as session:
         session.architecture.update(arch.name, {'name': new_name})
         assert session.architecture.search(new_name)[0]['Name'] == new_name
         with pytest.raises(NavigationTriesExceeded):
@@ -310,7 +304,9 @@ def test_positive_create_non_overridable_filter(

 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_create_overridable_filter(session, module_org, module_location, test_name):
+def test_positive_create_overridable_filter(
+    session, module_org, module_location, test_name, module_target_sat
+):
     """Create overridden filter in role

     :id: 325e7e3e-60fc-4182-9585-0449d9660e8d
@@ -335,9 +331,9 @@ def test_positive_create_overridable_filter(
     role_name = gen_string('alpha')
     username = gen_string('alpha')
     password = gen_string('alpha')
-    role_org = entities.Organization().create()
-    role_loc = entities.Location().create()
-    subnet = entities.Subnet()
+    role_org = module_target_sat.api.Organization().create()
+    role_loc = module_target_sat.api.Location().create()
+    subnet = module_target_sat.api.Subnet()
     subnet.create_missing()
     subnet_name = subnet.name
     new_subnet_name = gen_string('alpha')
@@ -386,7 +382,7 @@ def test_positive_create_overridable_filter(
                 'locations.resources.assigned': [role_loc.name, module_location.name],
             }
         )
-    with Session(test_name, user=username, password=password) as session:
+    with module_target_sat.ui_session(test_name, user=username, password=password) as session:
         session.organization.select(org_name=module_org.name)
         session.location.select(loc_name=module_location.name)
         session.subnet.create(
@@ -493,7 +489,7 @@ def test_positive_create_with_sc_parameter_permission(session_puppet_enabled_sat

 @pytest.mark.tier2
-def test_positive_create_filter_admin_user_with_locs(test_name):
+def test_positive_create_filter_admin_user_with_locs(test_name, module_target_sat):
     """Attempt to create a role filter by admin user, who has 6+ locations assigned.

     :id: 688ecb7d-1d49-494c-97cc-0d5e715f3bb1
@@ -509,10 +505,10 @@
     role_name = gen_string('alpha')
     resource_type = 'Architecture'
     permissions = ['view_architectures', 'edit_architectures']
-    org = entities.Organization().create()
-    locations = [entities.Location(organization=[org]).create() for _ in range(6)]
+    org = module_target_sat.api.Organization().create()
+    locations = [module_target_sat.api.Location(organization=[org]).create() for _ in range(6)]
     password = gen_string('alphanumeric')
-    user = entities.User(
+    user = module_target_sat.api.User(
         admin=True,
         organization=[org],
         location=locations,
@@ -520,7 +516,7 @@
         default_location=locations[0],
         password=password,
     ).create()
-    with Session(test_name, user=user.login, password=password) as session:
+    with module_target_sat.ui_session(test_name, user=user.login, password=password) as session:
         session.role.create({'name': role_name})
         assert session.role.search(role_name)[0]['Name'] == role_name
         session.filter.create(
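# Aside: the recurring refactor in this diff swaps nailgun's module-level
# `entities` and airgun's `Session` for APIs bound to the Satellite under test.
# A condensed sketch of the pattern these role tests now follow; fixture names
# are as used in the diff, while `org`, `loc`, and `role_name` are assumed to
# exist in scope:
password = gen_string('alphanumeric')
user = module_target_sat.api.User(
    admin=True, organization=[org], location=[loc], password=password
).create()
with module_target_sat.ui_session(test_name, user=user.login, password=password) as session:
    session.role.create({'name': role_name})  # UI actions run as the created user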
@@ -531,7 +527,7 @@

 @pytest.mark.tier2
-def test_positive_create_filter_admin_user_with_orgs(test_name):
+def test_positive_create_filter_admin_user_with_orgs(test_name, module_target_sat):
     """Attempt to create a role filter by admin user, who has 10 organizations assigned.

     :id: 04208e17-34b5-46b1-84dd-b8a973521d30
@@ -548,9 +544,9 @@
     resource_type = 'Architecture'
     permissions = ['view_architectures', 'edit_architectures']
     password = gen_string('alphanumeric')
-    organizations = [entities.Organization().create() for _ in range(10)]
-    loc = entities.Location(organization=[organizations[0]]).create()
-    user = entities.User(
+    organizations = [module_target_sat.api.Organization().create() for _ in range(10)]
+    loc = module_target_sat.api.Location(organization=[organizations[0]]).create()
+    user = module_target_sat.api.User(
         admin=True,
         organization=organizations,
         location=[loc],
@@ -558,7 +554,7 @@
         default_location=loc,
         password=password,
     ).create()
-    with Session(test_name, user=user.login, password=password) as session:
+    with module_target_sat.ui_session(test_name, user=user.login, password=password) as session:
         session.role.create({'name': role_name})
         assert session.role.search(role_name)[0]['Name'] == role_name
         session.filter.create(
diff --git a/tests/foreman/ui/test_settings.py b/tests/foreman/ui/test_settings.py
index 3101226057b..72bcf9223f0 100644
--- a/tests/foreman/ui/test_settings.py
+++ b/tests/foreman/ui/test_settings.py
@@ -4,29 +4,20 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: Settings

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
 import math

-import pytest
-from airgun.session import Session
 from fauxfactory import gen_url
-from nailgun import entities
+import pytest

-from robottelo.cli.user import User
 from robottelo.config import settings
-from robottelo.utils.datafactory import filtered_datapoint
-from robottelo.utils.datafactory import gen_string
+from robottelo.utils.datafactory import filtered_datapoint, gen_string


 @filtered_datapoint
@@ -35,14 +26,14 @@ def invalid_settings_values():
     return [' ', '-1', 'text', '0']


-def add_content_views_to_composite(composite_cv, org, repo):
+def add_content_views_to_composite(composite_cv, org, repo, module_target_sat):
     """Add necessary number of content views to the composite one

     :param composite_cv: Composite content view object
     :param org: Organisation of satellite
     :param repo: repository need to added in content view
     """
-    content_view = entities.ContentView(organization=org).create()
+    content_view = module_target_sat.api.ContentView(organization=org).create()
     content_view.repository = [repo]
     content_view.update(['repository'])
     content_view.publish()
@@ -54,7 +45,9 @@ def add_content_views_to_composite(composite_cv, org, repo):
 @pytest.mark.run_in_one_thread
 @pytest.mark.tier3
 @pytest.mark.parametrize('setting_update', ['restrict_composite_view'], indirect=True)
-def test_positive_update_restrict_composite_view(session, setting_update, repo_setup):
+def test_positive_update_restrict_composite_view(
+    session, setting_update, repo_setup, module_target_sat
+):
     """Update settings parameter restrict_composite_view to Yes/True and ensure a composite
     content view may not be published or promoted, unless the component content view versions
     that it includes exist in the target environment.
@@ -66,13 +59,13 @@ def test_positive_update_restrict_composite_view(session, setting_update, repo_setup):
     :expectedresults: Parameter is updated successfully

     :CaseImportance: Critical
-
-    :CaseLevel: Acceptance
     """
     property_name = setting_update.name
-    composite_cv = entities.ContentView(composite=True, organization=repo_setup['org']).create()
+    composite_cv = module_target_sat.api.ContentView(
+        composite=True, organization=repo_setup['org']
+    ).create()
     content_view = add_content_views_to_composite(
-        composite_cv, repo_setup['org'], repo_setup['repo']
+        composite_cv, repo_setup['org'], repo_setup['repo'], module_target_sat
     )
     composite_cv.publish()
     with session:
@@ -84,10 +77,10 @@
                 session.contentview.promote(
                     composite_cv.name, 'Version 1.0', repo_setup['lce'].name
                 )
-            assert (
-                'Administrator -> Settings -> Content page using the '
-                'restrict_composite_view flag.' in str(context.value)
-            )
+        assert (
+            'Administrator -> Settings -> Content page using the '
+            'restrict_composite_view flag.' in str(context.value)
+        )
         else:
             result = session.contentview.promote(
                 composite_cv.name, 'Version 1.0', repo_setup['lce'].name
@@ -112,7 +105,6 @@ def test_positive_httpd_proxy_url_update(session, setting_update):
     :BZ: 1677282

     :CaseImportance: Medium
-
     """
     property_name = setting_update.name
     with session:
@@ -123,7 +115,7 @@


 @pytest.mark.tier2
-@pytest.mark.parametrize('setting_update', ['foreman_url', 'entries_per_page'], indirect=True)
+@pytest.mark.parametrize('setting_update', ['foreman_url'], indirect=True)
 def test_negative_validate_foreman_url_error_message(session, setting_update):
     """Updates some settings with invalid values (an exceptional tier2 test)

@@ -138,9 +130,10 @@
     property_name = setting_update.name
     with session:
         invalid_value = [invalid_value for invalid_value in invalid_settings_values()][0]
+        err_msg = 'URL must be valid and schema must be one of http and https, Invalid HTTP(S) URL'
         with pytest.raises(AssertionError) as context:
             session.settings.update(f'name = {property_name}', invalid_value)
-            assert 'Value is invalid: must be integer' in str(context.value)
+            assert err_msg in str(context.value)


@@ -223,8 +216,6 @@ def test_positive_update_login_page_footer_text(session, setting_update):
     :customerscenario: true

     :BZ: 2157869
-
-    :CaseLevel: Acceptance
     """
     property_name = setting_update.name
     default_value = setting_update.default
@@ -253,7 +244,7 @@


 @pytest.mark.tier3
-def test_negative_settings_access_to_non_admin():
+def test_negative_settings_access_to_non_admin(module_target_sat):
     """Check non admin users can't access Administer -> Settings tab

     :id: 34bb9376-c5fe-431a-ac0d-ef030c0ab50e
@@ -268,14 +259,12 @@
     :expectedresults: Administer -> Settings tab should not be available to non admin users

     :CaseImportance: Medium
-
-    :CaseLevel: Acceptance
     """
     login = gen_string('alpha')
     password = gen_string('alpha')
-    entities.User(admin=False, login=login, password=password).create()
+    module_target_sat.api.User(admin=False, login=login, password=password).create()
     try:
-        with Session(user=login, password=password) as session:
+        with module_target_sat.ui_session(user=login, password=password) as session:
             result = session.settings.permission_denied()
             assert (
                 result == 'Permission denied You are not authorized to perform this action. '
@@ -283,7 +272,7 @@
                 'from a Satellite administrator: view_settings Back'
             )
     finally:
-        User.delete({'login': login})
+        module_target_sat.cli.User.delete({'login': login})


 @pytest.mark.stubbed
@@ -315,8 +304,6 @@ def test_positive_update_email_delivery_method_smtp():

     :CaseImportance: Critical

-    :CaseLevel: Acceptance
-
     :CaseAutomation: NotAutomated
     """

@@ -350,8 +337,6 @@ def test_negative_update_email_delivery_method_smtp():

     :CaseImportance: Critical

-    :CaseLevel: Acceptance
-
     :CaseAutomation: NotAutomated
     """

@@ -381,8 +366,6 @@ def test_positive_update_email_delivery_method_sendmail(session, target_sat):
     :BZ: 2080324

     :CaseImportance: Critical
-
-    :CaseLevel: Acceptance
     """
     property_name = "Email"
     mail_config_default_param = {
@@ -394,7 +377,7 @@
         "send_welcome_email": "",
     }
     mail_config_default_param = {
-        content: entities.Setting().search(query={'search': f'name={content}'})[0]
+        content: target_sat.api.Setting().search(query={'search': f'name={content}'})[0]
         for content in mail_config_default_param
     }
     mail_config_new_params = {
@@ -445,8 +428,6 @@ def test_negative_update_email_delivery_method_sendmail():

     :CaseImportance: Critical

-    :CaseLevel: Acceptance
-
     :CaseAutomation: NotAutomated
     """

@@ -477,8 +458,6 @@ def test_positive_email_yaml_config_precedence():

     :CaseImportance: Critical

-    :CaseLevel: Acceptance
-
     :CaseAutomation: NotAutomated
     """

@@ -490,7 +469,7 @@ def test_negative_update_hostname_with_empty_fact(session, setting_update):

     :id: e0eaab69-4926-4c1e-b111-30c51ede273e

-    :Steps:
+    :steps:
         1. Goto settings ->Discovered tab -> Hostname_facts
         2. Set empty hostname_facts (without any value)

@@ -503,14 +482,13 @@
     :expectedresults: Error should be raised on setting empty value for
         hostname_facts setting
-
     """
     new_hostname = ""
     property_name = setting_update.name
     with session:
         with pytest.raises(AssertionError) as context:
             session.settings.update(property_name, new_hostname)
-            assert 'can\'t be blank' in str(context.value)
+        assert 'can\'t be blank' in str(context.value)


 @pytest.mark.run_in_one_thread
@@ -521,7 +499,7 @@ def test_positive_entries_per_page(session, setting_update):

     :id: 009026b6-7550-40aa-9f78-5eb7f7e3800f

-    :Steps:
+    :steps:
         1. Navigate to Administer > Settings > General tab
         2. Update the entries per page value
         3. GoTo Monitor > Tasks Table > Pagination
@@ -538,14 +516,47 @@
     :BZ: 1746221

     :CaseImportance: Medium
-
-    :CaseLevel: Acceptance
     """
     property_name = setting_update.name
     property_value = 19
     with session:
         session.settings.update(f"name={property_name}", property_value)
         page_content = session.task.read_all(widget_names="Pagination")
-        assert str(property_value) in page_content["Pagination"]["per_page"]
-        total_pages = math.ceil(int(page_content["Pagination"]["total_items"]) / property_value)
-        assert str(total_pages) == page_content["Pagination"]["pages"]
+        assert str(property_value) in page_content["Pagination"]["_items"]
+        total_pages_str = page_content["Pagination"]['_items'].split()[-2]
+        total_pages = math.ceil(int(total_pages_str.split()[-1]) / property_value)
+        assert str(total_pages) == page_content["Pagination"]['_total_pages'].split()[-1]
+
+
+@pytest.mark.tier2
+def test_positive_show_unsupported_templates(request, target_sat, module_org, module_location):
+    """Verify setting show_unsupported_templates with new custom template
+
+    :id: e0eaab69-4926-4c1e-b111-30c51ede273z
+
+    :steps:
+        1. Goto Settings -> Provisioning tab -> Show unsupported provisioning templates
+
+    :CaseImportance: Medium
+
+    :expectedresults: Custom templates aren't searchable when set to No,
+        and are searchable when set to Yes (default)
+    """
+    pt = target_sat.api.ProvisioningTemplate(
+        name=gen_string('alpha'),
+        organization=[module_org],
+        location=[module_location],
+        template=gen_string('alpha'),
+        snippet=False,
+    ).create()
+    request.addfinalizer(pt.delete)
+    with target_sat.ui_session() as session:
+        session.organization.select(org_name=module_org.name)
+        session.location.select(loc_name=module_location.name)
+        default_value = target_sat.update_setting('show_unsupported_templates', 'No')
+        assert not session.provisioningtemplate.search(f'name={pt.name}')
+
+        # Verify with show_unsupported_templates=Yes
+        target_sat.update_setting('show_unsupported_templates', default_value)
+        template = session.provisioningtemplate.search(f'name={pt.name}')
+        assert template[0]['Name'] == pt.name
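# Aside: a worked example of the pagination arithmetic above, assuming the
# widget renders an items string like '1 - 19 of 87 items' (format assumed):
import math

items_text = '1 - 19 of 87 items'
per_page = 19
total_items = int(items_text.split()[-2])      # second-to-last token -> 87
assert math.ceil(total_items / per_page) == 5  # 87 items / 19 per page rounds up to 5 pages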
diff --git a/tests/foreman/ui/test_smartclassparameter.py b/tests/foreman/ui/test_smartclassparameter.py
index 9a27ce4f0ef..adc5169f2ae 100644
--- a/tests/foreman/ui/test_smartclassparameter.py
+++ b/tests/foreman/ui/test_smartclassparameter.py
@@ -4,20 +4,14 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Puppet

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-from random import choice
-from random import uniform
+from random import choice, uniform

 import pytest
 import yaml
@@ -81,8 +75,6 @@ def test_positive_end_to_end(session_puppet_enabled_sat, module_puppet_classes,

     :expectedresults: All expected basic actions finished successfully

-    :CaseLevel: Integration
-
     :CaseImportance: High
     """
     sc_param = sc_params_list.pop()
@@ -202,8 +194,6 @@ def test_positive_create_matcher_attribute_priority(

     :expectedresults: The YAML output has the value only for fqdn matcher.

-    :CaseLevel: Integration
-
     :BZ: 1241249

     :CaseImportance: Critical
@@ -381,8 +371,6 @@ def test_positive_update_matcher_from_attribute(
         1. The host/hostgroup is saved with changes.
         2. Matcher value in parameter is updated from fqdn/hostgroup.

-    :CaseLevel: Integration
-
     :CaseImportance: Critical
     """
     sc_param = sc_params_list.pop()
@@ -449,8 +437,6 @@
         1. The matcher for deleted attribute removed from parameter.
         2. On recreating attribute, the matcher should not reappear in parameter.
-
-    :CaseLevel: Integration
     """
     sc_param = sc_params_list.pop()
     matcher_value = gen_string('alpha')
@@ -523,8 +509,6 @@
         4. And the value shown hidden.
         5. Parameter is successfully unhidden.

-    :CaseLevel: Integration
-
     :CaseImportance: Critical
     """
     sc_param = sc_params_list.pop()
diff --git a/tests/foreman/ui/test_subnet.py b/tests/foreman/ui/test_subnet.py
index 814ea6bae10..45886c3ec86 100644
--- a/tests/foreman/ui/test_subnet.py
+++ b/tests/foreman/ui/test_subnet.py
@@ -4,20 +4,15 @@

 :CaseAutomation: Automated

-:CaseLevel: Acceptance
-
 :CaseComponent: Networking

 :Team: Rocket

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-import pytest
 from fauxfactory import gen_ipaddr
+import pytest

 from robottelo.utils.datafactory import gen_string
@@ -40,8 +35,6 @@ def test_positive_end_to_end(session, module_target_sat, module_dom):

     :expectedresults: All expected CRUD actions finished successfully

-    :CaseLevel: Integration
-
     :CaseImportance: High
     """
     name = gen_string('alpha')
diff --git a/tests/foreman/ui/test_subscription.py b/tests/foreman/ui/test_subscription.py
index e548c1d2535..348398a1de2 100644
--- a/tests/foreman/ui/test_subscription.py
+++ b/tests/foreman/ui/test_subscription.py
@@ -4,34 +4,28 @@

 :CaseAutomation: Automated

-:CaseLevel: Component
-
 :CaseComponent: SubscriptionManagement

 :team: Phoenix-subscriptions

-:TestType: Functional
-
 :CaseImportance: High

-:Upstream: No
 """
-import time
 from tempfile import mkstemp
+import time

-import pytest
-from airgun.session import Session
 from fauxfactory import gen_string
-from nailgun import entities
+import pytest

-from robottelo.cli.factory import make_virt_who_config
 from robottelo.config import settings
-from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
-from robottelo.constants import PRDS
-from robottelo.constants import REPOS
-from robottelo.constants import REPOSET
-from robottelo.constants import VDC_SUBSCRIPTION_NAME
-from robottelo.constants import VIRT_WHO_HYPERVISOR_TYPES
+from robottelo.constants import (
+    DEFAULT_SUBSCRIPTION_NAME,
+    PRDS,
+    REPOS,
+    REPOSET,
+    VDC_SUBSCRIPTION_NAME,
+    VIRT_WHO_HYPERVISOR_TYPES,
+)
 from robottelo.utils.manifest import clone

 pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.skip_if_not_set('fake_manifest')]
@@ -48,18 +42,18 @@ def golden_ticket_host_setup(function_entitlement_manifest_org, module_target_sat):
         reposet=REPOSET['rhst7'],
         releasever=None,
     )
-    rh_repo = entities.Repository(id=rh_repo_id).read()
+    rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read()
     rh_repo.sync()
-    custom_product = entities.Product(organization=org).create()
-    custom_repo = entities.Repository(
+    custom_product = module_target_sat.api.Product(organization=org).create()
+    custom_repo = module_target_sat.api.Repository(
         name=gen_string('alphanumeric').upper(), product=custom_product
     ).create()
     custom_repo.sync()
-    ak = entities.ActivationKey(
+    ak = module_target_sat.api.ActivationKey(
         content_view=org.default_content_view,
         max_hosts=100,
         organization=org,
-        environment=entities.LifecycleEnvironment(id=org.library.id),
+        environment=module_target_sat.api.LifecycleEnvironment(id=org.library.id),
         auto_attach=True,
     ).create()
     return org, ak
@@ -94,20 +88,13 @@ def test_positive_end_to_end(session, target_sat):
     """
     expected_message_lines = [
         'Are you sure you want to delete the manifest?',
-        'Note: Deleting a subscription manifest is STRONGLY discouraged. '
-        'Deleting a manifest will:',
-        'Delete all subscriptions that are attached to running hosts.',
-        'Delete all subscriptions attached to activation keys.',
-        'Disable Red Hat Insights.',
-        'Require you to upload the subscription-manifest and re-attach '
-        'subscriptions to hosts and activation keys.',
-        'This action should only be taken in extreme circumstances or for debugging purposes.',
+        'Note: Deleting a subscription manifest is STRONGLY discouraged.',
+        'This action should only be taken for debugging purposes.',
     ]
-    org = entities.Organization().create()
+    org = target_sat.api.Organization().create()
     _, temporary_local_manifest_path = mkstemp(prefix='manifest-', suffix='.zip')
-    with clone() as manifest:
-        with open(temporary_local_manifest_path, 'wb') as file_handler:
-            file_handler.write(manifest.content.read())
+    with clone() as manifest, open(temporary_local_manifest_path, 'wb') as file_handler:
+        file_handler.write(manifest.content.read())
     with session:
         session.organization.select(org.name)
         # Ignore "Danger alert: Katello::Errors::UpstreamConsumerNotFound'" as server will connect
@@ -119,14 +106,11 @@ def test_positive_end_to_end(session, target_sat):
             ignore_error_messages=['Danger alert: Katello::Errors::UpstreamConsumerNotFound'],
         )
         assert session.subscription.has_manifest
-        # dashboard check
-        subscription_values = session.dashboard.read('SubscriptionStatus')['subscriptions']
-        assert subscription_values[0]['Subscription Status'] == 'Active Subscriptions'
-        assert int(subscription_values[0]['Count']) >= 1
-        assert subscription_values[1]['Subscription Status'] == 'Subscriptions Expiring in 120 Days'
-        assert int(subscription_values[1]['Count']) == 0
-        assert subscription_values[2]['Subscription Status'] == 'Recently Expired Subscriptions'
-        assert int(subscription_values[2]['Count']) == 0
+        subscriptions = session.subscription.read_subscriptions()
+        assert len(subscriptions) >= 1
+        assert any('Red Hat' in subscription['Name'] for subscription in subscriptions)
+        assert int(subscriptions[0]['Entitlements']) > 0
+        assert int(subscriptions[0]['Consumed']) >= 0
         # manifest delete testing
         delete_message = session.subscription.read_delete_manifest_message()
         assert ' '.join(expected_message_lines) == delete_message
@@ -158,12 +142,10 @@ def test_positive_access_with_non_admin_user_without_manifest(test_name, target_sat):

     :BZ: 1417082

-    :CaseLevel: Integration
-
     :CaseImportance: Critical
     """
-    org = entities.Organization().create()
-    role = entities.Role(organization=[org]).create()
+    org = target_sat.api.Organization().create()
+    role = target_sat.api.Role(organization=[org]).create()
     target_sat.api_factory.create_role_permissions(
         role,
         {
@@ -177,14 +159,14 @@
         },
     )
     user_password = gen_string('alphanumeric')
-    user = entities.User(
+    user = target_sat.api.User(
         admin=False,
         role=[role],
         password=user_password,
         organization=[org],
         default_organization=org,
     ).create()
-    with Session(test_name, user=user.login, password=user_password) as session:
+    with target_sat.ui_session(test_name, user=user.login, password=user_password) as session:
         assert not session.subscription.has_manifest
@@ -205,25 +187,23 @@ def test_positive_access_with_non_admin_user_with_manifest(

     :customerscenario: true

-    :CaseLevel: Integration
-
     :CaseImportance: Critical
     """
     org = function_entitlement_manifest_org
-    role = entities.Role(organization=[org]).create()
+    role = target_sat.api.Role(organization=[org]).create()
target_sat.api.Role(organization=[org]).create() target_sat.api_factory.create_role_permissions( role, {'Katello::Subscription': ['view_subscriptions'], 'Organization': ['view_organizations']}, ) user_password = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( admin=False, role=[role], password=user_password, organization=[org], default_organization=org, ).create() - with Session(test_name, user=user.login, password=user_password) as session: + with target_sat.ui_session(test_name, user=user.login, password=user_password) as session: assert ( session.subscription.search(f'name = "{DEFAULT_SUBSCRIPTION_NAME}"')[0]['Name'] == DEFAULT_SUBSCRIPTION_NAME @@ -231,7 +211,9 @@ def test_positive_access_with_non_admin_user_with_manifest( @pytest.mark.tier2 -def test_positive_access_manifest_as_another_admin_user(test_name, target_sat): +def test_positive_access_manifest_as_another_admin_user( + test_name, target_sat, function_entitlement_manifest +): """Other admin users should be able to access and manage a manifest uploaded by a different admin. @@ -243,27 +225,25 @@ def test_positive_access_manifest_as_another_admin_user(test_name, target_sat): :customerscenario: true - :CaseLevel: Integration - :CaseImportance: High """ - org = entities.Organization().create() + org = target_sat.api.Organization().create() user1_password = gen_string('alphanumeric') - user1 = entities.User( + user1 = target_sat.api.User( admin=True, password=user1_password, organization=[org], default_organization=org ).create() user2_password = gen_string('alphanumeric') - user2 = entities.User( + user2 = target_sat.api.User( admin=True, password=user2_password, organization=[org], default_organization=org ).create() # use the first admin to upload a manifest - with Session(test_name, user=user1.login, password=user1_password) as session: - target_sat.upload_manifest(org.id) + with target_sat.ui_session(test_name, user=user1.login, password=user1_password) as session: + target_sat.upload_manifest(org.id, function_entitlement_manifest.content) assert session.subscription.has_manifest # store subscriptions that have "Red Hat" in the name for later rh_subs = session.subscription.search("Red Hat") # try to view and delete the manifest with another admin - with Session(test_name, user=user2.login, password=user2_password) as session: + with target_sat.ui_session(test_name, user=user2.login, password=user2_password) as session: assert session.subscription.has_manifest assert rh_subs == session.subscription.search("Red Hat") session.subscription.delete_manifest( @@ -273,7 +253,9 @@ def test_positive_access_manifest_as_another_admin_user(test_name, target_sat): @pytest.mark.tier3 -def test_positive_view_vdc_subscription_products(session, rhel7_contenthost, target_sat): +def test_positive_view_vdc_subscription_products( + session, rhel7_contenthost, target_sat, function_entitlement_manifest_org +): """Ensure that the Virtual Datacenters subscription provided products list is not empty and that a consumed product exists in content products.
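A note on the recurring Session -> target_sat.ui_session substitution in the hunks above: the tests stop building a free-standing airgun Session and instead ask the Satellite object under test for one. The real helper lives on robottelo's Satellite class and is not part of this diff; a minimal sketch of the idea, assuming airgun's Session accepts session_name, user, password, and hostname keywords:

from contextlib import contextmanager

from airgun.session import Session


class SatelliteUISketch:
    """Illustrative stand-in for the object behind `target_sat`."""

    hostname = 'satellite.example.com'  # placeholder, not from this diff

    @contextmanager
    def ui_session(self, test_name=None, user=None, password=None):
        # Bind the browser session to this Satellite so callers no longer
        # pass the hostname around (or import Session) themselves.
        with Session(
            session_name=test_name, user=user, password=password, hostname=self.hostname
        ) as session:
            yield session

Centralizing session construction like this also gives one place to hang login handling or recording later, which is presumably why the PR funnels every test through it.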
@@ -303,19 +285,15 @@ def test_positive_view_vdc_subscription_products(session, rhel7_contenthost, tar :BZ: 1366327 :parametrized: yes - - :CaseLevel: System """ - org = entities.Organization().create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = function_entitlement_manifest_org + lce = target_sat.api.LifecycleEnvironment(organization=org).create() repos_collection = target_sat.cli_factory.RepositoryCollection( distro='rhel7', repositories=[target_sat.cli_factory.RHELAnsibleEngineRepository(cdn=True)], ) product_name = repos_collection.rh_repos[0].data['product'] - repos_collection.setup_content( - org.id, lce.id, upload_manifest=True, rh_subscriptions=[DEFAULT_SUBSCRIPTION_NAME] - ) + repos_collection.setup_content(org.id, lce.id, rh_subscriptions=[DEFAULT_SUBSCRIPTION_NAME]) rhel7_contenthost.contenthost_setup( target_sat, org.label, @@ -337,7 +315,9 @@ def test_positive_view_vdc_subscription_products(session, rhel7_contenthost, tar @pytest.mark.skip_if_not_set('libvirt') @pytest.mark.tier3 -def test_positive_view_vdc_guest_subscription_products(session, rhel7_contenthost, target_sat): +def test_positive_view_vdc_guest_subscription_products( + session, rhel7_contenthost, target_sat, function_entitlement_manifest_org +): """Ensure that Virtual Data Centers guest subscription Provided Products and Content Products are not empty. @@ -364,16 +344,14 @@ def test_positive_view_vdc_guest_subscription_products(session, rhel7_contenthos :BZ: 1395788, 1506636, 1487317 :parametrized: yes - - :CaseLevel: System """ - org = entities.Organization().create() - lce = entities.LifecycleEnvironment(organization=org).create() + org = function_entitlement_manifest_org + lce = target_sat.api.LifecycleEnvironment(organization=org).create() provisioning_server = settings.libvirt.libvirt_hostname rh_product_repository = target_sat.cli_factory.RHELAnsibleEngineRepository(cdn=True) product_name = rh_product_repository.data['product'] # Create a new virt-who config - virt_who_config = make_virt_who_config( + virt_who_config = target_sat.cli_factory.virt_who_config( { 'organization-id': org.id, 'hypervisor-type': VIRT_WHO_HYPERVISOR_TYPES['libvirt'], @@ -390,6 +368,7 @@ def test_positive_view_vdc_guest_subscription_products(session, rhel7_contenthos hypervisor_hostname=provisioning_server, configure_ssh=True, subscription_name=VDC_SUBSCRIPTION_NAME, + upload_manifest=False, extra_repos=[rh_product_repository.data], ) virt_who_hypervisor_host = virt_who_data['virt_who_hypervisor_host'] @@ -400,7 +379,8 @@ def test_positive_view_vdc_guest_subscription_products(session, rhel7_contenthos f'subscription_name = "{VDC_SUBSCRIPTION_NAME}" ' f'and name = "{virt_who_hypervisor_host["name"]}"' ) - assert content_hosts and content_hosts[0]['Name'] == virt_who_hypervisor_host['name'] + assert content_hosts + assert content_hosts[0]['Name'] == virt_who_hypervisor_host['name'] # ensure that hypervisor guests subscription provided products list is not empty and # that the product is in provided products. 
provided_products = session.subscription.provided_products( @@ -416,7 +396,9 @@ def test_positive_view_vdc_guest_subscription_products(session, rhel7_contenthos @pytest.mark.tier3 -def test_select_customizable_columns_uncheck_and_checks_all_checkboxes(session): +def test_select_customizable_columns_uncheck_and_checks_all_checkboxes( + session, function_org, function_entitlement_manifest +): """Ensures that no column headers from checkboxes show up in the table after unticking everything from selectable customizable column @@ -450,28 +432,20 @@ def test_select_customizable_columns_uncheck_and_checks_all_checkboxes(session): 'Consumed': False, 'Entitlements': False, } - org = entities.Organization().create() - _, temporary_local_manifest_path = mkstemp(prefix='manifest-', suffix='.zip') - with clone() as manifest: - with open(temporary_local_manifest_path, 'wb') as file_handler: - file_handler.write(manifest.content.read()) - + org = function_org with session: session.organization.select(org.name) - # Ignore "Danger alert: Katello::Errors::UpstreamConsumerNotFound" as server will connect - # to upstream subscription service to verify - # the consumer uuid, that will be displayed in flash error messages - # Note: this happen only when using clone manifest. session.subscription.add_manifest( - temporary_local_manifest_path, + function_entitlement_manifest.path, ignore_error_messages=['Danger alert: Katello::Errors::UpstreamConsumerNotFound'], ) headers = session.subscription.filter_columns(checkbox_dict) - assert not headers + assert headers == ('Select all rows',) assert len(checkbox_dict) == 9 time.sleep(3) checkbox_dict.update((k, True) for k in checkbox_dict) col = session.subscription.filter_columns(checkbox_dict) + checkbox_dict.update({'Select all rows': ''}) assert set(col) == set(checkbox_dict) @@ -535,13 +509,15 @@ def test_positive_candlepin_events_processed_by_STOMP( :CaseImportance: High """ org = function_entitlement_manifest_org - repo = entities.Repository(product=entities.Product(organization=org).create()).create() + repo = target_sat.api.Repository( + product=target_sat.api.Product(organization=org).create() + ).create() repo.sync() - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=org.default_content_view, max_hosts=100, organization=org, - environment=entities.LifecycleEnvironment(id=org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=org.library.id), ).create() rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost(org.name, ak.name) @@ -557,6 +533,6 @@ def test_positive_candlepin_events_processed_by_STOMP( rhel7_contenthost.hostname, widget_names='details' )['details']['subscription_status'] assert 'Fully entitled' in updated_sub_status - response = entities.Ping().search_json()['services']['candlepin_events'] + response = target_sat.api.Ping().search_json()['services']['candlepin_events'] assert response['status'] == 'ok' assert '0 Failed' in response['message'] diff --git a/tests/foreman/ui/test_sync.py b/tests/foreman/ui/test_sync.py index 65622c73873..308ef7b485a 100644 --- a/tests/foreman/ui/test_sync.py +++ b/tests/foreman/ui/test_sync.py @@ -4,29 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Repositories :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string from nailgun import entities +import pytest from robottelo.config import settings -from robottelo.constants 
import CONTAINER_REGISTRY_HUB -from robottelo.constants import CONTAINER_UPSTREAM_NAME -from robottelo.constants import PRDS -from robottelo.constants import REPO_TYPE -from robottelo.constants import REPOS -from robottelo.constants import REPOSET +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + PRDS, + REPO_TYPE, + REPOS, + REPOSET, +) from robottelo.constants.repos import FEDORA_OSTREE_REPO @@ -40,43 +37,16 @@ def module_custom_product(module_org): return entities.Product(organization=module_org).create() -@pytest.fixture(scope='module') -def module_org_with_manifest(module_target_sat): - org = entities.Organization().create() - module_target_sat.upload_manifest(org.id) - return org - - -@pytest.mark.tier2 -@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync_custom_repo(session, module_custom_product): - """Create Content Custom Sync with minimal input parameters - - :id: 00fb0b04-0293-42c2-92fa-930c75acee89 - - :expectedresults: Sync procedure is successful - - :CaseImportance: Critical - """ - repo = entities.Repository(url=settings.repos.yum_1.url, product=module_custom_product).create() - with session: - results = session.sync_status.synchronize([(module_custom_product.name, repo.name)]) - assert len(results) == 1 - assert results[0] == 'Syncing Complete.' - - @pytest.mark.run_in_one_thread @pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_sync_rh_repos(session, target_sat, module_org_with_manifest): +def test_positive_sync_rh_repos(session, target_sat, module_entitlement_manifest_org): """Create Content RedHat Sync with two repos. :id: e30f6509-0b65-4bcc-a522-b4f3089d3911 :expectedresults: Sync procedure for RedHat Repos is successful - - :CaseLevel: Integration """ repos = ( target_sat.cli_factory.SatelliteCapsuleRepository(cdn=True), @@ -85,10 +55,10 @@ def test_positive_sync_rh_repos(session, target_sat, module_org_with_manifest): distros = ['rhel6', 'rhel7'] repo_collections = [ target_sat.cli_factory.RepositoryCollection(distro=distro, repositories=[repo]) - for distro, repo in zip(distros, repos) + for distro, repo in zip(distros, repos, strict=True) ] for repo_collection in repo_collections: - repo_collection.setup(module_org_with_manifest.id, synchronize=False) + repo_collection.setup(module_entitlement_manifest_org.id, synchronize=False) repo_paths = [ ( repo.repo_data['product'], @@ -99,7 +69,7 @@ def test_positive_sync_rh_repos(session, target_sat, module_org_with_manifest): for repo in repos ] with session: - session.organization.select(org_name=module_org_with_manifest.name) + session.organization.select(org_name=module_entitlement_manifest_org.name) results = session.sync_status.synchronize(repo_paths) assert len(results) == len(repo_paths) assert all([result == 'Syncing Complete.' for result in results]) @@ -118,8 +88,6 @@ def test_positive_sync_custom_ostree_repo(session, module_custom_product): :customerscenario: true - :CaseLevel: Integration - :BZ: 1625783 """ repo = entities.Repository( @@ -139,12 +107,12 @@ def test_positive_sync_custom_ostree_repo(session, module_custom_product): @pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest, target_sat): +def test_positive_sync_rh_ostree_repo(session, target_sat, module_entitlement_manifest_org): """Sync CDN based ostree repository. 
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5 - :Steps: + :steps: 1. Import a valid manifest 2. Enable the OStree repo and sync it @@ -152,20 +120,18 @@ def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest, target_ :expectedresults: ostree repo should be synced successfully from CDN - :CaseLevel: Integration - :BZ: 1625783 """ target_sat.api_factory.enable_rhrepo_and_fetchid( basearch=None, - org_id=module_org_with_manifest.id, + org_id=module_entitlement_manifest_org.id, product=PRDS['rhah'], repo=REPOS['rhaht']['name'], reposet=REPOSET['rhaht'], releasever=None, ) with session: - session.organization.select(org_name=module_org_with_manifest.name) + session.organization.select(org_name=module_entitlement_manifest_org.name) results = session.sync_status.synchronize([(PRDS['rhah'], REPOS['rhaht']['name'])]) assert len(results) == 1 assert results[0] == 'Syncing Complete.' @@ -180,8 +146,6 @@ def test_positive_sync_docker_via_sync_status(session, module_org): :expectedresults: Sync procedure for specific docker repository is successful - - :CaseLevel: Integration """ product = entities.Product(organization=module_org).create() repo_name = gen_string('alphanumeric') diff --git a/tests/foreman/ui/test_syncplan.py b/tests/foreman/ui/test_syncplan.py index 0632af2a034..ae4737fc9e1 100644 --- a/tests/foreman/ui/test_syncplan.py +++ b/tests/foreman/ui/test_syncplan.py @@ -4,29 +4,22 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SyncPlans :team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from datetime import datetime, timedelta import time -from datetime import datetime -from datetime import timedelta -import pytest from fauxfactory import gen_choice from nailgun import entities +import pytest from robottelo.constants import SYNC_INTERVAL -from robottelo.utils.datafactory import gen_string -from robottelo.utils.datafactory import valid_cron_expressions +from robottelo.utils.datafactory import gen_string, valid_cron_expressions def validate_repo_content(repo, content_types, after_sync=True): @@ -54,7 +47,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.tier2 -def test_positive_end_to_end(session, module_org): +def test_positive_end_to_end(session, module_org, target_sat): """Perform end to end scenario for sync plan component :id: 39c140a6-ca65-4b6a-a640-4a023a2f0f12 @@ -63,8 +56,6 @@ def test_positive_end_to_end(session, module_org): :customerscenario: true - :CaseLevel: Integration - :BZ: 1693795 """ plan_name = gen_string('alpha') @@ -99,7 +90,8 @@ def test_positive_end_to_end(session, module_org): assert syncplan_values['details']['description'] == new_description # Create and add two products to sync plan for _ in range(2): - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() + target_sat.api.Repository(product=product).create() session.syncplan.add_product(plan_name, product.name) # Remove a product and assert syncplan still searchable session.syncplan.remove_product(plan_name, product.name) @@ -116,8 +108,6 @@ def test_positive_end_to_end_custom_cron(session): :id: 48c88529-6318-47b0-97bc-eb46aae0294a :expectedresults: All CRUD actions for component finished successfully - - :CaseLevel: Integration """ plan_name = gen_string('alpha') description = gen_string('alpha') @@ -199,8 +189,6 @@ def test_positive_synchronize_custom_product_custom_cron_real_time(session, modu :id: 
c551ef9a-6e5a-435a-b24d-e86de203a2bb :expectedresults: Product is synchronized successfully. - - :CaseLevel: System """ plan_name = gen_string('alpha') product = entities.Product(organization=module_org).create() @@ -267,8 +255,6 @@ def test_positive_synchronize_custom_product_custom_cron_past_sync_date( :id: 4d9ed0bf-a63c-44de-846d-7cf302273bcc :expectedresults: Product is synchronized successfully. - - :CaseLevel: System """ plan_name = gen_string('alpha') product = entities.Product(organization=module_org).create() diff --git a/tests/foreman/ui/test_templatesync.py b/tests/foreman/ui/test_templatesync.py index 86b4a2b8c9a..7f0cc0e70ac 100644 --- a/tests/foreman/ui/test_templatesync.py +++ b/tests/foreman/ui/test_templatesync.py @@ -4,24 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Integration - :CaseComponent: TemplatesPlugin :Team: Endeavour -:TestType: Functional - -:Upstream: No """ -import pytest -import requests from fauxfactory import gen_string from nailgun import entities +import pytest +import requests from robottelo.config import settings -from robottelo.constants import FOREMAN_TEMPLATE_IMPORT_URL -from robottelo.constants import FOREMAN_TEMPLATE_ROOT_DIR +from robottelo.constants import FOREMAN_TEMPLATE_IMPORT_URL, FOREMAN_TEMPLATE_ROOT_DIR @pytest.fixture(scope='module') @@ -46,7 +40,7 @@ def test_positive_import_templates(session, templates_org, templates_loc): :bz: 1778181, 1778139 - :Steps: + :steps: 1. Navigate to Host -> Sync Templates, and choose Import. 2. Select fields: @@ -103,7 +97,7 @@ def test_positive_export_templates(session, create_import_export_local_dir, targ :bz: 1778139 - :Steps: + :steps: 1. Navigate to Host -> Sync Templates, and choose Export. 2. Select fields: @@ -164,13 +158,13 @@ def test_positive_export_filtered_templates_to_git(session, git_repository, git_ :id: e4de338a-9ab9-492e-ac42-6cc2ebcd1792 - :Steps: + :steps: 1. Export only the templates matching with regex e.g: `^atomic.*` to git repo. :expectedresults: 1. Assert matching templates are exported to git repo. 
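The :BZ: 2013759 addition just below changes what the export flash message is expected to contain: the git password embedded in the repo URL must now come back masked, which is exactly what the updated url.replace(git.password, "*****") assertion encodes. A self-contained illustration of that transformation, using placeholder values rather than anything from this diff:

def mask_secret(url, secret, placeholder='*****'):
    # Same replacement the assertion applies to `url` before comparing.
    return url.replace(secret, placeholder) if secret else url


assert (
    mask_secret('http://gituser:s3cret@git.example.com/repo.git', 's3cret')
    == 'http://gituser:*****@git.example.com/repo.git'
)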
- :BZ: 1785613 + :BZ: 1785613, 2013759 :parametrized: yes @@ -189,7 +183,10 @@ def test_positive_export_filtered_templates_to_git(session, git_repository, git_ 'template.dirname': dirname, } ) - assert export_title == f'Export to {url} and branch {git_branch} as user {session._user}' + assert ( + export_title == f'Export to {url.replace(git.password, "*****")} ' + f'and branch {git_branch} as user {session._user}' + ) path = f"{dirname}/provisioning_templates/provision" auth = (git.username, git.password) api_url = f"http://{git.hostname}:{git.http_port}/api/v1/repos/{git.username}" diff --git a/tests/foreman/ui/test_user.py b/tests/foreman/ui/test_user.py index d0847a9d0d6..a69b58250bf 100644 --- a/tests/foreman/ui/test_user.py +++ b/tests/foreman/ui/test_user.py @@ -4,28 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import random +from fauxfactory import gen_email, gen_string import pytest -from airgun.session import Session -from fauxfactory import gen_email -from fauxfactory import gen_string -from robottelo.constants import DEFAULT_ORG -from robottelo.constants import PERMISSIONS -from robottelo.constants import ROLES +from robottelo.constants import DEFAULT_ORG, PERMISSIONS, ROLES @pytest.mark.e2e @@ -39,8 +30,6 @@ def test_positive_end_to_end(session, target_sat, test_name, module_org, module_ :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') @@ -93,7 +82,7 @@ def test_positive_end_to_end(session, target_sat, test_name, module_org, module_ assert session.user.search(new_name)[0]['Username'] == new_name assert not session.user.search(name) # Login into application using new user - with Session(test_name, new_name, password) as newsession: + with target_sat.ui_session(test_name, new_name, password) as newsession: newsession.organization.select(module_org.name) newsession.location.select(module_location.name) newsession.activationkey.create({'name': ak_name}) @@ -101,7 +90,7 @@ def test_positive_end_to_end(session, target_sat, test_name, module_org, module_ current_user = newsession.activationkey.read(ak_name, 'current_user')['current_user'] assert current_user == f'{firstname} {lastname}' # Delete user - with Session('deletehostsession') as deletehostsession: + with target_sat.ui_session('deletehostsession') as deletehostsession: deletehostsession.user.delete(new_name) assert not deletehostsession.user.search(new_name) @@ -114,8 +103,6 @@ def test_positive_create_with_multiple_roles(session, target_sat): :expectedresults: User is created successfully and has proper roles assigned - - :CaseLevel: Integration """ name = gen_string('alpha') role1 = gen_string('alpha') @@ -145,8 +132,6 @@ def test_positive_create_with_all_roles(session): :id: 814593ca-1566-45ea-9eff-e880183b1ee3 :expectedresults: User is created successfully - - :CaseLevel: Integration """ name = gen_string('alpha') password = gen_string('alpha') @@ -172,8 +157,6 @@ def test_positive_create_with_multiple_orgs(session, target_sat): :id: d74c0284-3995-4a4a-8746-00858282bf5d :expectedresults: User is created successfully - - :CaseLevel: Integration """ name = gen_string('alpha') org_name1 = gen_string('alpha') @@ -208,8 +191,6 @@ def test_positive_update_with_multiple_roles(session, target_sat): :id: 127fb368-09fd-4f10-8319-566a1bcb5cd2 :expectedresults: User is updated successfully - - :CaseLevel: 
Integration """ name = gen_string('alpha') role_names = [target_sat.api.Role().create().name for _ in range(3)] @@ -236,8 +217,6 @@ def test_positive_update_with_all_roles(session): :id: cd7a9cfb-a700-45f2-a11d-bba6be3c810d :expectedresults: User is updated successfully - - :CaseLevel: Integration """ name = gen_string('alpha') password = gen_string('alpha') @@ -263,8 +242,6 @@ def test_positive_update_orgs(session, target_sat): :id: a207188d-1ad1-4ff1-9906-bae1d91104fd :expectedresults: User is updated - - :CaseLevel: Integration """ name = gen_string('alpha') password = gen_string('alpha') @@ -298,8 +275,6 @@ def test_positive_create_product_with_limited_user_permission( :customerscenario: true - :CaseLevel: Component - :CaseImportance: High :BZ: 1771937 @@ -324,7 +299,7 @@ def test_positive_create_product_with_limited_user_permission( password=password, mail='test@test.com', ).create() - with Session(test_name, username, password) as newsession: + with target_sat.ui_session(test_name, username, password) as newsession: newsession.product.create( {'name': product_name, 'label': product_label, 'description': product_description} ) @@ -347,8 +322,6 @@ def test_personal_access_token_admin(): 1. Should show output of the api endpoint 2. When revoked, authentication error - :CaseLevel: System - :CaseImportance: High """ @@ -372,8 +345,6 @@ def test_positive_personal_access_token_user_with_role(): 2. When an incorrect role and end point is used, missing permission should be displayed. - :CaseLevel: System - :CaseImportance: High """ @@ -392,7 +363,5 @@ def test_expired_personal_access_token(): :expectedresults: Authentication error - :CaseLevel: System - :CaseImportance: Medium """ diff --git a/tests/foreman/ui/test_usergroup.py b/tests/foreman/ui/test_usergroup.py index f899d4fc099..092126ae868 100644 --- a/tests/foreman/ui/test_usergroup.py +++ b/tests/foreman/ui/test_usergroup.py @@ -4,22 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from fauxfactory import gen_string -from fauxfactory import gen_utf8 +from fauxfactory import gen_string, gen_utf8 from nailgun import entities +import pytest @pytest.mark.tier2 @@ -30,8 +24,6 @@ def test_positive_delete_with_user(session, module_org, module_location): :id: 2bda3db5-f54f-412f-831f-8e005631f271 :expectedresults: Usergroup is deleted but added user is not - - :CaseLevel: Integration """ user_name = gen_string('alpha') group_name = gen_utf8(smp=False) @@ -61,8 +53,6 @@ def test_positive_end_to_end(session, module_org, module_location): :expectedresults: All expected CRUD actions finished successfully - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') diff --git a/tests/foreman/ui/test_webhook.py b/tests/foreman/ui/test_webhook.py index 731bb6053f0..c0e3b470c20 100644 --- a/tests/foreman/ui/test_webhook.py +++ b/tests/foreman/ui/test_webhook.py @@ -4,21 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: HooksandWebhooks :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +from fauxfactory import gen_string, gen_url import pytest -from fauxfactory import gen_string -from fauxfactory import gen_url @pytest.mark.tier1 @@ -74,7 +68,9 @@ def test_positive_end_to_end(session, target_sat): assert values['credentials']['capsule_auth'] is True assert values['credentials']['verify_ssl'] is False assert values['credentials']['user'] 
== username - result = target_sat.execute('echo "Webhook.last.password" | foreman-rake console') + result = target_sat.execute( + f'echo "Webhook.find_by_name(\\"{hook_name}\\").password" | foreman-rake console' + ) assert password in result.stdout session.webhook.update( hook_name, @@ -94,7 +90,9 @@ def test_positive_end_to_end(session, target_sat): assert values['general']['template'] == new_template assert values['general']['http_method'] == new_http_method assert values['general']['enabled'] is True - result = target_sat.execute('echo "Webhook.last.password" | foreman-rake console') + result = target_sat.execute( + f'echo "Webhook.find_by_name(\\"{new_hook_name}\\").password" | foreman-rake console' + ) assert password in result.stdout session.webhook.delete(new_hook_name) assert not session.webhook.search(new_hook_name) diff --git a/tests/foreman/virtwho/api/test_esx.py b/tests/foreman/virtwho/api/test_esx.py index 39e68087f1f..bb6046188a0 100644 --- a/tests/foreman/virtwho/api/test_esx.py +++ b/tests/foreman/virtwho/api/test_esx.py @@ -4,72 +4,33 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import create_http_proxy -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_guest_info - - -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_server': settings.virtwho.esx.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def delete_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + create_http_proxy, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_option, +) @pytest.mark.usefixtures('delete_host') class TestVirtWhoConfigforEsx: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + 
self, default_org, target_sat, virtwho_config_api, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -77,18 +38,13 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -120,64 +76,9 @@ def test_positive_deploy_configure_by_id( assert result['subscription_status_label'] == 'Fully entitled' @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat + def test_positive_debug_option( + self, default_org, form_data_api, virtwho_config_api, target_sat ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 166ec4f8-e3fa-4555-9acb-1a5d693a42bb - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_debug_option(self, default_org, form_data, virtwho_config, target_sat): """Verify debug option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -186,22 +87,22 @@ def test_positive_debug_option(self, default_org, form_data, virtwho_config, tar :expectedresults: debug option can be updated. 
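The test_positive_deploy_configure_by_id_script test above now takes deploy_type_api with indirect parametrization, folding the former deploy-by-id and deploy-by-script cases (including the removed by-script test) into one. The fixture's real definition lives in the virt-who conftest and is not shown in this diff; a hypothetical sketch, assembled from the helpers the removed code called:

import pytest

from robottelo.utils.virtwho import (
    deploy_configure_by_command,
    deploy_configure_by_script,
    get_configure_command,
)


@pytest.fixture
def deploy_type_api(request, default_org, form_data_api, virtwho_config_api):
    """Deploy by CLI command ('id') or generated script ('script') and
    return the (hypervisor_name, guest_name) pair the tests unpack."""
    if request.param == 'id':
        command = get_configure_command(virtwho_config_api.id, default_org.name)
        return deploy_configure_by_command(
            command, form_data_api['hypervisor_type'], debug=True, org=default_org.label
        )
    script = virtwho_config_api.deploy_script()
    return deploy_configure_by_script(
        script['virt_who_config_script'],
        form_data_api['hypervisor_type'],
        debug=True,
        org=default_org.label,
    )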
- :CaseLevel: Integration - :CaseImportance: Medium """ options = {'true': '1', 'false': '0', '1': '1', '0': '0'} for key, value in sorted(options.items(), key=lambda item: item[0]): - virtwho_config.debug = key - virtwho_config.update(['debug']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.debug = key + virtwho_config_api.update(['debug']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 - def test_positive_interval_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_interval_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify interval option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -210,8 +111,6 @@ def test_positive_interval_option(self, default_org, form_data, virtwho_config, :expectedresults: interval option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ options = { @@ -225,17 +124,17 @@ def test_positive_interval_option(self, default_org, form_data, virtwho_config, '4320': '259200', } for key, value in sorted(options.items(), key=lambda item: int(item[0])): - virtwho_config.interval = key - virtwho_config.update(['interval']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.interval = key + virtwho_config_api.update(['interval']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -245,24 +144,24 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ # esx and rhevm support hwuuid option values = ['uuid', 'hostname', 'hwuuid'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 - def test_positive_filter_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_filter_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify filter option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -271,8 +170,6 @@ def test_positive_filter_option(self, default_org, form_data, virtwho_config, ta :expectedresults: filter and filter_hosts can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ whitelist = {'filtering_mode': '1', 'whitelist': '.*redhat.com'} @@ -281,26 +178,30 @@ def test_positive_filter_option(self, default_org, form_data, virtwho_config, ta whitelist['filter_host_parents'] = '.*redhat.com' blacklist['exclude_host_parents'] = '.*redhat.com' # Update Whitelist and check the result - virtwho_config.filtering_mode = whitelist['filtering_mode'] - virtwho_config.whitelist = whitelist['whitelist'] - virtwho_config.filter_host_parents = whitelist['filter_host_parents'] - virtwho_config.update(whitelist.keys()) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.filtering_mode = whitelist['filtering_mode'] + virtwho_config_api.whitelist = whitelist['whitelist'] + virtwho_config_api.filter_host_parents = whitelist['filter_host_parents'] + virtwho_config_api.update(whitelist.keys()) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('filter_hosts', config_file) == whitelist['whitelist'] assert ( get_configure_option('filter_host_parents', config_file) == whitelist['filter_host_parents'] ) # Update Blacklist and check the result - virtwho_config.filtering_mode = blacklist['filtering_mode'] - virtwho_config.blacklist = blacklist['blacklist'] - virtwho_config.exclude_host_parents = blacklist['exclude_host_parents'] - virtwho_config.update(blacklist.keys()) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.filtering_mode = blacklist['filtering_mode'] + virtwho_config_api.blacklist = blacklist['blacklist'] + virtwho_config_api.exclude_host_parents = blacklist['exclude_host_parents'] + virtwho_config_api.update(blacklist.keys()) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('exclude_hosts', config_file) == blacklist['blacklist'] assert ( get_configure_option('exclude_host_parents', config_file) @@ -308,7 +209,9 @@ def test_positive_filter_option(self, default_org, form_data, virtwho_config, ta ) @pytest.mark.tier2 - def test_positive_proxy_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_proxy_option( + self, default_org, default_location, form_data_api, virtwho_config_api, target_sat + ): """Verify http_proxy option by "PUT /foreman_virt_who_configure/api/v2/configs/:id"" @@ -317,38 +220,44 @@ def test_positive_proxy_option(self, default_org, form_data, virtwho_config, tar :expectedresults: http_proxy/https_proxy and no_proxy option can be updated. 
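The option checks in these tests all funnel through get_configure_option, which reads one key back out of the config that the deploy step wrote. The real helper in robottelo.utils.virtwho resolves this on the Satellite itself; a local approximation of the lookup, assuming the INI layout virt-who configs use and an illustrative path:

import configparser


def read_option(option, path='/etc/virt-who.d/virt-who.conf'):  # path is illustrative
    # Scan every section, since the key may live under [global],
    # [defaults], or a per-config section depending on the file.
    parser = configparser.ConfigParser()
    parser.read(path)
    for section in parser.sections():
        if parser.has_option(section, option):
            return parser.get(section, option)
    raise LookupError(f'option {option} not found in {path}')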
- :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1902199 """ - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) # Check default NO_PROXY option assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == '*' # Check HTTP Proxy and No_PROXY option http_proxy_url, http_proxy_name, http_proxy_id = create_http_proxy( - http_type='http', org=default_org + http_type='http', org=default_org, location=default_location ) no_proxy = 'test.satellite.com' - virtwho_config.http_proxy_id = http_proxy_id - virtwho_config.no_proxy = no_proxy - virtwho_config.update(['http_proxy_id', 'no_proxy']) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.http_proxy_id = http_proxy_id + virtwho_config_api.no_proxy = no_proxy + virtwho_config_api.update(['http_proxy_id', 'no_proxy']) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy # Check HTTPS Proxy option - https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy(org=default_org) - virtwho_config.http_proxy_id = https_proxy_id - virtwho_config.update(['http_proxy_id']) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy( + org=default_org, location=default_location + ) + virtwho_config_api.http_proxy_id = https_proxy_id + virtwho_config_api.update(['http_proxy_id']) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url @pytest.mark.tier2 def test_positive_configure_organization_list( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify "GET /foreman_virt_who_configure/ @@ -358,18 +267,18 @@ def test_positive_configure_organization_list( :expectedresults: Config can be searched in org list
hammer virt-who-config deploy hypervisor with special characters" @@ -377,8 +286,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -386,25 +293,25 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor_password'] = "Tes't" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) + form_data_api['hypervisor_password'] = "Tes't" + virtwho_config_api = target_sat.api.VirtWhoConfig(**form_data_api).create() + assert virtwho_config_api.status == 'unknown' + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_status = deploy_configure_by_command_check(command) assert deploy_status == 'Finished successfully' - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert ( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - virtwho_config.delete() + virtwho_config_api.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) # check the hypervisor password contains backtick - form_data['hypervisor_password'] = "my`password" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "my`password" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, default_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -417,11 +324,13 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) @pytest.mark.tier2 - def test_positive_remove_env_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_remove_env_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """remove option 'env=' from the virt-who configuration file and without any error :id: 981b6828-a7ed-46d9-9c6c-9fb22af4011e @@ -430,8 +339,6 @@ def test_positive_remove_env_option(self, default_org, form_data, virtwho_config the option "env=" should be removed from etc/virt-who.d/virt-who.conf /var/log/messages should not display warning message - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -439,26 +346,25 @@ def test_positive_remove_env_option(self, default_org, form_data, virtwho_config :BZ: 1834897 """ - command = get_configure_command(virtwho_config.id, default_org.name) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + 
.search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/api/test_esx_sca.py b/tests/foreman/virtwho/api/test_esx_sca.py index ada886ea4ec..2d43f59637a 100644 --- a/tests/foreman/virtwho/api/test_esx_sca.py +++ b/tests/foreman/virtwho/api/test_esx_sca.py @@ -4,62 +4,31 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :Team: Phoenix -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import create_http_proxy -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_server': settings.virtwho.esx.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + create_http_proxy, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforEsx: @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, target_sat, virtwho_config_api, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -67,34 +36,19 @@ def test_positive_deploy_configure_by_id_script( :expectedresults: Config can be created and deployed - :CaseLevel: 
Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_debug_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify debug option by "PUT @@ -104,23 +58,21 @@ def test_positive_debug_option( :expectedresults: debug option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ options = {'0': '0', '1': '1', 'false': '0', 'true': '1'} for key, value in options.items(): - virtwho_config.debug = key - virtwho_config.update(['debug']) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.debug = key + virtwho_config_api.update(['debug']) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_interval_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify interval option by "PUT @@ -130,8 +82,6 @@ def test_positive_interval_option( :expectedresults: interval option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ options = { @@ -145,17 +95,17 @@ def test_positive_interval_option( '4320': '259200', } for key, value in options.items(): - virtwho_config.interval = key - virtwho_config.update(['interval']) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.interval = key + virtwho_config_api.update(['interval']) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -165,17 +115,15 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @@ -183,7 +131,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.parametrize('filter_type', ['whitelist', 'blacklist']) @pytest.mark.parametrize('option_type', ['edit', 'create']) def test_positive_filter_option( - self, module_sca_manifest_org, form_data, target_sat, filter_type, option_type + self, module_sca_manifest_org, form_data_api, target_sat, filter_type, option_type ): """Verify filter option by "PUT @@ -195,14 +143,12 @@ def test_positive_filter_option( 1. filter and filter_hosts can be updated. 2. create virt-who config with filter and filter_hosts options work well. - :CaseLevel: Integration - :CaseImportance: Medium """ regex = '.*redhat.com' whitelist = {'filtering_mode': '1', 'whitelist': regex} blacklist = {'filtering_mode': '2', 'blacklist': regex} - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() if option_type == "edit": if filter_type == "whitelist": whitelist['filter_host_parents'] = regex @@ -218,7 +164,7 @@ def test_positive_filter_option( virtwho_config.update(blacklist.keys()) command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(virtwho_config.id) result = target_sat.api.VirtWhoConfig().search( @@ -243,20 +189,20 @@ def test_positive_filter_option( elif option_type == "create": virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) if filter_type == "whitelist": - form_data['filtering_mode'] = 1 - form_data['whitelist'] = regex - form_data['filter_host_parents'] = regex + form_data_api['filtering_mode'] = 1 + form_data_api['whitelist'] = regex + form_data_api['filter_host_parents'] = regex elif filter_type == "blacklist": - form_data['filtering_mode'] = 2 - form_data['blacklist'] = regex - form_data['exclude_host_parents'] = regex - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['filtering_mode'] = 2 + form_data_api['blacklist'] = regex + form_data_api['exclude_host_parents'] = regex + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(virtwho_config.id) result = 
target_sat.api.VirtWhoConfig().search( @@ -266,7 +212,7 @@ def test_positive_filter_option( assert get_configure_option('filter_hosts', config_file) == regex assert ( get_configure_option('filter_host_parents', config_file) - == whitelist['filter_host_parents'] + == whitelist['whitelist'] ) assert result.whitelist == regex assert result.filter_host_parents == regex @@ -275,13 +221,15 @@ assert get_configure_option('exclude_hosts', config_file) == regex assert ( get_configure_option('exclude_host_parents', config_file) - == blacklist['exclude_host_parents'] + == blacklist['blacklist'] ) assert result.blacklist == regex assert result.exclude_host_parents == regex @pytest.mark.tier2 - def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_sat): + def test_positive_proxy_option( + self, module_sca_manifest_org, default_location, form_data_api, target_sat + ): """Verify http_proxy option by "PUT /foreman_virt_who_configure/api/v2/configs/:id"" @@ -292,22 +240,20 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ 1. http_proxy and no_proxy option can be updated. 2. create virt-who config with http_proxy and no_proxy options work well. - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1902199 """ - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) # Check default NO_PROXY option assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == '*' # Check HTTP Proxy and No_PROXY option http_proxy_url, http_proxy_name, http_proxy_id = create_http_proxy( - http_type='http', org=module_sca_manifest_org + http_type='http', org=module_sca_manifest_org, location=default_location ) no_proxy = 'test.satellite.com' virtwho_config.http_proxy_id = http_proxy_id @@ -315,7 +261,7 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ virtwho_config.update(['http_proxy_id', 'no_proxy']) command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy @@ -325,26 +271,26 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ assert result.no_proxy == no_proxy # Check HTTPS Proxy option https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy( - org=module_sca_manifest_org + org=module_sca_manifest_org, location=default_location ) virtwho_config.http_proxy_id = https_proxy_id virtwho_config.update(['http_proxy_id']) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} )
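Both proxy tests (the esx one earlier and the SCA one above) now pass a location into create_http_proxy alongside the org, presumably so the created proxy is visible in the taxonomy the hosts use. The helper itself is not shown in this diff; a hypothetical sketch of its shape, returning the same (url, name, id) triple the tests unpack, with a placeholder proxy endpoint:

from fauxfactory import gen_string


def create_http_proxy_sketch(sat, org, location, http_type='https'):
    # Placeholder endpoint; the real helper picks an authenticated or
    # unauthenticated proxy URL from settings based on `http_type`.
    url = 'http://proxy.example.com:3128'
    proxy = sat.api.HTTPProxy(
        name=gen_string('alpha'), url=url, organization=[org.id], location=[location.id]
    ).create()
    return proxy.url, proxy.name, proxy.id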
# Check the http proxy option, create virt-who config via http proxy id - form_data['http_proxy_id'] = http_proxy_id - form_data['no_proxy'] = no_proxy - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['http_proxy_id'] = http_proxy_id + form_data_api['no_proxy'] = no_proxy + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy @@ -354,12 +300,12 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ assert result.no_proxy == no_proxy virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) @pytest.mark.tier2 def test_positive_configure_organization_list( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify "GET /foreman_virt_who_configure/ @@ -369,20 +315,18 @@ def test_positive_configure_organization_list( :expectedresults: Config can be searched in org list - :CaseLevel: Integration - :CaseImportance: Medium """ - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) - search_result = virtwho_config.get_organization_configs(data={'per_page': '1000'}) - assert [item for item in search_result['results'] if item['name'] == form_data['name']] + search_result = virtwho_config_api.get_organization_configs(data={'per_page': '1000'}) + assert [item for item in search_result['results'] if item['name'] == form_data_api['name']] @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, module_sca_manifest_org, form_data, target_sat + self, module_sca_manifest_org, form_data_api, target_sat ): """Verify "hammer virt-who-config deploy hypervisor with special characters" @@ -390,8 +334,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -399,8 +341,8 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor_password'] = "Tes't" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "Tes't" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -413,11 +355,11 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not 
target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) # check the hypervisor password contains backtick - form_data['hypervisor_password'] = "my`password" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "my`password" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -430,12 +372,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) @pytest.mark.tier2 def test_positive_remove_env_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """remove option 'env=' from the virt-who configuration file and without any error @@ -445,8 +387,6 @@ def test_positive_remove_env_option( 1. the option "env=" should be removed from etc/virt-who.d/virt-who.conf 2. /var/log/messages should not display warning message - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -454,26 +394,25 @@ def test_positive_remove_env_option( :BZ: 1834897 """ - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/api/test_hyperv.py b/tests/foreman/virtwho/api/test_hyperv.py index a2af53b39c0..6d93f1ddb9a 100644 --- a/tests/foreman/virtwho/api/test_hyperv.py +++ b/tests/foreman/virtwho/api/test_hyperv.py @@ -4,59 +4,29 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import 
deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_server': settings.virtwho.hyperv.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -64,75 +34,13 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 2c58b131-5d68-41d2-b804-4548f998ab5f - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - 
hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -165,7 +73,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -175,17 +83,15 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_hyperv_sca.py b/tests/foreman/virtwho/api/test_hyperv_sca.py index 5788ab68e13..715ac92cd12 100644 --- a/tests/foreman/virtwho/api/test_hyperv_sca.py +++ b/tests/foreman/virtwho/api/test_hyperv_sca.py @@ -4,60 +4,28 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_server': settings.virtwho.hyperv.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) + +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class 
TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -65,34 +33,19 @@ def test_positive_deploy_configure_by_id_script( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -102,16 +55,14 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_kubevirt.py b/tests/foreman/virtwho/api/test_kubevirt.py index 059fdab7f21..ef1e7ac578d 100644 --- a/tests/foreman/virtwho/api/test_kubevirt.py +++ b/tests/foreman/virtwho/api/test_kubevirt.py @@ -4,67 +4,30 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_guest_info - - -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'kubeconfig_path': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def clean_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('clean_host') +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) + + +@pytest.mark.usefixtures('delete_host') class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -72,75 +35,13 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - 
command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 77100dc7-644a-44a4-802a-2da562246cba - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -173,7 +74,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -183,17 +84,15 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_kubevirt_sca.py b/tests/foreman/virtwho/api/test_kubevirt_sca.py index 3dfbd125ebf..51628052cd9 100644 --- a/tests/foreman/virtwho/api/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/api/test_kubevirt_sca.py @@ -4,56 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'kubeconfig_path': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) + +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -61,34 +31,19 @@ def test_positive_deploy_configure_by_id_script( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - 
form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -98,16 +53,14 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_libvirt.py b/tests/foreman/virtwho/api/test_libvirt.py index 976a8e424a0..b551e0e81dc 100644 --- a/tests/foreman/virtwho/api/test_libvirt.py +++ b/tests/foreman/virtwho/api/test_libvirt.py @@ -4,58 +4,29 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_server': settings.virtwho.libvirt.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) + def 
test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -63,75 +34,13 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 9668b900-0d2f-42ae-b2f8-523ca292b2bd - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -164,7 +73,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -174,17 +83,15 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_libvirt_sca.py b/tests/foreman/virtwho/api/test_libvirt_sca.py index afebdf01eb9..f88edd8d478 100644 --- a/tests/foreman/virtwho/api/test_libvirt_sca.py +++ b/tests/foreman/virtwho/api/test_libvirt_sca.py @@ -4,57 +4,26 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_server': settings.virtwho.libvirt.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.libvirt.hypervisor_username, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -62,34 +31,19 @@ def test_positive_deploy_configure_by_id_script( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - 
deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -97,16 +51,14 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_nutanix.py b/tests/foreman/virtwho/api/test_nutanix.py index bdf393c88c2..e40433b1c57 100644 --- a/tests/foreman/virtwho/api/test_nutanix.py +++ b/tests/foreman/virtwho/api/test_nutanix.py @@ -4,73 +4,33 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import check_message_in_rhsm_log -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_guest_info -from robottelo.utils.virtwho import get_hypervisor_ahv_mapping - - -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_server': settings.virtwho.ahv.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_password': settings.virtwho.ahv.hypervisor_password, - 'prism_flavor': settings.virtwho.ahv.prism_flavor, - 'ahv_internal_debug': 'false', - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': 
f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def clean_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('clean_host') +from robottelo.utils.virtwho import ( + check_message_in_rhsm_log, + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_option, + get_hypervisor_ahv_mapping, +) + + +@pytest.mark.usefixtures('delete_host') class TestVirtWhoConfigforNutanix: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -78,79 +38,13 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - settings.virtwho.sku.vdc_physical, - 'NORMAL', - ), - ( - guest_name, - settings.virtwho.sku.vdc_virtual, - 'STACK_DERIVED', - ), - ] - for hostname, sku, type in hosts: - host = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - subscriptions = target_sat.api.Organization(id=default_org.id).subscriptions()[ - 'results' - ] - for item in subscriptions: - if item['type'] == type and item['product_id'] == sku: - vdc_id = item['id'] - if ( - 'hypervisor' in item - and hypervisor_name.lower() in item['hypervisor']['name'] - ): - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 7aabfa3e-0ec0-44a3-8b7c-b67476318c2c - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -187,7 +81,7 @@ def 
test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -197,25 +91,23 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, default_org, form_data, target_sat, deploy_type + self, default_org, form_data_api, target_sat, deploy_type ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" on nutanix prism central mode @@ -225,23 +117,21 @@ def test_positive_prism_central_deploy_configure_by_id_script( Config can be created and deployed The prism_central has been set in /etc/virt-who.d/virt-who.conf file - :CaseLevel: Integration - :CaseImportance: High """ - form_data['prism_flavor'] = "central" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['prism_flavor'] = "central" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' if deploy_type == "id": command = get_configure_command(virtwho_config.id, default_org.name) hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) elif deploy_type == "script": script = virtwho_config.deploy_script() hypervisor_name, guest_name = deploy_configure_by_script( script['virt_who_config_script'], - form_data['hypervisor_type'], + form_data_api['hypervisor_type'], debug=True, org=default_org.label, ) @@ -287,7 +177,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_central_prism_central_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify prism_flavor option by "PUT @@ -297,21 +187,21 @@ def test_positive_prism_central_prism_central_option( :expectedresults: prism_flavor option can be updated.
- :CaseLevel: Integration - :CaseImportance: Medium """ value = 'central' - virtwho_config.prism_flavor = value - virtwho_config.update(['prism_flavor']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.prism_flavor = value + virtwho_config_api.update(['prism_flavor']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option("prism_central", config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -325,7 +215,6 @@ def test_positive_ahv_internal_debug_option( 5. message Host UUID {system_uuid} found for VM: {guest_uuid} exists in rhsm.log 6. ahv_internal_debug has been set to true in virt-who-config-X.conf 7. warning message does not exist in log file /var/log/rhsm/rhsm.log - :CaseLevel: Integration :CaseImportance: Medium @@ -333,45 +222,46 @@ def test_positive_ahv_internal_debug_option( :customerscenario: true """ - command = get_configure_command(virtwho_config.id, default_org.name) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) result = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .ahv_internal_debug ) assert str(result) == 'False' # ahv_internal_debug is not set in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exists in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message # Update ahv_internal_debug option to true value = 'true' - virtwho_config.ahv_internal_debug = value - virtwho_config.update(['ahv_internal_debug']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.ahv_internal_debug = value + virtwho_config_api.update(['ahv_internal_debug']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_api['hypervisor_type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' result = (
target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .ahv_internal_debug ) assert str(result) == 'True' # ahv_internal_debug has been set to true in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' # check message does not exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False'
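[Editor's note: a pattern repeated throughout this patch is replacing try/except negative assertions with pytest.raises, as in the env= and ahv_internal_debug checks above. A minimal sketch of the pattern under the same assumption the tests make, namely that get_configure_option raises a generic exception naming the missing option; the helper name is hypothetical, and the # noqa: PT011 markers in the diff acknowledge that the expected exception type still needs narrowing:]

    import pytest

    from robottelo.utils.virtwho import get_configure_file, get_configure_option


    def assert_option_absent(option, config_id):
        # pytest.raises captures the exception and exposes it as exc_info.value,
        # so the message can be asserted after the block instead of inside except.
        config_file = get_configure_file(config_id)
        with pytest.raises(Exception) as exc_info:  # noqa: PT011 - exception type TBD
            get_configure_option(option, config_file)
        assert option in str(exc_info.value)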
diff --git a/tests/foreman/virtwho/api/test_nutanix_sca.py b/tests/foreman/virtwho/api/test_nutanix_sca.py index b57b0eefb8c..14b6d2a04fd 100644 --- a/tests/foreman/virtwho/api/test_nutanix_sca.py +++ b/tests/foreman/virtwho/api/test_nutanix_sca.py @@ -4,61 +4,29 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :Team: Phoenix -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_server': settings.virtwho.ahv.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_password': settings.virtwho.ahv.hypervisor_password, - 'prism_flavor': settings.virtwho.ahv.prism_flavor, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) + +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -66,34 +34,19 @@ def test_positive_deploy_configure_by_id_script( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -103,24 +56,22 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, target_sat, deploy_type + self, module_sca_manifest_org, form_data_api, target_sat, deploy_type ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" on nutanix prism central mode @@ -130,23 +81,24 @@ def test_positive_prism_central_deploy_configure_by_id_script( Config can be created and deployed The prism_central has been set in /etc/virt-who.d/virt-who.conf file - :CaseLevel: Integration - :CaseImportance: High """ - form_data['prism_flavor'] = "central" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['prism_flavor'] = "central" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' if deploy_type == "id": command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, + form_data_api['hypervisor_type'], + debug=True, + org=module_sca_manifest_org.label, ) elif deploy_type == "script": script = virtwho_config.deploy_script() deploy_configure_by_script( script['virt_who_config_script'], - form_data['hypervisor_type'], + form_data_api['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -162,7 +114,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_central_prism_central_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify prism_flavor option by "PUT @@ -172,16 +124,14 @@ def test_positive_prism_central_prism_central_option( :expectedresults: prism_flavor option can be updated.
- :CaseLevel: Integration - :CaseImportance: Medium """ value = 'central' - virtwho_config.prism_flavor = value - virtwho_config.update(['prism_flavor']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.prism_flavor = value + virtwho_config_api.update(['prism_flavor']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option("prism_central", config_file) == 'true' diff --git a/tests/foreman/virtwho/cli/test_esx.py b/tests/foreman/virtwho/cli/test_esx.py index dec037c6058..c18ffb3ab30 100644 --- a/tests/foreman/virtwho/cli/test_esx.py +++ b/tests/foreman/virtwho/cli/test_esx.py @@ -4,68 +4,38 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import re +from fauxfactory import gen_string import pytest import requests -from fauxfactory import gen_string -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.utils.virtwho import create_http_proxy -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import hypervisor_json_create -from robottelo.utils.virtwho import virtwho_package_locked - - -@pytest.fixture() -def form_data(target_sat, default_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.esx.hypervisor_type, - 'hypervisor-server': settings.virtwho.esx.hypervisor_server, - 'organization-id': default_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.esx.hypervisor_username, - 'hypervisor-password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + create_http_proxy, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_option, + hypervisor_json_create, + virtwho_package_locked, +) class TestVirtWhoConfigforEsx: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, target_sat, virtwho_config_cli, deploy_type_cli ): """Verify " hammer virt-who-config deploy" @@ 
-73,16 +43,11 @@ def test_positive_deploy_configure_by_id( :expectedresults: Config can be created and deployed - :CaseLevel: Integration - :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + hypervisor_name, guest_name = deploy_type_cli + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @@ -113,70 +78,17 @@ def test_positive_deploy_configure_by_id( assert result.strip() == 'Subscription attached to the host successfully.' @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify " hammer virt-who-config fetch" - - :id: 6aaffaeb-aaf2-42cf-b0dc-ca41a53d42a6 - - :expectedresults: Config can be created, fetch and deploy - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config['status'] == 'No Report Yet' - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['status'] - assert virt_who_instance == 'OK' - hosts = [ - ( - hypervisor_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - result = target_sat.cli.Host.subscription_attach( - {'host-id': host['id'], 'subscription-id': vdc_id} - ) - assert result.strip() == 'Subscription attached to the host successfully.' - - @pytest.mark.tier2 - def test_positive_debug_option(self, default_org, form_data, target_sat): + def test_positive_debug_option(self, default_org, form_data_cli, target_sat): """Verify debug option by hammer virt-who-config update" :id: c98bc518-828c-49ba-a644-542db3190263 :expectedresults: debug option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - assert virtwho_config['name'] == form_data['name'] + virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] + assert virtwho_config['name'] == form_data_cli['name'] new_name = gen_string('alphanumeric') target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'new-name': new_name}) virt_who_instance_name = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ @@ -188,20 +100,20 @@ def test_positive_debug_option(self, default_org, form_data, target_sat): target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'debug': key}) command = get_configure_command(virtwho_config['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 - def test_positive_interval_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_interval_option( + self, default_org, form_data_cli, virtwho_config_cli, target_sat + ): """Verify interval option by hammer virt-who-config update" :id: 5d558bca-534c-4bd4-b401-a0c362033c57 :expectedresults: interval option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ options = { @@ -215,16 +127,16 @@ def test_positive_interval_option(self, default_org, form_data, virtwho_config, '4320': '259200', } for key, value in sorted(options.items(), key=lambda item: int(item[0])): - target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'interval': key}) - command = get_configure_command(virtwho_config['id'], default_org.name) + target_sat.cli.VirtWhoConfig.update({'id': virtwho_config_cli['id'], 'interval': key}) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -232,104 +144,120 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ # esx and rhevm support hwuuid option values = ['uuid', 'hostname', 'hwuuid'] for value in values: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 - def test_positive_filter_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_filter_option( + self, default_org, form_data_cli, virtwho_config_cli, target_sat + ): """Verify filter option by hammer virt-who-config update" :id: aaf45c5e-9504-47ce-8f25-b8073c2de036 :expectedresults: filter and filter_hosts can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ regex = '.*redhat.com' - whitelist = {'id': virtwho_config['id'], 'filtering-mode': 'whitelist', 'whitelist': regex} - blacklist = {'id': virtwho_config['id'], 'filtering-mode': 'blacklist', 'blacklist': regex} + whitelist = { + 'id': virtwho_config_cli['id'], + 'filtering-mode': 'whitelist', + 'whitelist': regex, + } + blacklist = { + 'id': virtwho_config_cli['id'], + 'filtering-mode': 'blacklist', + 'blacklist': regex, + } # esx support filter-host-parents and exclude-host-parents options whitelist['filter-host-parents'] = regex blacklist['exclude-host-parents'] = regex - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) # Update Whitelist and check the result target_sat.cli.VirtWhoConfig.update(whitelist) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['filtering'] == 'Whitelist' assert result['connection']['filtered-hosts'] == regex assert result['connection']['filter-host-parents'] == regex - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) assert get_configure_option('filter_hosts', config_file) == regex assert get_configure_option('filter_host_parents', config_file) == regex # Update Blacklist and check the result target_sat.cli.VirtWhoConfig.update(blacklist) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['filtering'] == 'Blacklist' assert result['connection']['excluded-hosts'] == regex assert result['connection']['exclude-host-parents'] == regex - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) + 
deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) assert get_configure_option('exclude_hosts', config_file) == regex assert get_configure_option('exclude_host_parents', config_file) == regex @pytest.mark.tier2 - def test_positive_proxy_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_proxy_option( + self, default_org, default_location, form_data_cli, virtwho_config_cli, target_sat + ): """Verify http_proxy option by hammer virt-who-config update" :id: 409d108e-e814-482b-93ed-09db89d21dda :expectedresults: http_proxy and no_proxy option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1902199 """ # Check the https proxy option, update it via http proxy name - https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy(org=default_org) + https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy( + org=default_org, location=default_location + ) no_proxy = 'test.satellite.com' target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'http-proxy': https_proxy_name, 'no-proxy': no_proxy} + {'id': virtwho_config_cli['id'], 'http-proxy': https_proxy_name, 'no-proxy': no_proxy} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['http-proxy']['http-proxy-name'] == https_proxy_name assert result['connection']['ignore-proxy'] == no_proxy - command = get_configure_command(virtwho_config['id'], default_org.name) - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy # Check the http proxy option, update it via http proxy id http_proxy_url, http_proxy_name, http_proxy_id = create_http_proxy( - http_type='http', org=default_org + http_type='http', org=default_org, location=default_location ) target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'http-proxy-id': http_proxy_id} + {'id': virtwho_config_cli['id'], 'http-proxy-id': http_proxy_id} + ) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label ) - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url @pytest.mark.tier2 - def test_positive_rhsm_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_rhsm_option(self, default_org, form_data_cli, virtwho_config_cli, target_sat): """Verify rhsm options in the configure file" :id: b5b93d4d-e780-41c0-9eaa-2407cc1dcc9b @@ -338,15 +266,15 @@ def test_positive_rhsm_option(self, default_org, form_data, virtwho_config, targ rhsm_hostname, rhsm_prefix are expected rhsm_username is not a login account - :CaseLevel: Integration - :CaseImportance: Medium """ - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) + config_file = get_configure_file(virtwho_config_cli['id']) + command = 
get_configure_command(virtwho_config_cli['id'], default_org.name) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) rhsm_username = get_configure_option('rhsm_username', config_file) - assert not User.exists(search=('login', rhsm_username)) + assert not target_sat.cli.User.exists(search=('login', rhsm_username)) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert get_configure_option('rhsm_prefix', config_file) == '/rhsm' @@ -359,8 +287,6 @@ def test_positive_post_hypervisors(self, function_org, target_sat): :expectedresults: hypervisor/guest json can be posted and the task is success status - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -380,7 +306,7 @@ def test_positive_post_hypervisors(self, function_org, target_sat): @pytest.mark.tier2 def test_positive_foreman_packages_protection( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """foreman-protector should allow virt-who to be installed @@ -390,8 +316,6 @@ def test_positive_foreman_packages_protection( virt-who packages can be installed the virt-who plugin can be deployed successfully - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -399,16 +323,18 @@ :BZ: 1783987 """ virtwho_package_locked() - command = get_configure_command(virtwho_config['id'], default_org.name) - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + command = get_configure_command(virtwho_config_cli['id'], default_org.name) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, default_org, form_data, target_sat + self, default_org, form_data_cli, target_sat ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -416,8 +345,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -425,8 +352,8 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor-password'] = "Tes't" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] + form_data_cli['hypervisor-password'] = "Tes't" + virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] assert virtwho_config['status'] == 'No Report Yet' command = get_configure_command(virtwho_config['id'], default_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -438,11 +365,11 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( == settings.virtwho.esx.hypervisor_username ) target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not 
target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) # check the hypervisor password contains backtick - form_data['hypervisor-password'] = r"my\`password" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] + form_data_cli['hypervisor-password'] = r"my\`password" + virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] assert virtwho_config['status'] == 'No Report Yet' command = get_configure_command(virtwho_config['id'], default_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -455,7 +382,9 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) @pytest.mark.tier2 - def test_positive_remove_env_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_remove_env_option( + self, default_org, form_data_cli, virtwho_config_cli, target_sat + ): """remove option 'env=' from the virt-who configuration file and without any error :id: 509add77-dce7-4ba4-b9e5-2a5818c39731 @@ -464,32 +393,29 @@ def test_positive_remove_env_option(self, default_org, form_data, virtwho_config the option "env=" should be removed from etc/virt-who.d/virt-who.conf /var/log/messages should not display warning message - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1834897 :customerscenario: true """ - command = get_configure_command(virtwho_config['id'], default_org.name) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label + command, form_data_cli['hypervisor-type'], debug=True, org=default_org.label ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/cli/test_esx_sca.py b/tests/foreman/virtwho/cli/test_esx_sca.py index 4a6625d1659..6b15b6803f9 100644 --- a/tests/foreman/virtwho/cli/test_esx_sca.py +++ b/tests/foreman/virtwho/cli/test_esx_sca.py @@ -4,100 +4,61 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :Team: Phoenix -:TestType: Functional - -:Upstream: No """ import re +from fauxfactory import gen_string import pytest import requests -from fauxfactory import gen_string -from robottelo.cli.user import User from robottelo.config import settings -from robottelo.utils.virtwho import create_http_proxy -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho 
import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import hypervisor_json_create -from robottelo.utils.virtwho import virtwho_package_locked - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.esx.hypervisor_type, - 'hypervisor-server': settings.virtwho.esx.hypervisor_server, - 'organization-id': module_sca_manifest_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.esx.hypervisor_username, - 'hypervisor-password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + create_http_proxy, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_option, + hypervisor_json_create, + virtwho_package_locked, +) class TestVirtWhoConfigforEsx: @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.parametrize('deploy_type', ['id', 'script']) - def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + @pytest.mark.parametrize( + 'deploy_type_cli', + ['id', 'script', 'name', 'location-id', 'organization-title'], + indirect=True, + ) + def test_positive_deploy_configure_by_id_script_name_locationid_organizationtitle( + self, module_sca_manifest_org, target_sat, virtwho_config_cli, deploy_type_cli ): - """Verify "hammer virt-who-config deploy" + """Verify "hammer virt-who-config deploy & fetch" :id: 04f2cef8-c88e-4a21-9d2f-c17238eea308 - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. 
Config can be created, fetch and deploy :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - if deploy_type == "id": - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -105,26 +66,24 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 def test_positive_debug_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify debug option by hammer virt-who-config update" @@ -132,23 +91,21 @@ def test_positive_debug_option( :expectedresults: debug option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ - assert virtwho_config['name'] == form_data['name'] + assert virtwho_config_cli['name'] == form_data_cli['name'] options = {'false': '0', 'no': '0', 'true': '1', 'yes': '1'} for key, value in options.items(): - target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'debug': key}) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + target_sat.cli.VirtWhoConfig.update({'id': virtwho_config_cli['id'], 'debug': key}) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_name_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify debug option by hammer virt-who-config update" @@ -156,24 +113,22 @@ def test_positive_name_option( :expectedresults: name option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ - assert virtwho_config['name'] == form_data['name'] + assert virtwho_config_cli['name'] == form_data_cli['name'] new_name = gen_string('alphanumeric') - target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'new-name': new_name}) - virt_who_instance_name = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['name'] + target_sat.cli.VirtWhoConfig.update({'id': virtwho_config_cli['id'], 'new-name': new_name}) + virt_who_instance_name = target_sat.cli.VirtWhoConfig.info( + {'id': virtwho_config_cli['id']} + )['general-information']['name'] assert virt_who_instance_name == new_name target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'new-name': form_data['name']} + {'id': virtwho_config_cli['id'], 'new-name': form_data_cli['name']} ) @pytest.mark.tier2 def test_positive_interval_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify interval option by hammer virt-who-config update" @@ -181,8 +136,6 @@ def test_positive_interval_option( :expectedresults: interval option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ options = { @@ -196,10 +149,10 @@ def test_positive_interval_option( '4320': '259200', } for key, value in options.items(): - target_sat.cli.VirtWhoConfig.update({'id': virtwho_config['id'], 'interval': key}) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + target_sat.cli.VirtWhoConfig.update({'id': virtwho_config_cli['id'], 'interval': key}) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @@ -209,8 +162,8 @@ def test_positive_interval_option( def test_positive_filter_option( self, module_sca_manifest_org, - form_data, - virtwho_config, + form_data_cli, + virtwho_config_cli, target_sat, filter_type, option_type, @@ -223,8 +176,6 @@ def test_positive_filter_option( 1. 
filter and filter_hosts can be updated. 2. create virt-who config with filter and filter_hosts options work well. - :CaseLevel: Integration - :CaseImportance: Medium """ regex = '.*redhat.com' @@ -232,7 +183,7 @@ def test_positive_filter_option( # Update whitelist or blacklist and check the result if filter_type == "whitelist": whitelist = { - 'id': virtwho_config['id'], + 'id': virtwho_config_cli['id'], 'filtering-mode': 'whitelist', 'whitelist': regex, } @@ -241,17 +192,17 @@ def test_positive_filter_option( target_sat.cli.VirtWhoConfig.update(whitelist) elif filter_type == "blacklist": blacklist = { - 'id': virtwho_config['id'], + 'id': virtwho_config_cli['id'], 'filtering-mode': 'blacklist', 'blacklist': regex, } blacklist['exclude-host-parents'] = regex target_sat.cli.VirtWhoConfig.update(blacklist) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) if filter_type == "whitelist": assert result['connection']['filtering'] == 'Whitelist' @@ -267,14 +218,16 @@ def test_positive_filter_option( assert get_configure_option('exclude_host_parents', config_file) == regex elif option_type == "create": # Create a new virt-who config with filtering-mode whitelist or blacklist - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) - form_data['filtering-mode'] = filter_type - form_data[filter_type] = regex - form_data['filter-host-parents'] = regex - form_data['exclude-host-parents'] = regex - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) + form_data_cli['filtering-mode'] = filter_type + form_data_cli[filter_type] = regex + form_data_cli['filter-host-parents'] = regex + form_data_cli['exclude-host-parents'] = regex + virtwho_config_cli = target_sat.cli.VirtWhoConfig.create(form_data_cli)[ + 'general-information' + ] + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) if filter_type == "whitelist": assert result['connection']['filtering'] == 'Whitelist' assert result['connection']['filtered-hosts'] == regex @@ -283,11 +236,11 @@ def test_positive_filter_option( assert result['connection']['filtering'] == 'Blacklist' assert result['connection']['excluded-hosts'] == regex assert result['connection']['exclude-host-parents'] == regex - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) - config_file = 
get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) if filter_type == "whitelist": assert get_configure_option('filter_hosts', config_file) == regex assert get_configure_option('filter_host_parents', config_file) == regex @@ -297,7 +250,12 @@ def test_positive_filter_option( @pytest.mark.tier2 def test_positive_proxy_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, + module_sca_manifest_org, + default_location, + form_data_cli, + virtwho_config_cli, + target_sat, ): """Verify http_proxy option by hammer virt-who-config update" @@ -307,75 +265,77 @@ def test_positive_proxy_option( 1. http_proxy and no_proxy option can be updated. 2. create virt-who config with http_proxy and no_proxy options work well. - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1902199 """ # Check the https proxy option, update it via http proxy name https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy( - org=module_sca_manifest_org + org=module_sca_manifest_org, location=default_location ) no_proxy = 'test.satellite.com' target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'http-proxy': https_proxy_name, 'no-proxy': no_proxy} + {'id': virtwho_config_cli['id'], 'http-proxy': https_proxy_name, 'no-proxy': no_proxy} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['http-proxy']['http-proxy-name'] == https_proxy_name assert result['connection']['ignore-proxy'] == no_proxy - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy # Check the http proxy option, update it via http proxy id http_proxy_url, http_proxy_name, http_proxy_id = create_http_proxy( - http_type='http', org=module_sca_manifest_org + http_type='http', org=module_sca_manifest_org, location=default_location ) target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'http-proxy-id': http_proxy_id} + {'id': virtwho_config_cli['id'], 'http-proxy-id': http_proxy_id} ) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) # Check the http proxy option, create virt-who config via http proxy id - form_data['http-proxy-id'] = http_proxy_id - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + form_data_cli['http-proxy-id'] = http_proxy_id + virtwho_config_cli = target_sat.cli.VirtWhoConfig.create(form_data_cli)[ + 
'general-information' + ] + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) # Check the https proxy option, create virt-who config via http proxy name no_proxy = 'test.satellite.com' - form_data['http-proxy'] = https_proxy_name - form_data['no-proxy'] = no_proxy - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + form_data_cli['http-proxy'] = https_proxy_name + form_data_cli['no-proxy'] = no_proxy + virtwho_config_cli = target_sat.cli.VirtWhoConfig.create(form_data_cli)[ + 'general-information' + ] + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['http-proxy']['http-proxy-name'] == https_proxy_name assert result['connection']['ignore-proxy'] == no_proxy - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) - get_configure_file(virtwho_config['id']) + get_configure_file(virtwho_config_cli['id']) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy @pytest.mark.tier2 def test_positive_rhsm_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify rhsm options in the configure file" @@ -385,17 +345,15 @@ def test_positive_rhsm_option( 1. rhsm_hostname, rhsm_prefix are expected 2. 
rhsm_username is not a login account - :CaseLevel: Integration - :CaseImportance: Medium """ - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) rhsm_username = get_configure_option('rhsm_username', config_file) - assert not User.exists(search=('login', rhsm_username)) + assert not target_sat.cli.User.exists(search=('login', rhsm_username)) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert get_configure_option('rhsm_prefix', config_file) == '/rhsm' @@ -408,8 +366,6 @@ def test_positive_post_hypervisors(self, function_org, target_sat): :expectedresults: hypervisor/guest json can be posted and the task is success status - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -429,7 +385,7 @@ def test_positive_post_hypervisors(self, function_org, target_sat): @pytest.mark.tier2 def test_positive_foreman_packages_protection( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """foreman-protector should allow virt-who to be installed @@ -439,8 +395,6 @@ def test_positive_foreman_packages_protection( 1. virt-who packages can be installed 2. the virt-who plugin can be deployed successfully - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium @@ -448,18 +402,18 @@ def test_positive_foreman_packages_protection( :BZ: 1783987 """ virtwho_package_locked() - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, module_sca_manifest_org, form_data, target_sat + self, module_sca_manifest_org, form_data_cli, target_sat ): """Verify "hammer virt-who-config deploy hypervisor with special characters" @@ -467,8 +421,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -476,40 +428,44 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor-password'] = "Tes't" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + form_data_cli['hypervisor-password'] = "Tes't" + virtwho_config_cli = 
target_sat.cli.VirtWhoConfig.create(form_data_cli)[ + 'general-information' + ] + assert virtwho_config_cli['status'] == 'No Report Yet' + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) assert deploy_status == 'Finished successfully' - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert ( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) # check the hypervisor password contains backtick - form_data['hypervisor-password'] = r"my\`password" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + form_data_cli['hypervisor-password'] = r"my\`password" + virtwho_config_cli = target_sat.cli.VirtWhoConfig.create(form_data_cli)[ + 'general-information' + ] + assert virtwho_config_cli['status'] == 'No Report Yet' + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) assert deploy_status == 'Finished successfully' - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert ( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config_cli['name']}) + assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data_cli['name'])) @pytest.mark.tier2 def test_positive_remove_env_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """remove option 'env=' from the virt-who configuration file and without any error @@ -519,32 +475,29 @@ def test_positive_remove_env_option( 1. the option "env=" should be removed from etc/virt-who.d/virt-who.conf 2. 
/var/log/messages should not display warning message - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1834897 :customerscenario: true """ - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], debug=True, org=module_sca_manifest_org.label ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/cli/test_hyperv.py b/tests/foreman/virtwho/cli/test_hyperv.py index 3696554af50..657ee3e04f6 100644 --- a/tests/foreman/virtwho/cli/test_hyperv.py +++ b/tests/foreman/virtwho/cli/test_hyperv.py @@ -4,121 +4,43 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, default_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor-server': settings.virtwho.hyperv.hypervisor_server, - 'organization-id': default_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor-password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, 
virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: 7cc0ad4f-e185-4d63-a2f5-1cb0245faa6c - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. Config can be created, fetch and deploy :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['status'] - assert virt_who_instance == 'OK' - hosts = [ - (hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'), - (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - result = target_sat.cli.Host.subscription_attach( - {'host-id': host['id'], 'subscription-id': vdc_id} - ) - assert result.strip() == 'Subscription attached to the host successfully.' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify " hammer virt-who-config fetch" - - :id: 22dc8068-c843-4ca0-acbe-0b2aef8ece31 - - :expectedresults: Config can be created, fetch and deploy - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config['status'] == 'No Report Yet' - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + hypervisor_name, guest_name = deploy_type_cli + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @@ -144,7 +66,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -152,20 +74,18 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
- :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_hyperv_sca.py b/tests/foreman/virtwho/cli/test_hyperv_sca.py index 4fa1c0d3ee5..7c59213485d 100644 --- a/tests/foreman/virtwho/cli/test_hyperv_sca.py +++ b/tests/foreman/virtwho/cli/test_hyperv_sca.py @@ -4,92 +4,49 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor-server': settings.virtwho.hyperv.hypervisor_server, - 'organization-id': module_sca_manifest_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor-password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: ba51dd0e-39da-4afd-b7e1-d470082024ba - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. 
Config can be created and deployed + 2. Config can be created, fetch and deploy :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - if deploy_type == "id": - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -97,19 +54,17 @@ :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_kubevirt.py b/tests/foreman/virtwho/cli/test_kubevirt.py index 8ab1fa93e5c..f29a22e4a16 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt.py +++ b/tests/foreman/virtwho/cli/test_kubevirt.py @@ -4,119 +4,43 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, default_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.kubevirt.hypervisor_type, - 'organization-id': default_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 
'kubeconfig-path': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: d0b109f5-2699-43e4-a6cd-d682204d97a7 - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['status'] - assert virt_who_instance == 'OK' - hosts = [ - (hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'), - (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - result = target_sat.cli.Host.subscription_attach( - {'host-id': host['id'], 'subscription-id': vdc_id} - ) - assert result.strip() == 'Subscription attached to the host successfully.'
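# A minimal sketch of the `deploy_type_cli` fixture these tests now consume via
# `indirect=True`. The fixture itself is defined outside this diff (presumably in
# tests/foreman/virtwho/conftest.py); this reconstruction reuses the if/elif bodies
# removed above, and the fixture dependencies (in particular `org_module`) are
# assumptions, not the project's actual signature.
import pytest

from robottelo.utils.virtwho import (
    deploy_configure_by_command,
    deploy_configure_by_script,
    get_configure_command,
)


@pytest.fixture
def deploy_type_cli(request, org_module, form_data_cli, virtwho_config_cli, target_sat):
    """Deploy the config by hammer command ('id') or by fetched script ('script')."""
    if request.param == 'id':
        command = get_configure_command(virtwho_config_cli['id'], org_module.name)
        hypervisor_name, guest_name = deploy_configure_by_command(
            command, form_data_cli['hypervisor-type'], debug=True, org=org_module.label
        )
    elif request.param == 'script':
        script = target_sat.cli.VirtWhoConfig.fetch(
            {'id': virtwho_config_cli['id']}, output_format='base'
        )
        hypervisor_name, guest_name = deploy_configure_by_script(
            script, form_data_cli['hypervisor-type'], debug=True, org=org_module.label
        )
    return hypervisor_name, guest_name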
- - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify " hammer virt-who-config fetch" - - :id: 273df8e0-5ef5-47d9-9567-543157be7dd8 - - :expectedresults: Config can be created, fetch and deploy - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config['status'] == 'No Report Yet' - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + hypervisor_name, guest_name = deploy_type_cli + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @@ -142,7 +66,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -150,20 +74,18 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_kubevirt_sca.py b/tests/foreman/virtwho/cli/test_kubevirt_sca.py index a22d69513e9..d682b5f9159 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/cli/test_kubevirt_sca.py @@ -4,107 +4,63 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.kubevirt.hypervisor_type, - 'organization-id': module_sca_manifest_org.id, - 'filtering-mode': 'none', - 
'satellite-url': target_sat.hostname, - 'kubeconfig-path': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: e0162dba-a50f-4356-9dc2-c928a1bed15c - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - if deploy_type == "id": - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" :id: b60f449d-6698-4a3a-be07-7440c2d9ba20 :expectedresults: hypervisor_id option can be updated. 
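# For readers unfamiliar with the `indirect=True` pattern used throughout this
# refactor: the parametrize values are routed into the named fixture as
# request.param instead of being passed straight to the test. A self-contained
# illustration (generic pytest semantics, not project code):
import pytest


@pytest.fixture
def deploy_type(request):
    # request.param carries the current parametrize value ('id' or 'script')
    return f'deployed-by-{request.param}'


@pytest.mark.parametrize('deploy_type', ['id', 'script'], indirect=True)
def test_indirect_param(deploy_type):
    assert deploy_type in ('deployed-by-id', 'deployed-by-script')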
- :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_libvirt.py b/tests/foreman/virtwho/cli/test_libvirt.py index d8d2f7b2c19..70ac4056fa3 100644 --- a/tests/foreman/virtwho/cli/test_libvirt.py +++ b/tests/foreman/virtwho/cli/test_libvirt.py @@ -4,120 +4,43 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, default_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor-server': settings.virtwho.libvirt.hypervisor_server, - 'organization-id': default_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: e66bf88a-bd4e-409a-91a8-bc5e005d95dd - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. 
Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['status'] - assert virt_who_instance == 'OK' - hosts = [ - (hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'), - (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - result = target_sat.cli.Host.subscription_attach( - {'host-id': host['id'], 'subscription-id': vdc_id} - ) - assert result.strip() == 'Subscription attached to the host successfully.' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify " hammer virt-who-config fetch" - - :id: bd5c52ab-3dbd-4cf1-9837-b8eb6233f1cd - - :expectedresults: Config can be created, fetch and deploy - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config['status'] == 'No Report Yet' - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + hypervisor_name, guest_name = deploy_type_cli + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @@ -143,7 +66,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -151,20 +74,18 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
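# The docstrings in this diff refer to the deployed file as virt-who-config-X.conf,
# which suggests get_configure_file(id) simply builds the per-config path under
# /etc/virt-who.d/. A stand-in with that assumed contract (the real helper lives in
# robottelo.utils.virtwho and may differ):
def get_configure_file_sketch(config_id):
    # Assumption: one config file per virt-who configuration, keyed by its id.
    return f'/etc/virt-who.d/virt-who-config-{config_id}.conf'


assert get_configure_file_sketch(7) == '/etc/virt-who.d/virt-who-config-7.conf'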
- :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_libvirt_sca.py b/tests/foreman/virtwho/cli/test_libvirt_sca.py index a4d0c4d287d..2bfeda30801 100644 --- a/tests/foreman/virtwho/cli/test_libvirt_sca.py +++ b/tests/foreman/virtwho/cli/test_libvirt_sca.py @@ -4,89 +4,46 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor-server': settings.virtwho.libvirt.hypervisor_server, - 'organization-id': module_sca_manifest_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.libvirt.hypervisor_username, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify " hammer virt-who-config deploy" + """Verify " hammer virt-who-config deploy & fetch" :id: 53143fc3-97e0-4403-8114-4d7f20fde98e - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. 
Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - if deploy_type == "id": - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -94,19 +51,17 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/cli/test_nutanix.py b/tests/foreman/virtwho/cli/test_nutanix.py index e312f98fc86..b5478912ab7 100644 --- a/tests/foreman/virtwho/cli/test_nutanix.py +++ b/tests/foreman/virtwho/cli/test_nutanix.py @@ -4,125 +4,46 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import check_message_in_rhsm_log -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_hypervisor_ahv_mapping - - -@pytest.fixture() -def form_data(target_sat, default_org): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor-server': settings.virtwho.ahv.hypervisor_server, - 
'organization-id': default_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor-password': settings.virtwho.ahv.hypervisor_password, - 'prism-flavor': settings.virtwho.ahv.prism_flavor, - 'ahv-internal-debug': 'false', - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) +from robottelo.utils.virtwho import ( + check_message_in_rhsm_log, + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_option, + get_hypervisor_ahv_mapping, +) class TestVirtWhoConfigforNutanix: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id( - self, default_org, form_data, virtwho_config, target_sat + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify "hammer virt-who-config deploy" + """Verify "hammer virt-who-config deploy & fetch" :id: 129d8e57-b4fc-4d95-ad33-5aa6ec6fb146 - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config['status'] == 'No Report Yet' - command = get_configure_command(virtwho_config['id'], default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ - 'general-information' - ]['status'] - assert virt_who_instance == 'OK' - hosts = [ - (hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'), - (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - result = target_sat.cli.Host.subscription_attach( - {'host-id': host['id'], 'subscription-id': vdc_id} - ) - assert result.strip() == 'Subscription attached to the host successfully.' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "hammer virt-who-config fetch" - - :id: d707fac0-f2b1-4493-b083-cf1edc231691 - - :expectedresults: Config can be created, fetch and deploy - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. 
Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=default_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + hypervisor_name, guest_name = deploy_type_cli + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @@ -148,7 +69,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -156,28 +77,26 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ values = ['uuid', 'hostname'] for value in values: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=default_org.label + command, form_data_cli['hypervisor-type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, default_org, form_data, target_sat, deploy_type + self, default_org, form_data_cli, target_sat, deploy_type ): """Verify "hammer virt-who-config deploy" on nutanix prism central mode @@ -187,24 +106,22 @@ def test_positive_prism_central_deploy_configure_by_id_script( Config can be created and deployed The prism_central has been set in /etc/virt-who.d/vir-who.conf file - :CaseLevel: Integration - :CaseImportance: High """ - form_data['prism-flavor'] = "central" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] + form_data_cli['prism-flavor'] = "central" + virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] assert virtwho_config['status'] == 'No Report Yet' if deploy_type == "id": command = get_configure_command(virtwho_config['id'], default_org.name) hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label + command, form_data_cli['hypervisor-type'], debug=True, org=default_org.label ) elif deploy_type == "script": script = target_sat.cli.VirtWhoConfig.fetch( {'id': virtwho_config['id']}, output_format='base' ) hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, 
org=default_org.label + script, form_data_cli['hypervisor-type'], debug=True, org=default_org.label ) # Check the option "prism_central=true" should be set in etc/virt-who.d/virt-who.conf config_file = get_configure_file(virtwho_config['id']) @@ -235,7 +152,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_central_prism_central_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify prism_central option by hammer virt-who-config update" @@ -243,25 +160,27 @@ def test_positive_prism_central_prism_central_option( :expectedresults: prism_central option can be updated. - :CaseLevel: Integration - :CaseImportance: Medium """ value = 'central' result = target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'prism-flavor': value} + {'id': virtwho_config_cli['id'], 'prism-flavor': value} + ) + assert ( + result[0]['message'] == f"Virt Who configuration [{virtwho_config_cli['name']}] updated" ) - assert result[0]['message'] == f"Virt Who configuration [{virtwho_config['name']}] updated" - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['general-information']['ahv-prism-flavor'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], default_org.name) - deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) + deploy_configure_by_command( + command, form_data_cli['hypervisor-type'], org=default_org.label + ) assert get_configure_option("prism_central", config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -275,27 +194,26 @@ def test_positive_ahv_internal_debug_option( 5. message Host UUID {system_uuid} found for VM: {guest_uuid} exist in rhsm.log 6. ahv_internal_debug bas been set to true in virt-who-config-X.conf 7. 
warning message does not exist in log file /var/log/rhsm/rhsm.log - :CaseLevel: Integration :CaseImportance: Medium :BZ: 2141719 + :customerscenario: true """ - command = get_configure_command(virtwho_config['id'], default_org.name) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label + command, form_data_cli['hypervisor-type'], debug=True, org=default_org.label ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['general-information']['enable-ahv-debug'] == 'no' # ahv_internal_debug does not set in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message @@ -303,18 +221,22 @@ def test_positive_ahv_internal_debug_option( # Update ahv_internal_debug option to true value = 'true' result = target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'ahv-internal-debug': value} + {'id': virtwho_config_cli['id'], 'ahv-internal-debug': value} ) - assert result[0]['message'] == f"Virt Who configuration [{virtwho_config['name']}] updated" - command = get_configure_command(virtwho_config['id'], default_org.name) + assert ( + result[0]['message'] == f"Virt Who configuration [{virtwho_config_cli['name']}] updated" + ) + command = get_configure_command(virtwho_config_cli['id'], default_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=default_org.label + command, form_data_cli['hypervisor-type'], debug=True, org=default_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_cli['hypervisor-type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor-type']) == 'Host UUID found for VM' - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['general-information']['enable-ahv-debug'] == 'yes' # ahv_internal_debug bas been set to true in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config['id']) + config_file = get_configure_file(virtwho_config_cli['id']) assert get_configure_option("ahv_internal_debug", config_file) == 'true' # check message does not exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' diff --git a/tests/foreman/virtwho/cli/test_nutanix_sca.py b/tests/foreman/virtwho/cli/test_nutanix_sca.py index 84db5ce8733..360aac0f676 100644 --- a/tests/foreman/virtwho/cli/test_nutanix_sca.py +++ b/tests/foreman/virtwho/cli/test_nutanix_sca.py @@ -4,93 +4,49 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :Team: Phoenix -:TestType: Functional - :CaseImportance: High -:Upstream: No """ 
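# On the try/except-to-pytest.raises change in test_nutanix.py above: the old
# pattern passed silently when no exception was raised, while pytest.raises both
# enforces that the call raises and exposes the exception for message assertions.
# A generic illustration (ValueError stands in for the project's real exception):
import pytest


def read_missing_option():
    raise ValueError('option ahv_internal_debug is not exist or not be enabled in virt-who-config-1.conf')


def test_missing_option_raises():
    with pytest.raises(ValueError) as exc_info:
        read_missing_option()
    assert 'ahv_internal_debug' in str(exc_info.value)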
import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - sca_form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor-id': 'hostname', - 'hypervisor-type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor-server': settings.virtwho.ahv.hypervisor_server, - 'organization-id': module_sca_manifest_org.id, - 'filtering-mode': 'none', - 'satellite-url': target_sat.hostname, - 'hypervisor-username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor-password': settings.virtwho.ahv.hypervisor_password, - 'prism-flavor': settings.virtwho.ahv.prism_flavor, - } - return sca_form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] - yield virtwho_config - target_sat.cli.VirtWhoConfig.delete({'name': virtwho_config['name']}) - assert not target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_option, +) class TestVirtWhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_cli', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_cli, target_sat, deploy_type_cli ): - """Verify "hammer virt-who-config deploy" + """Verify "hammer virt-who-config deploy & fetch" :id: 71750104-b436-4ad4-9b6b-6f0fe8c3ee4c - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration + :expectedresults: + 1. Config can be created and deployed + 2. Config can be created, fetched, and deployed :CaseImportance: High """ - assert virtwho_config['status'] == 'No Report Yet' - if deploy_type == "id": - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' - ) - deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label - ) - virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[ + assert virtwho_config_cli['status'] == 'No Report Yet' + virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']})[ 'general-information' ]['status'] assert virt_who_instance == 'OK' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify hypervisor_id option by hammer virt-who-config update" @@ -98,54 +54,57 @@ def test_positive_hypervisor_id_option( :expectedresults: hypervisor_id option can be updated. 
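# get_configure_option(option, config_file), asserted all over these tests, reads
# one key back out of a deployed config. Assuming the file is INI-style (virt-who
# .conf files are), a minimal stand-in that also mirrors the error message checked
# in test_nutanix.py above; the real helper in robottelo.utils.virtwho may differ.
import configparser


def get_configure_option_sketch(option, config_file):
    parser = configparser.ConfigParser()
    parser.read(config_file)
    for section in parser.sections():
        if parser.has_option(section, option):
            return parser.get(section, option)
    # Message mirrors the env_error string asserted in the ahv_internal_debug test
    raise ValueError(f'option {option} is not exist or not be enabled in {config_file}')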
- :CaseLevel: Integration - :CaseImportance: Medium """ for value in ['uuid', 'hostname']: target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'hypervisor-id': value} + {'id': virtwho_config_cli['id'], 'hypervisor-id': value} ) - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['connection']['hypervisor-id'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, target_sat, deploy_type + self, module_sca_manifest_org, target_sat, form_data_cli, deploy_type ): - """Verify "hammer virt-who-config deploy" on nutanix prism central mode + """Verify "hammer virt-who-config deploy & fetch" on nutanix prism central mode :id: 96fd691f-5b62-469c-adc7-f2739ddf4a62 :expectedresults: 1. Config can be created and deployed 2. The prism_central has been set in /etc/virt-who.d/vir-who.conf file - - :CaseLevel: Integration + 3. Config can be created, fetched, and deployed :CaseImportance: High """ - form_data['prism-flavor'] = "central" - virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] + form_data_cli['prism-flavor'] = "central" + virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data_cli)['general-information'] assert virtwho_config['status'] == 'No Report Yet' if deploy_type == "id": command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label + command, + form_data_cli['hypervisor-type'], + debug=True, + org=module_sca_manifest_org.label, ) elif deploy_type == "script": script = target_sat.cli.VirtWhoConfig.fetch( - {'id': virtwho_config['id']}, output_format='base' ) deploy_configure_by_script( - script, form_data['hypervisor-type'], debug=True, org=module_sca_manifest_org.label + script, + form_data_cli['hypervisor-type'], + debug=True, + org=module_sca_manifest_org.label, ) # Check the option "prism_central=true" should be set in etc/virt-who.d/virt-who.conf config_file = get_configure_file(virtwho_config['id']) @@ -157,7 +116,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_element_prism_central_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_cli, virtwho_config_cli, target_sat ): """Verify prism_central option by hammer virt-who-config update" @@ -165,20 +124,20 @@ def test_positive_prism_element_prism_central_option( :expectedresults: prism_central option can be updated. 
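# The prism-central variants above tweak the form returned by the form_data_cli
# fixture before creating the config. Because the fixture returns a plain dict,
# a per-test override is just item assignment; a toy equivalent (values here are
# illustrative, not the project's defaults):
import pytest


@pytest.fixture
def form_data():
    return {'prism-flavor': 'element', 'hypervisor-id': 'hostname'}


def test_central_override(form_data):
    form_data['prism-flavor'] = 'central'  # only this test sees the override
    assert form_data['prism-flavor'] == 'central'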
- :CaseLevel: Integration - :CaseImportance: Medium """ value = 'central' result = target_sat.cli.VirtWhoConfig.update( - {'id': virtwho_config['id'], 'prism-flavor': value} + {'id': virtwho_config_cli['id'], 'prism-flavor': value} + ) + assert ( + result[0]['message'] == f"Virt Who configuration [{virtwho_config_cli['name']}] updated" ) - assert result[0]['message'] == f"Virt Who configuration [{virtwho_config['name']}] updated" - result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']}) + result = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config_cli['id']}) assert result['general-information']['ahv-prism-flavor'] == value - config_file = get_configure_file(virtwho_config['id']) - command = get_configure_command(virtwho_config['id'], module_sca_manifest_org.name) + config_file = get_configure_file(virtwho_config_cli['id']) + command = get_configure_command(virtwho_config_cli['id'], module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor-type'], org=module_sca_manifest_org.label + command, form_data_cli['hypervisor-type'], org=module_sca_manifest_org.label ) assert get_configure_option("prism_central", config_file) == 'true' diff --git a/tests/foreman/virtwho/conftest.py b/tests/foreman/virtwho/conftest.py index 81709b551ae..a83875fe130 100644 --- a/tests/foreman/virtwho/conftest.py +++ b/tests/foreman/virtwho/conftest.py @@ -1,6 +1,5 @@ -import pytest -from airgun.session import Session from fauxfactory import gen_string +import pytest from requests.exceptions import HTTPError from robottelo.logging import logger @@ -36,8 +35,8 @@ def module_user(request, module_target_sat, default_org, default_location): logger.warning('Unable to delete session user: %s', str(err)) -@pytest.fixture() -def session(test_name, module_user): +@pytest.fixture +def session(test_name, module_user, module_target_sat): """Session fixture which automatically initializes (but does not start!) airgun UI session and correctly passes current test name to it. Uses shared module user credentials to log in. @@ -49,9 +48,11 @@ def test_foo(session): with session: # your ui test steps here session.architecture.create({'name': 'bar'}) - """ - return Session(test_name, module_user.login, module_user.password) + with module_target_sat.ui_session( + test_name, module_user.login, module_user.password + ) as session: + yield session @pytest.fixture(scope='module') def module_user_sca(request, module_target_sat, module_org, module_location): @@ -84,8 +85,8 @@ def module_user_sca(request, module_target_sat, module_org, module_location): logger.warning('Unable to delete session user: %s', str(err)) -@pytest.fixture() -def session_sca(test_name, module_user_sca): +@pytest.fixture +def session_sca(test_name, module_user_sca, module_target_sat): """Session fixture which automatically initializes (but does not start!) airgun UI session and correctly passes current test name to it. Uses shared module user credentials to log in. 
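# On the two session fixtures in conftest.py here: returning from inside a context
# manager runs __exit__ before the caller ever uses the value, which would tear the
# UI session down before the test starts; yielding keeps it open until fixture
# teardown. A self-contained demonstration of the difference:
import pytest


class FakeSession:
    def __init__(self):
        self.closed = False

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        self.closed = True


@pytest.fixture
def session():
    with FakeSession() as s:
        yield s  # __exit__ (and closing) runs only after the test finishes


def test_session_is_open(session):
    assert session.closed is False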
@@ -97,6 +98,8 @@ def test_foo(session): with session: # your ui test steps here session.architecture.create({'name': 'bar'}) - """ - return Session(test_name, module_user_sca.login, module_user_sca.password) + with module_target_sat.ui_session( + test_name, module_user_sca.login, module_user_sca.password + ) as session_sca: + yield session_sca diff --git a/tests/foreman/virtwho/ui/test_esx.py b/tests/foreman/virtwho/ui/test_esx.py index ae69655e3e2..efd46e6deba 100644 --- a/tests/foreman/virtwho/ui/test_esx.py +++ b/tests/foreman/virtwho/ui/test_esx.py @@ -4,146 +4,79 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ from datetime import datetime -import pytest -from airgun.session import Session from fauxfactory import gen_string +import pytest from robottelo.config import settings from robottelo.utils.datafactory import valid_emails_list -from robottelo.utils.virtwho import add_configure_option -from robottelo.utils.virtwho import create_http_proxy -from robottelo.utils.virtwho import delete_configure_option -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_guest_info -from robottelo.utils.virtwho import get_virtwho_status -from robottelo.utils.virtwho import restart_virtwho_service -from robottelo.utils.virtwho import update_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.esx.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - -@pytest.fixture(autouse=True) -def clean_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('clean_host') +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + add_configure_option, + create_http_proxy, + delete_configure_option, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, + get_virtwho_status, + restart_virtwho_service, + update_configure_option, +) + + +@pytest.mark.usefixtures('delete_host') class TestVirtwhoConfigforEsx: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, session, form_data): - """Verify configure created and 
deployed with id. + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, org_session, form_data_ui, deploy_type_ui + ): + """Verify configure created and deployed with id|script. :id: 44f93ec8-a59a-42a4-ab30-edc554b022b2 :expectedresults: - 1. Config can be created and deployed by command + 1. Config can be created and deployed by command|script 2. No error msg in /var/log/rhsm/rhsm.log 3. Report is sent to satellite 4. Virtual sku can be generated and attached 5. Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, virtwho_config, session, form_data - ): - """Verify configure created and deployed with script. - - :id: d64332fb-a6e0-4864-9f8b-2406223fcdcc - - :expectedresults: - 1. Config can be created and deployed by script - 2. No error msg in /var/log/rhsm/rhsm.log - 3. Report is sent to satellite - 4. Virtual sku can be generated and attached - 5. 
Config can be deleted - - :CaseLevel: Integration - - :CaseImportance: High - """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] - vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' - vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_debug_option(self, default_org, virtwho_config, session, form_data): + def test_positive_debug_option(self, default_org, virtwho_config_ui, org_session, form_data_ui): """Verify debug checkbox and the value changes of VIRTWHO_DEBUG :id: adb435c4-d02b-47b6-89f5-dce9a4ff7939 @@ -152,27 +85,27 @@ def test_positive_debug_option(self, default_org, virtwho_config, session, form_ 1. if debug is checked, VIRTWHO_DEBUG=1 in /etc/sysconfig/virt-who 2. if debug is unchecked, VIRTWHO_DEBUG=0 in /etc/sysconfig/virt-who - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '1' - session.virtwho_configure.edit(name, {'debug': False}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'debug': False}) + results = org_session.virtwho_configure.read(name) assert results['overview']['debug'] is False deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '0' @pytest.mark.tier2 - def test_positive_interval_option(self, default_org, virtwho_config, session, form_data): + def test_positive_interval_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify interval dropdown options and the value changes of VIRTWHO_INTERVAL. 
:id: 731f8361-38d4-40b9-9530-8d785d61eaab @@ -181,11 +114,9 @@ def test_positive_interval_option(self, default_org, virtwho_config, session, fo VIRTWHO_INTERVAL can be changed in /etc/sysconfig/virt-who if the dropdown option is selected to Every 2/4/8/12/24 hours, Every 2/3 days. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) intervals = { @@ -199,16 +130,18 @@ def test_positive_interval_option(self, default_org, virtwho_config, session, fo 'Every 3 days': '259200', } for option, value in sorted(intervals.items(), key=lambda item: int(item[1])): - session.virtwho_configure.edit(name, {'interval': option}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'interval': option}) + results = org_session.virtwho_configure.read(name) assert results['overview']['interval'] == option deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: cc494bd9-51d9-452a-bfa9-5cdcafef5197 @@ -217,27 +150,27 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) # esx and rhevm support hwuuid option values = ['uuid', 'hostname', 'hwuuid'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 - def test_positive_filtering_option(self, default_org, virtwho_config, session, form_data): + def test_positive_filtering_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Filtering dropdown options. :id: e17dda14-79cd-4cd2-8f29-60970b24a905 @@ -246,13 +179,11 @@ def test_positive_filtering_option(self, default_org, virtwho_config, session, f 1. if filtering is selected to Whitelist, 'Filter hosts' can be set. 2. if filtering is selected to Blacklist, 'Exclude hosts' can be set. 
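# A note on the interval loop above: the UI-label-to-seconds map stores the
# seconds as strings, so the int() key is what makes the iteration ascend
# numerically rather than lexicographically. Values here are illustrative:
intervals = {'Every 2 hours': '7200', 'Every hour': '3600', 'Every 3 days': '259200'}
ordered = sorted(intervals.items(), key=lambda item: int(item[1]))
assert [seconds for _, seconds in ordered] == ['3600', '7200', '259200']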
- :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1735670 """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) @@ -263,28 +194,30 @@ def test_positive_filtering_option(self, default_org, virtwho_config, session, f whitelist['filtering_content.filter_host_parents'] = regex blacklist['filtering_content.exclude_host_parents'] = regex # Update Whitelist and check the result - session.virtwho_configure.edit(name, whitelist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, whitelist) + results = org_session.virtwho_configure.read(name) assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert regex == get_configure_option('filter_hosts', config_file) assert regex == get_configure_option('filter_host_parents', config_file) # Update Blacklist and check the result - session.virtwho_configure.edit(name, blacklist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) assert results['overview']['exclude_hosts'] == regex assert results['overview']['exclude_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert regex == get_configure_option('exclude_hosts', config_file) assert regex == get_configure_option('exclude_host_parents', config_file) @pytest.mark.tier2 - def test_positive_proxy_option(self, default_org, virtwho_config, session, form_data): + def test_positive_proxy_option( + self, default_org, default_location, virtwho_config_ui, org_session, form_data_ui + ): """Verify 'HTTP Proxy' and 'Ignore Proxy' options. :id: 6659d577-0135-4bf0-81af-14b930011536 @@ -292,39 +225,39 @@ def test_positive_proxy_option(self, default_org, virtwho_config, session, form_ :expectedresults: http_proxy/https_proxy and NO_PROXY will be setting in /etc/sysconfig/virt-who. 
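# The proxy assertions that follow read keys back out of ETC_VIRTWHO_CONFIG
# (/etc/sysconfig/virt-who). Assuming plain KEY=value sysconfig syntax, a toy
# equivalent of those get_configure_option() lookups (proxy URL illustrative):
def parse_sysconfig(text):
    return dict(
        line.split('=', 1)
        for line in text.splitlines()
        if line and not line.startswith('#')
    )


sample = 'VIRTWHO_DEBUG=1\nhttps_proxy=http://proxy.example.com:3128\nno_proxy=test.satellite.com'
options = parse_sysconfig(sample)
assert options['no_proxy'] == 'test.satellite.com'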
- :CaseLevel: Integration - :CaseImportance: Medium """ - https_proxy, https_proxy_name, https_proxy_id = create_http_proxy(org=default_org) + https_proxy, https_proxy_name, https_proxy_id = create_http_proxy( + org=default_org, location=default_location + ) http_proxy, http_proxy_name, http_proxy_id = create_http_proxy( - http_type='http', org=default_org + http_type='http', org=default_org, location=default_location ) - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) no_proxy = 'test.satellite.com' # Check the https proxy and No_PROXY settings - session.virtwho_configure.edit(name, {'proxy': https_proxy, 'no_proxy': no_proxy}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'proxy': https_proxy, 'no_proxy': no_proxy}) + results = org_session.virtwho_configure.read(name) assert results['overview']['proxy'] == https_proxy assert results['overview']['no_proxy'] == no_proxy deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy # Check the http proxy setting - session.virtwho_configure.edit(name, {'proxy': http_proxy}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'proxy': http_proxy}) + results = org_session.virtwho_configure.read(name) assert results['overview']['proxy'] == http_proxy deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy @pytest.mark.tier2 - def test_positive_virtwho_roles(self, session): + def test_positive_virtwho_roles(self, org_session): """Verify the default roles for virtwho configure :id: cd6a5363-f9ba-4b52-892c-905634168fc5 @@ -332,8 +265,6 @@ def test_positive_virtwho_roles(self, session): :expectedresults: 'Virt-who Manager', 'Virt-who Reporter', 'Virt-who Viewer' existing - :CaseLevel: Integration - :CaseImportance: Low """ roles = { @@ -352,19 +283,19 @@ def test_positive_virtwho_roles(self, session): }, 'Virt-who Viewer': {'Satellite virt who configure/config': ['view_virt_who_config']}, } - with session: + with org_session: for role_name, role_filters in roles.items(): - assert session.role.search(role_name)[0]['Name'] == role_name - assigned_permissions = session.filter.read_permissions(role_name) + assert org_session.role.search(role_name)[0]['Name'] == role_name + assigned_permissions = org_session.filter.read_permissions(role_name) assert sorted(assigned_permissions) == sorted(role_filters) @pytest.mark.tier2 - def test_positive_virtwho_configs_widget(self, default_org, session, form_data): + def test_positive_virtwho_configs_widget(self, default_org, org_session, form_data_ui): """Check if Virt-who Configurations Status Widget is working in the Dashboard UI :id: 5d61ce00-a640-4823-89d4-7b1d02b50ea6 - :Steps: + :steps: 1. Create a Virt-who Configuration 2. Navigate Monitor -> Dashboard @@ -372,44 +303,44 @@ def test_positive_virtwho_configs_widget(self, default_org, session, form_data): :expectedresults: The widget is updated with all details. 
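One subtlety in the roles check above: applying `sorted()` to a dict sorts its keys, so `sorted(assigned_permissions) == sorted(role_filters)` compares the resource names order-insensitively and does not inspect the nested permission lists. A small illustration with values taken from the Viewer role:

```python
# sorted(dict) yields the sorted list of keys, so this comparison covers the
# resource names only, not the per-resource permission lists.
role_filters = {'Satellite virt who configure/config': ['view_virt_who_config']}
assigned_permissions = {'Satellite virt who configure/config': ['view_virt_who_config']}
assert sorted(assigned_permissions) == sorted(role_filters)
assert sorted(assigned_permissions) == ['Satellite virt who configure/config']
```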
- :CaseLevel: Integration - :CaseImportance: Low """ org_name = gen_string('alpha') name = gen_string('alpha') - form_data['name'] = name - with session: - session.organization.create({'name': org_name}) - session.organization.select(org_name) - session.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.organization.create({'name': org_name}) + org_session.organization.select(org_name) + org_session.virtwho_configure.create(form_data_ui) expected_values = [ {'Configuration Status': 'No Reports', 'Count': '1'}, {'Configuration Status': 'No Change', 'Count': '0'}, {'Configuration Status': 'OK', 'Count': '0'}, {'Configuration Status': 'Total Configurations', 'Count': '1'}, ] - values = session.dashboard.read('VirtWhoConfigStatus') + values = org_session.dashboard.read('VirtWhoConfigStatus') assert values['config_status'] == expected_values assert values['latest_config'] == 'No configuration found' # Check the 'Status' changed after deployed the virt-who config config_id = get_configure_id(name) config_command = get_configure_command(config_id, org_name) - deploy_configure_by_command(config_command, form_data['hypervisor_type'], org=org_name) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' + deploy_configure_by_command( + config_command, form_data_ui['hypervisor_type'], org=org_name + ) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' expected_values = [ {'Configuration Status': 'No Reports', 'Count': '0'}, {'Configuration Status': 'No Change', 'Count': '0'}, {'Configuration Status': 'OK', 'Count': '1'}, {'Configuration Status': 'Total Configurations', 'Count': '1'}, ] - values = session.dashboard.read('VirtWhoConfigStatus') + values = org_session.dashboard.read('VirtWhoConfigStatus') assert values['config_status'] == expected_values assert values['latest_config'] == 'No configuration found' - session.organization.select("Default Organization") + org_session.organization.select("Default Organization") @pytest.mark.tier2 - def test_positive_delete_configure(self, default_org, session, form_data): + def test_positive_delete_configure(self, default_org, org_session, form_data_ui): """Verify when a config is deleted the associated user is deleted. 
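The dashboard check above compares the widget contents as an ordered list of row dicts, so both the row order and the counts must match exactly; right after a config is created but before deployment, only 'No Reports' and 'Total Configurations' are non-zero. A reduced sketch of that comparison (the stand-in for `dashboard.read` is an assumption about its return shape):

```python
# Reduced sketch: values['config_status'] is read as an ordered list of rows,
# so expected_values must match the widget's display order, not just its set.
expected_values = [
    {'Configuration Status': 'No Reports', 'Count': '1'},
    {'Configuration Status': 'No Change', 'Count': '0'},
    {'Configuration Status': 'OK', 'Count': '0'},
    {'Configuration Status': 'Total Configurations', 'Count': '1'},
]
values = {'config_status': list(expected_values)}  # stand-in for dashboard.read()
assert values['config_status'] == expected_values
```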
:id: 0e66dcf6-dc64-4fb2-b8a9-518f5adfa800 @@ -425,22 +356,24 @@ """ name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) restart_virtwho_service() assert get_virtwho_status() == 'logerror' @pytest.mark.tier2 - def test_positive_virtwho_reporter_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_reporter_role( + self, default_org, org_session, test_name, form_data_ui, target_sat + ): """Verify the virt-who reporter role can TRULY work. :id: cd235ab0-d89c-464b-98d6-9d090ac40d8f @@ -453,9 +386,9 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create a user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -465,14 +398,14 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Update the virt-who config file config_id = get_configure_id(config_name) config_file = get_configure_file(config_id) @@ -482,19 +415,21 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f restart_virtwho_service() assert get_virtwho_status() == 'logerror' # Check the permission of Virt-who Reporter - session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) - assert session.user.search(username)[0]['Username'] == username - user = session.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) + assert org_session.user.search(username)[0]['Username'] == username + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Reporter'] restart_virtwho_service() assert get_virtwho_status() == 'running' - with Session(test_name, username, password) as newsession: + with target_sat.ui_session(test_name, username, password) as newsession: assert not
newsession.virtwho_configure.check_create_permission()['can_view'] - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_virtwho_viewer_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_viewer_role( + self, default_org, org_session, test_name, form_data_ui, target_sat + ): """Verify the virt-who viewer role can TRULY work. :id: bf3be2e4-3853-41cc-9b3e-c8677f0b8c5f @@ -507,9 +442,9 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create a user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -519,17 +454,17 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permission of Virt-who Viewer - session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) - user = session.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Viewer'] # Update the virt-who config file config_id = get_configure_id(config_name) @@ -539,7 +474,7 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for add_configure_option('rhsm_password', password, config_file) restart_virtwho_service() assert get_virtwho_status() == 'logerror' - with Session(test_name, username, password) as newsession: + with target_sat.ui_session(test_name, username, password) as newsession: create_permission = newsession.virtwho_configure.check_create_permission() update_permission = newsession.virtwho_configure.check_update_permission( config_name @@ -550,11 +485,13 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for assert not update_permission['can_edit'] newsession.virtwho_configure.read(config_name) # Delete the created user - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_virtwho_manager_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_manager_role( + self, default_org, org_session, test_name, form_data_ui, target_sat + ): """Verify the virt-who manager role can TRULY work.
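The role tests above all follow one pattern: the admin `org_session` does the setup, then a second UI session is opened as the restricted user (via the `target_sat.ui_session` helper this diff switches to) and the permission probes run there. A condensed sketch of the reporter-role case, wrapped in a function so the dependencies are explicit:

```python
def probe_reporter_permissions(target_sat, test_name, username, password):
    # The second session belongs to the restricted user; the check_* helpers
    # (names taken from the diff itself) report what that user may do.
    with target_sat.ui_session(test_name, username, password) as newsession:
        # A Virt-who Reporter can upload reports but cannot even view configs.
        assert not newsession.virtwho_configure.check_create_permission()['can_view']
```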
:id: a72023fb-7b23-4582-9adc-c5227dc7859c @@ -566,9 +503,9 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create a user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -578,28 +515,28 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permission of Virt-who Manager - session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) - user = session.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Manager'] - with Session(test_name, username, password) as newsession: + with target_sat.ui_session(test_name, username, password) as newsession: # create_virt_who_config new_virt_who_name = gen_string('alpha') - form_data['name'] = new_virt_who_name - newsession.virtwho_configure.create(form_data) + form_data_ui['name'] = new_virt_who_name + newsession.virtwho_configure.create(form_data_ui) # view_virt_who_config values = newsession.virtwho_configure.read(new_virt_who_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) assert newsession.virtwho_configure.search(new_virt_who_name)[0]['Status'] == 'ok' # edit_virt_who_config @@ -610,38 +547,40 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo newsession.virtwho_configure.delete(modify_name) assert not newsession.virtwho_configure.search(modify_name) # Delete the created user - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_overview_label_name(self, default_org, form_data, session): + def test_positive_overview_label_name( + self, default_org, default_location, form_data_ui, org_session + ): """Verify the label name on virt-who config Overview Page.
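The overview test above builds its create payload with `dict(form_data_ui, **whitelist)`, which merges into a fresh copy rather than mutating the shared fixture, with the overlay winning on key conflicts. A tiny demonstration of that idiom:

```python
# dict(base, **extra) copies base and overlays extra, leaving base untouched;
# dotted keys are fine because dict() accepts arbitrary string keywords.
base = {'name': 'cfg', 'filtering': 'Unlimited'}
extra = {'filtering': 'Whitelist', 'filtering_content.filter_hosts': '.*redhat.com'}
merged = dict(base, **extra)
assert merged['filtering'] == 'Whitelist'
assert base['filtering'] == 'Unlimited'  # the original payload is unchanged
```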
:id: 21df8175-bb41-422e-a263-8677bc3a9565 :BZ: 1649928 - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium """ name = gen_string('alpha') - form_data['name'] = name - hypervisor_type = form_data['hypervisor_type'] - http_proxy_url, proxy_name, proxy_id = create_http_proxy(org=default_org) - form_data['proxy'] = http_proxy_url - form_data['no_proxy'] = 'test.satellite.com' + form_data_ui['name'] = name + hypervisor_type = form_data_ui['hypervisor_type'] + http_proxy_url, proxy_name, proxy_id = create_http_proxy( + org=default_org, location=default_location + ) + form_data_ui['proxy'] = http_proxy_url + form_data_ui['no_proxy'] = 'test.satellite.com' regex = '.*redhat.com' whitelist = {'filtering': 'Whitelist', 'filtering_content.filter_hosts': regex} blacklist = {'filtering': 'Blacklist', 'filtering_content.exclude_hosts': regex} if hypervisor_type == 'esx': whitelist['filtering_content.filter_host_parents'] = regex blacklist['filtering_content.exclude_host_parents'] = regex - form_data = dict(form_data, **whitelist) - with session: - session.virtwho_configure.create(form_data) + form_data = dict(form_data_ui, **whitelist) + with org_session: + org_session.virtwho_configure.create(form_data) fields = { 'status_label': 'Status', 'hypervisor_type_label': 'Hypervisor Type', @@ -662,11 +601,11 @@ def test_positive_overview_label_name(self, default_org, form_data, session): fields['kubeconfig_path_label'] = 'Kubeconfig Path' if hypervisor_type == 'esx': fields['filter_host_parents_label'] = 'Filter Host Parents' - results = session.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) for key, value in fields.items(): assert results['overview'][key] == value - session.virtwho_configure.edit(name, blacklist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) del fields['filter_hosts_label'] if hypervisor_type == 'esx': del fields['filter_host_parents_label'] @@ -676,7 +615,9 @@ def test_positive_overview_label_name(self, default_org, form_data, session): assert results['overview'][key] == value @pytest.mark.tier2 - def test_positive_last_checkin_status(self, default_org, virtwho_config, form_data, session): + def test_positive_last_checkin_status( + self, default_org, virtwho_config_ui, form_data_ui, org_session + ): """Verify the Last Checkin status on Content Hosts Page. 
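The body below verifies 'Last Checkin' by parsing the UI string with the updated format (note the new literal 'at'), re-stamping the year, and allowing a 10-minute margin against the browser clock. A standalone sketch of the parsing step; the timestamp literal is illustrative only:

```python
# Standalone sketch of the timestamp handling used below; in the tests the
# margin is computed against session.browser.get_client_datetime().
from datetime import datetime

checkin_time = 'January 5, 2024 at 03:12 PM'  # illustrative value
parsed = datetime.strptime(checkin_time, '%B %d, %Y at %I:%M %p').replace(
    year=datetime.utcnow().year
)
margin = abs(parsed.timestamp() - datetime.utcnow().timestamp())
print(margin <= 600)  # the tests assert this holds for a live timestamp
```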
:id: 7448d482-d05c-4727-8980-176586e9e4a7 @@ -685,25 +626,23 @@ def test_positive_last_checkin_status(self, default_org, virtwho_config, form_da :BZ: 1652323 - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium """ - name = form_data['name'] - values = session.virtwho_configure.read(name, widget_names='deploy.command') + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name, widget_names='deploy.command') command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - time_now = session.browser.get_client_datetime() - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - checkin_time = session.contenthost.search(hypervisor_name)[0]['Last Checkin'] + time_now = org_session.browser.get_client_datetime() + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + checkin_time = org_session.contenthost.search(hypervisor_name)[0]['Last Checkin'] # 10 mins margin to check the Last Checkin time assert ( abs( - datetime.strptime(checkin_time, "%B %d, %Y, %I:%M %p") + datetime.strptime(checkin_time, "%B %d, %Y at %I:%M %p") .replace(year=datetime.utcnow().year) .timestamp() - time_now.timestamp() @@ -713,7 +652,7 @@ def test_positive_last_checkin_status(self, default_org, virtwho_config, form_da @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, default_org, form_data, target_sat, session + self, default_org, form_data_ui, target_sat, org_session ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -721,8 +660,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -730,12 +667,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name - with session: + form_data_ui['name'] = name + with org_session: # check the hypervisor password contains single quotes - form_data['hypervisor_content.password'] = "Tes't" - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "Tes't" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -746,12 +683,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) # check the hypervisor password contains backtick - form_data['hypervisor_content.password'] = "my`password" - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "my`password" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] 
config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -762,12 +699,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) @pytest.mark.tier2 def test_positive_remove_env_option( - self, default_org, virtwho_config, form_data, target_sat, session + self, default_org, virtwho_config_ui, form_data_ui, target_sat, org_session ): """remove option 'env=' from the virt-who configuration file and without any error @@ -777,21 +714,19 @@ def test_positive_remove_env_option( the option "env=" should be removed from etc/virt-who.d/virt-who.conf /var/log/messages should not display warning message - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1834897 :customerscenario: true """ - name = form_data['name'] - values = session.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_id = get_configure_id(name) @@ -799,10 +734,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/ui/test_esx_sca.py b/tests/foreman/virtwho/ui/test_esx_sca.py index 90dc761cc2f..a4c45dd4960 100644 --- a/tests/foreman/virtwho/ui/test_esx_sca.py +++ b/tests/foreman/virtwho/ui/test_esx_sca.py @@ -4,80 +4,42 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ from datetime import datetime -import pytest from airgun.session import Session from fauxfactory import gen_string +import pytest from robottelo.config import settings from robottelo.utils.datafactory import valid_emails_list -from robottelo.utils.virtwho import add_configure_option -from robottelo.utils.virtwho import delete_configure_option -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_command_check -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import ETC_VIRTWHO_CONFIG -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import 
get_configure_option -from robottelo.utils.virtwho import get_guest_info -from robottelo.utils.virtwho import get_virtwho_status -from robottelo.utils.virtwho import restart_virtwho_service -from robottelo.utils.virtwho import update_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.esx.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture(autouse=True) -def clean_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - -@pytest.mark.usefixtures('clean_host') +from robottelo.utils.virtwho import ( + ETC_VIRTWHO_CONFIG, + add_configure_option, + delete_configure_option, + deploy_configure_by_command, + deploy_configure_by_command_check, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, + get_virtwho_status, + restart_virtwho_service, + update_configure_option, +) + + +@pytest.mark.usefixtures('delete_host') class TestVirtwhoConfigforEsx: @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -90,33 +52,13 @@ def test_positive_deploy_configure_by_id_script( 4. Virtual sku can be generated and attached 5. Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_debug_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify debug checkbox and the value changes of VIRTWHO_DEBUG @@ -126,28 +68,26 @@ def test_positive_debug_option( 1. if debug is checked, VIRTWHO_DEBUG=1 in /etc/sysconfig/virt-who 2. 
if debug is unchecked, VIRTWHO_DEBUG=0 in /etc/sysconfig/virt-who - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '1' - session_sca.virtwho_configure.edit(name, {'debug': False}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'debug': False}) + results = org_session.virtwho_configure.read(name) assert results['overview']['debug'] is False deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '0' @pytest.mark.tier2 def test_positive_interval_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify interval dropdown options and the value changes of VIRTWHO_INTERVAL. @@ -157,11 +97,9 @@ def test_positive_interval_option( VIRTWHO_INTERVAL can be changed in /etc/sysconfig/virt-who if the dropdown option is selected to Every 2/4/8/12/24 hours, Every 2/3 days. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) intervals = { @@ -175,17 +113,17 @@ def test_positive_interval_option( 'Every 3 days': '259200', } for option, value in intervals.items(): - session_sca.virtwho_configure.edit(name, {'interval': option}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'interval': option}) + results = org_session.virtwho_configure.read(name) assert results['overview']['interval'] == option deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -195,21 +133,19 @@ def test_positive_hypervisor_id_option( hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. 
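As the debug test above shows, the checkbox round-trips to the literal strings '1' and '0' in /etc/sysconfig/virt-who, so the assertions compare text rather than booleans. A trivial sketch of the expected mapping:

```python
# VIRTWHO_DEBUG is persisted as text, so the tests compare '1'/'0' strings.
def expected_virtwho_debug(debug_checked: bool) -> str:
    return '1' if debug_checked else '0'

assert expected_virtwho_debug(True) == '1'
assert expected_virtwho_debug(False) == '0'
```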
- :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) # esx and rhevm support hwuuid option for value in ['uuid', 'hostname', 'hwuuid']: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @@ -217,7 +153,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.parametrize('filter_type', ['whitelist', 'blacklist']) @pytest.mark.parametrize('option_type', ['edit', 'create']) def test_positive_filtering_option( - self, module_sca_manifest_org, session_sca, form_data, filter_type, option_type + self, module_sca_manifest_org, org_session, form_data_ui, filter_type, option_type ): """Verify Filtering dropdown options. @@ -232,7 +168,6 @@ def test_positive_filtering_option( 'Filter hosts' can be set. 4. Create virtwho config if filtering is selected to Blacklist, 'Exclude hosts' can be set. - :CaseLevel: Integration :CaseImportance: Medium @@ -241,11 +176,11 @@ def test_positive_filtering_option( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name + form_data_ui['name'] = name regex = '.*redhat.com' - with session_sca: + with org_session: if option_type == "edit": - session_sca.virtwho_configure.create(form_data) + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) @@ -254,19 +189,21 @@ def test_positive_filtering_option( # esx support filter-host-parents and exclude-host-parents options whitelist['filtering_content.filter_host_parents'] = regex # Update Whitelist and check the result - session_sca.virtwho_configure.edit(name, whitelist) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, whitelist) + results = org_session.virtwho_configure.read(name) assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex elif filter_type == "blacklist": blacklist = {'filtering': 'Blacklist', 'filtering_content.exclude_hosts': regex} blacklist['filtering_content.exclude_host_parents'] = regex - session_sca.virtwho_configure.edit(name, blacklist) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) assert results['overview']['exclude_hosts'] == regex assert results['overview']['exclude_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, + form_data_ui['hypervisor_type'], + org=module_sca_manifest_org.label, ) if filter_type == "whitelist": assert regex == get_configure_option('filter_hosts', config_file) @@ -274,25 +211,25 @@ def test_positive_filtering_option( elif filter_type == "blacklist": assert regex == 
get_configure_option('exclude_hosts', config_file) assert regex == get_configure_option('exclude_host_parents', config_file) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) elif option_type == "create": if filter_type == "whitelist": - form_data['filtering'] = "Whitelist" - form_data['filtering_content.filter_hosts'] = regex - form_data['filtering_content.filter_host_parents'] = regex + form_data_ui['filtering'] = "Whitelist" + form_data_ui['filtering_content.filter_hosts'] = regex + form_data_ui['filtering_content.filter_host_parents'] = regex elif filter_type == "blacklist": - form_data['filtering'] = "Blacklist" - form_data['filtering_content.exclude_hosts'] = regex - form_data['filtering_content.exclude_host_parents'] = regex - session_sca.virtwho_configure.create(form_data) + form_data_ui['filtering'] = "Blacklist" + form_data_ui['filtering_content.exclude_hosts'] = regex + form_data_ui['filtering_content.exclude_host_parents'] = regex + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(config_id) - results = session_sca.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) if filter_type == "whitelist": assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex @@ -306,7 +243,7 @@ def test_positive_filtering_option( @pytest.mark.tier2 def test_positive_last_checkin_status( - self, module_sca_manifest_org, virtwho_config, form_data, session_sca + self, module_sca_manifest_org, virtwho_config_ui, form_data_ui, org_session ): """Verify the Last Checkin status on Content Hosts Page. 
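The filtering test above stacks two `parametrize` decorators, so pytest expands their cross product: whitelist/blacklist times edit/create gives four generated cases from a single body. A self-contained sketch of that expansion:

```python
# Stacked parametrize decorators expand to the cross product of their values:
# 2 filter types x 2 option types = 4 generated test cases.
import pytest

@pytest.mark.parametrize('filter_type', ['whitelist', 'blacklist'])
@pytest.mark.parametrize('option_type', ['edit', 'create'])
def test_filtering_combination(filter_type, option_type):
    assert (filter_type, option_type) in {
        ('whitelist', 'edit'), ('whitelist', 'create'),
        ('blacklist', 'edit'), ('blacklist', 'create'),
    }
```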
@@ -316,25 +253,23 @@ def test_positive_last_checkin_status( :BZ: 1652323 - :CaseLevel: Integration - :customerscenario: true :CaseImportance: Medium """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name, widget_names='deploy.command') + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name, widget_names='deploy.command') command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) - time_now = session_sca.browser.get_client_datetime() - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - checkin_time = session_sca.contenthost.search(hypervisor_name)[0]['Last Checkin'] + time_now = org_session.browser.get_client_datetime() + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + checkin_time = org_session.contenthost.search(hypervisor_name)[0]['Last Checkin'] # 10 mins margin to check the Last Checkin time assert ( abs( - datetime.strptime(checkin_time, "%B %d, %Y, %I:%M %p") + datetime.strptime(checkin_time, "%B %d, %Y at %I:%M %p") .replace(year=datetime.utcnow().year) .timestamp() - time_now.timestamp() @@ -344,7 +279,7 @@ def test_positive_last_checkin_status( @pytest.mark.tier2 def test_positive_remove_env_option( - self, module_sca_manifest_org, virtwho_config, form_data, target_sat, session_sca + self, module_sca_manifest_org, virtwho_config_ui, form_data_ui, target_sat, org_session ): """remove option 'env=' from the virt-who configuration file and without any error @@ -354,21 +289,19 @@ def test_positive_remove_env_option( 1. the option "env=" should be removed from etc/virt-who.d/virt-who.conf 2. 
/var/log/messages should not display warning message - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 1834897 :customerscenario: true """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_id = get_configure_id(name) @@ -376,17 +309,16 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') assert result.status == 1 @pytest.mark.tier2 - def test_positive_virtwho_roles(self, session_sca): + def test_positive_virtwho_roles(self, org_session): """Verify the default roles for virtwho configure :id: 3c2501d5-c122-49f0-baa4-4c0d678cb6fc @@ -394,8 +326,6 @@ def test_positive_virtwho_roles(self, session_sca): :expectedresults: 'Virt-who Manager', 'Virt-who Reporter', 'Virt-who Viewer' existing - :CaseLevel: Integration - :CaseImportance: Low """ roles = { @@ -414,14 +344,14 @@ def test_positive_virtwho_roles(self, session_sca): }, 'Virt-who Viewer': {'Satellite virt who configure/config': ['view_virt_who_config']}, } - with session_sca: + with org_session: for role_name, role_filters in roles.items(): - assert session_sca.role.search(role_name)[0]['Name'] == role_name - assigned_permissions = session_sca.filter.read_permissions(role_name) + assert org_session.role.search(role_name)[0]['Name'] == role_name + assigned_permissions = org_session.filter.read_permissions(role_name) assert sorted(assigned_permissions) == sorted(role_filters) @pytest.mark.tier2 - def test_positive_delete_configure(self, module_sca_manifest_org, session_sca, form_data): + def test_positive_delete_configure(self, module_sca_manifest_org, org_session, form_data_ui): """Verify when a config is deleted the associated user is deleted. :id: efc7253d-f455-4dc3-ae03-3ed5e215bd11 @@ -435,28 +365,26 @@ def test_positive_delete_configure(self, module_sca_manifest_org, session_sca, f 1. Verify the virt-who server can no longer connect to the Satellite. 
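The `pytest.raises` refactor above (applied in both esx files) is more than a style change: the old try/except form passed silently whenever `get_configure_option` failed to raise, because the assert lived inside the except block; the context manager instead fails the test if no exception occurs. A self-contained illustration, with a stand-in for the raising helper:

```python
# With try/except, a call that unexpectedly succeeds skips the assert and the
# test passes anyway; pytest.raises fails unless the exception is raised.
import pytest

def get_option_or_raise(option='env'):
    # Stand-in that mimics the helper's error message format.
    raise Exception(f"option {{'{option}'}} is not exist or not be enabled in ...")

with pytest.raises(Exception) as exc_info:  # noqa: PT011
    get_option_or_raise()
assert "is not exist" in str(exc_info.value)
```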
- :CaseLevel: Integration - :CaseImportance: Low """ name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) restart_virtwho_service() assert get_virtwho_status() == 'logerror' @pytest.mark.tier2 def test_positive_virtwho_reporter_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who reporter role can TRULY work. @@ -467,16 +395,14 @@ def test_positive_virtwho_reporter_role( to upload the report, it can be used if you configure virt-who manually and want to use a user that has a locked-down account. - :CaseLevel: Integration - :CaseImportance: Low """ username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create a user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -486,14 +412,14 @@ def test_positive_virtwho_reporter_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Update the virt-who config file config_id = get_configure_id(config_name) config_file = get_configure_file(config_id) @@ -503,20 +429,20 @@ def test_positive_virtwho_reporter_role( restart_virtwho_service() assert get_virtwho_status() == 'logerror' # Check the permission of Virt-who Reporter - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) - assert session_sca.user.search(username)[0]['Username'] == username - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) + assert org_session.user.search(username)[0]['Username'] == username + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Reporter'] restart_virtwho_service() assert get_virtwho_status() == 'running' with Session(test_name, username, password) as newsession: assert not newsession.virtwho_configure.check_create_permission()['can_view'] - session_sca.user.delete(username) -
assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_virtwho_viewer_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who viewer role can TRULY work. @@ -527,16 +453,14 @@ def test_positive_virtwho_viewer_role( including their configuration scripts, which means viewers could still deploy the virt-who instances for existing virt-who configurations. - :CaseLevel: Integration - :CaseImportance: Low """ username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create a user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -546,17 +470,17 @@ def test_positive_virtwho_viewer_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permission of Virt-who Viewer - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Viewer'] # Update the virt-who config file config_id = get_configure_id(config_name) @@ -577,12 +501,12 @@ def test_positive_virtwho_viewer_role( assert not update_permission['can_edit'] newsession.virtwho_configure.read(config_name) # Delete the created user - session_sca.user.delete(username) - assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_virtwho_manager_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who manager role can TRULY work. @@ -591,16 +515,15 @@ def test_positive_virtwho_manager_role( :expectedresults: Virt-who Manager Role grants all permissions to manage virt-who configurations; a user needs this role to create, delete or update configurations.
- :CaseLevel: Integration :CaseImportance: Low """ username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create a user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -610,28 +533,28 @@ def test_positive_virtwho_manager_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permission of Virt-who Manager - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Manager'] with Session(test_name, username, password) as newsession: # create_virt_who_config new_virt_who_name = gen_string('alpha') - form_data['name'] = new_virt_who_name - newsession.virtwho_configure.create(form_data) + form_data_ui['name'] = new_virt_who_name + newsession.virtwho_configure.create(form_data_ui) # view_virt_who_config values = newsession.virtwho_configure.read(new_virt_who_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert newsession.virtwho_configure.search(new_virt_who_name)[0]['Status'] == 'ok' # edit_virt_who_config @@ -642,12 +565,12 @@ def test_positive_virtwho_manager_role( newsession.virtwho_configure.delete(modify_name) assert not newsession.virtwho_configure.search(modify_name) # Delete the created user - session_sca.user.delete(username) - assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, module_sca_manifest_org, form_data, target_sat, session_sca + self, module_sca_manifest_org, form_data_ui, target_sat, org_session ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -655,8 +578,6 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :expectedresults: Config can be created and deployed without any error - :CaseLevel: Integration - :CaseImportance: High :BZ: 1870816,1959136 @@ -664,12 +585,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name - with session_sca: + form_data_ui['name'] = name + with org_session: # check the hypervisor password contains single quotes - form_data['hypervisor_content.password'] = "Tes't" - session_sca.virtwho_configure.create(form_data) - values =
session_sca.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "Tes't" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -680,12 +601,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) # check the hypervisor password contains backtick - form_data['hypervisor_content.password'] = "my`password" - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "my`password" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -696,5 +617,36 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) + + @pytest.mark.tier2 + def test_positive_hypervisor_password_option( + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui + ): + """Verify Hypervisor password. 
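The special-character checks above push a single quote and a backtick through the generated deploy command; both are shell-sensitive, which is what BZ 1870816 and 1959136 were about. A small sketch using `shlex.quote` to show the escaping such values need on a command line:

```python
# The quote can terminate a quoted shell argument and the backtick triggers
# command substitution; shlex.quote shows the escaping a safe command needs.
import shlex

for password in ("Tes't", "my`password"):
    print(shlex.quote(f'--password={password}'))
```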
+ + :id: 8362955a-4daa-4332-9559-b526d9095a61 + + :expectedresults: + hypervisor_password has been set in virt-who-config-{}.conf + hypervisor_password has been encrypted in satellite WEB UI Edit page + hypervisor_password has been encrypted in satellite WEB UI Details page + + :CaseImportance: Medium + + :BZ: 2256927 + """ + name = form_data_ui['name'] + config_id = get_configure_id(name) + config_command = get_configure_command(config_id, module_sca_manifest_org.name) + deploy_configure_by_command( + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label + ) + config_file = get_configure_file(config_id) + assert get_configure_option('encrypted_password', config_file) + res = org_session.virtwho_configure.read_edit(name) + assert 'encrypted-' in res['hypervisor_content']['password'] + org_session.virtwho_configure.edit(name, {'hypervisor_password': gen_string('alpha')}) + results = org_session.virtwho_configure.read(name) + assert 'encrypted_password=$cr_password' in results['deploy']['script'] diff --git a/tests/foreman/virtwho/ui/test_hyperv.py b/tests/foreman/virtwho/ui/test_hyperv.py index dad6a6121d0..8348540326f 100644 --- a/tests/foreman/virtwho/ui/test_hyperv.py +++ b/tests/foreman/virtwho/ui/test_hyperv.py @@ -4,124 +4,68 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.hyperv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforHyperv: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, session, form_data): + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, org_session, form_data_ui, deploy_type_ui + ): """Verify configure created and deployed with id. :id: c8913398-c5c6-4f2c-bc53-0bbfb158b762 :expectedresults: - 1. Config can be created and deployed by command + 1. Config can be created and deployed by command/script 2. No error msg in /var/log/rhsm/rhsm.log 3. Report is sent to satellite 4. Virtual sku can be generated and attached 5. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, virtwho_config, session, form_data - ): - """Verify configure created and deployed with script. - - :id: b0401417-3a6e-4a54-b8e8-22d290813da3 - - :expectedresults: - 1. Config can be created and deployed by script - 2. No error msg in /var/log/rhsm/rhsm.log - 3. Report is sent to satellite - 4. Virtual sku can be generated and attached - 5. Config can be deleted - - :CaseLevel: Integration - - :CaseImportance: High - """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] - vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' - vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. 
:id: f2efc018-d57e-4dc5-895e-53af320237de @@ -130,20 +74,18 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_hyperv_sca.py b/tests/foreman/virtwho/ui/test_hyperv_sca.py index 140142bb419..a5b560c99a1 100644 --- a/tests/foreman/virtwho/ui/test_hyperv_sca.py +++ b/tests/foreman/virtwho/ui/test_hyperv_sca.py @@ -4,58 +4,27 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.hyperv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.hyperv.hypervisor_password, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -68,33 +37,13 @@ def test_positive_deploy_configure_by_id_script( 4. Virtual sku can be generated and attached 5. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -104,20 +53,18 @@ def test_positive_hypervisor_id_option( 1. hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_kubevirt.py b/tests/foreman/virtwho/ui/test_kubevirt.py index 57d7f278e03..e12c4774fc8 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt.py +++ b/tests/foreman/virtwho/ui/test_kubevirt.py @@ -4,122 +4,68 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'hypervisor_content.kubeconfig': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) +from robottelo.utils.virtwho import 
( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforKubevirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, session, form_data): + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, org_session, form_data_ui, deploy_type_ui + ): """Verify configure created and deployed with id. :id: 7b2a1b08-f33c-44f4-ad2e-317b6c44b938 :expectedresults: - 1. Config can be created and deployed by command + 1. Config can be created and deployed by command/script 2. No error msg in /var/log/rhsm/rhsm.log 3. Report is sent to satellite 4. Virtual sku can be generated and attached 5. Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, virtwho_config, session, form_data - ): - """Verify configure created and deployed with script. - - :id: b3903ccb-04cc-4867-b7ed-d5053d2bfe03 - - :expectedresults: - 1. Config can be created and deployed by script - 2. No error msg in /var/log/rhsm/rhsm.log - 3. Report is sent to satellite - 4. Virtual sku can be generated and attached - 5. 
Config can be deleted - - :CaseLevel: Integration - - :CaseImportance: High - """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] - vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' - vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: 09826cc0-aa49-4355-8980-8097511eb7d7 @@ -128,20 +74,18 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. 
- :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_kubevirt_sca.py b/tests/foreman/virtwho/ui/test_kubevirt_sca.py index 569c5f7caca..a1554929866 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/ui/test_kubevirt_sca.py @@ -4,56 +4,27 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'hypervisor_content.kubeconfig': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -66,33 +37,13 @@ def test_positive_deploy_configure_by_id_script( 4. Virtual sku can be generated and attached 5. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -102,20 +53,18 @@ def test_positive_hypervisor_id_option( 1. hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_libvirt.py b/tests/foreman/virtwho/ui/test_libvirt.py index f75db53cc20..a66bf1737bb 100644 --- a/tests/foreman/virtwho/ui/test_libvirt.py +++ b/tests/foreman/virtwho/ui/test_libvirt.py @@ -4,123 +4,68 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from fauxfactory import gen_string from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.libvirt.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not 
session.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforLibvirt: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, session, form_data): + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, org_session, form_data_ui, deploy_type_ui + ): """Verify configure created and deployed with id. :id: ae37ea79-f99c-4511-ace9-a7de26d6db40 :expectedresults: - 1. Config can be created and deployed by command + 1. Config can be created and deployed by command/script 2. No error msg in /var/log/rhsm/rhsm.log 3. Report is sent to satellite 4. Virtual sku can be generated and attached 5. Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, virtwho_config, session, form_data - ): - """Verify configure created and deployed with script. - - :id: 3655a501-ab05-4724-945a-7f6e6878091d - - :expectedresults: - 1. Config can be created and deployed by script - 2. No error msg in /var/log/rhsm/rhsm.log - 3. Report is sent to satellite - 4. Virtual sku can be generated and attached - 5. 
Config can be deleted - - :CaseLevel: Integration - - :CaseImportance: High - """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] - vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' - vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: b8b2b272-89f2-45d0-b922-6e988b20808b @@ -129,20 +74,18 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. 
- :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_libvirt_sca.py b/tests/foreman/virtwho/ui/test_libvirt_sca.py index 56c156a8380..ffef902441e 100644 --- a/tests/foreman/virtwho/ui/test_libvirt_sca.py +++ b/tests/foreman/virtwho/ui/test_libvirt_sca.py @@ -4,57 +4,27 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - -:Upstream: No """ import pytest -from fauxfactory import gen_string - -from robottelo.config import settings -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.libvirt.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.libvirt.hypervisor_username, - } - return form - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, +) class TestVirtwhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -67,33 +37,13 @@ def test_positive_deploy_configure_by_id_script( 4. Virtual sku can be generated and attached 5. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -103,20 +53,18 @@ def test_positive_hypervisor_id_option( 1. hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_nutanix.py b/tests/foreman/virtwho/ui/test_nutanix.py index fca28223f50..7ec05f191df 100644 --- a/tests/foreman/virtwho/ui/test_nutanix.py +++ b/tests/foreman/virtwho/ui/test_nutanix.py @@ -4,62 +4,35 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest from robottelo.config import settings -from robottelo.utils.virtwho import check_message_in_rhsm_log -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_hypervisor_ahv_mapping - - -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.ahv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.ahv.hypervisor_password, - 'hypervisor_content.prism_flavor': "Prism Element", - 'ahv_internal_debug': False, - } - return 
form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + check_message_in_rhsm_log, + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, + get_hypervisor_ahv_mapping, +) class TestVirtwhoConfigforNutanix: @pytest.mark.tier2 - def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, session, form_data): + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) + def test_positive_deploy_configure_by_id_script( + self, default_org, org_session, form_data_ui, deploy_type_ui + ): """Verify configure created and deployed with id. :id: becea4d0-db4e-4a85-93d2-d40e86da0e2f @@ -71,61 +44,32 @@ def test_positive_deploy_configure_by_id(self, default_org, virtwho_config, sess 4. Virtual sku can be generated and attached 5. Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, virtwho_config, session, form_data - ): - """Verify configure created and deployed with script. - - :id: 1c1b19c9-988c-4b86-a2b2-658fded10ccb - - :expectedresults: - 1. Config can be created and deployed by script - 2. No error msg in /var/log/rhsm/rhsm.log - 3. Report is sent to satellite - 4. Virtual sku can be generated and attached - 5. 
Config can be deleted - - :CaseLevel: Integration - - :CaseImportance: High - """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] - vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' - vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: e076a305-88f4-42fb-8ef2-cb55e38eb912 @@ -134,29 +78,27 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] - values = session.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) config_id = get_configure_id(name) config_command = values['deploy']['command'] config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, default_org, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type ): """Verify configure created and deployed with id on nutanix prism central mode @@ -170,44 +112,54 @@ def test_positive_prism_central_deploy_configure_by_id_script( 5. Virtual sku can be generated and attached 6. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') - form_data['name'] = name - form_data['hypervisor_content.prism_flavor'] = "Prism Central" - with session: - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['name'] = name + form_data_ui['hypervisor_content.prism_flavor'] = "Prism Central" + with org_session: + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) if deploy_type == "id": command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) elif deploy_type == "script": script = values['deploy']['script'] hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + script, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) # Check the option "prism_central=true" should be set in etc/virt-who.d/virt-who.conf config_id = get_configure_id(name) config_file = get_configure_file(config_id) assert get_configure_option("prism_central", config_file) == 'true' - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + assert ( + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] + == 'Unsubscribed hypervisor' + ) + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert ( + org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + ) + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + == 'Unentitled' + ) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) @pytest.mark.tier2 def test_positive_prism_central_prism_flavor_option( - self, default_org, virtwho_config, session, form_data + self, default_org, virtwho_config_ui, org_session, form_data_ui ): """Verify prism_flavor dropdown options. @@ -217,27 +169,27 @@ def test_positive_prism_central_prism_flavor_option( prism_flavor can be changed in virt-who-config-{}.conf if the dropdown option is selected to prism central. 
- :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] - results = session.virtwho_configure.read(name) + name = form_data_ui['name'] + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "element" config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) - session.virtwho_configure.edit(name, {'hypervisor_content.prism_flavor': "Prism Central"}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit( + name, {'hypervisor_content.prism_flavor': "Prism Central"} + ) + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "central" deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('prism_central', config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, default_org, virtwho_config, session, form_data + self, default_org, virtwho_config_ui, org_session, form_data_ui ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -251,43 +203,43 @@ def test_positive_ahv_internal_debug_option( 5. message Host UUID {system_uuid} found for VM: {guest_uuid} exist in rhsm.log 6. ahv_internal_debug bas been set to true in virt-who-config-X.conf 7. warning message does not exist in log file /var/log/rhsm/rhsm.log - :CaseLevel: Integration :CaseImportance: Medium :BZ: 2141719 :customerscenario: true """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) - values = session.virtwho_configure.read(name) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - results = session.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) assert str(results['overview']['ahv_internal_debug']) == 'False' # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message # Update ahv_internal_debug option to true - session.virtwho_configure.edit(name, {'ahv-internal-debug': True}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'ahv_internal_debug': True}) + results = org_session.virtwho_configure.read(name) command = results['deploy']['command'] assert str(results['overview']['ahv_internal_debug']) == 'True' deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_ui['hypervisor_type']) == 'Host 
UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' # ahv_internal_debug bas been set to true in virt-who-config-X.conf config_file = get_configure_file(config_id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' diff --git a/tests/foreman/virtwho/ui/test_nutanix_sca.py b/tests/foreman/virtwho/ui/test_nutanix_sca.py index 7dd104a38ec..bb2483195d1 100644 --- a/tests/foreman/virtwho/ui/test_nutanix_sca.py +++ b/tests/foreman/virtwho/ui/test_nutanix_sca.py @@ -4,61 +4,31 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin -:Team: Phoenix - -:TestType: Functional +:team: Phoenix-subscriptions -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.config import settings -from robottelo.utils.virtwho import check_message_in_rhsm_log -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import deploy_configure_by_script -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_id -from robottelo.utils.virtwho import get_configure_option -from robottelo.utils.virtwho import get_hypervisor_ahv_mapping - - -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.ahv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.ahv.hypervisor_password, - 'hypervisor_content.prism_flavor': "Prism Element", - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) +from robottelo.utils.virtwho import ( + check_message_in_rhsm_log, + deploy_configure_by_command, + deploy_configure_by_script, + get_configure_command, + get_configure_file, + get_configure_id, + get_configure_option, + get_hypervisor_ahv_mapping, +) class TestVirtwhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -70,33 +40,13 @@ def test_positive_deploy_configure_by_id_script( 3. Report is sent to satellite 4.
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -106,22 +56,20 @@ def test_positive_hypervisor_id_option( 1. hypervisor_id can be changed in virt-who-config-{}.conf if the dropdown option is selected to uuid/hwuuid/hostname. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) config_id = get_configure_id(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) for value in ['uuid', 'hostname']: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( command, - form_data['hypervisor_type'], + form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -130,7 +78,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, module_sca_manifest_org, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type ): """Verify configure created and deployed with id on nutanix prism central mode @@ -144,21 +92,19 @@ def test_positive_prism_central_deploy_configure_by_id_script( 5. Virtual sku can be generated and attached 6. 
Config can be deleted - :CaseLevel: Integration - :CaseImportance: High """ name = gen_string('alpha') - form_data['name'] = name - form_data['hypervisor_content.prism_flavor'] = "Prism Central" - with session_sca: - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(name) + form_data_ui['name'] = name + form_data_ui['hypervisor_content.prism_flavor'] = "Prism Central" + with org_session: + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) if deploy_type == "id": command = values['deploy']['command'] deploy_configure_by_command( command, - form_data['hypervisor_type'], + form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -166,7 +112,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( script = values['deploy']['script'] deploy_configure_by_script( script, - form_data['hypervisor_type'], + form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -174,13 +120,13 @@ def test_positive_prism_central_deploy_configure_by_id_script( config_id = get_configure_id(name) config_file = get_configure_file(config_id) assert get_configure_option("prism_central", config_file) == 'true' - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) @pytest.mark.tier2 def test_positive_prism_central_prism_flavor_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify prism_flavor dropdown options. @@ -190,29 +136,27 @@ def test_positive_prism_central_prism_flavor_option( 1. prism_flavor can be changed in virt-who-config-{}.conf if the dropdown option is selected to prism central. - :CaseLevel: Integration - :CaseImportance: Medium """ - name = form_data['name'] - results = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "element" config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) - session_sca.virtwho_configure.edit( + org_session.virtwho_configure.edit( name, {'hypervisor_content.prism_flavor': "Prism Central"} ) - results = session_sca.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "central" deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('prism_central', config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -227,44 +171,43 @@ def test_positive_ahv_internal_debug_option( 6. ahv_internal_debug bas been set to true in virt-who-config-X.conf 7. 
warning message does not exist in log file /var/log/rhsm/rhsm.log - :CaseLevel: Integration - :CaseImportance: Medium :BZ: 2141719 :customerscenario: true """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) - values = session_sca.virtwho_configure.read(name) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) - results = session_sca.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) assert str(results['overview']['ahv_internal_debug']) == 'False' # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message # Update ahv_internal_debug option to true - session_sca.virtwho_configure.edit(name, {'ahv_internal_debug': True}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'ahv_internal_debug': True}) + results = org_session.virtwho_configure.read(name) command = results['deploy']['command'] assert str(results['overview']['ahv_internal_debug']) == 'True' deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_ui['hypervisor_type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' # ahv_internal_debug bas been set to true in virt-who-config-X.conf config_file = get_configure_file(config_id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' diff --git a/tests/robottelo/conftest.py b/tests/robottelo/conftest.py index c093c969d12..dbd1fe41b50 100644 --- a/tests/robottelo/conftest.py +++ b/tests/robottelo/conftest.py @@ -1,10 +1,11 @@ +import contextlib import glob import os from pathlib import Path from tempfile import NamedTemporaryFile -import pytest from fauxfactory import gen_string +import pytest @pytest.fixture(scope='session', autouse=True) @@ -13,13 +14,13 @@ def align_to_satellite(): pass -@pytest.fixture(scope='function') +@pytest.fixture def dummy_test(request): """This should be indirectly parametrized to provide dynamic dummy_tests to exec_test""" return request.param -@pytest.fixture(scope='function') +@pytest.fixture def exec_test(request, dummy_test): """Create a temporary file with the string provided by dummy_test, and run it with pytest.main @@ -44,8 +45,6 @@ def exec_test(request, dummy_test): yield report_file for logfile in glob.glob('robottelo*.log'): os.remove(logfile) - try: - os.remove(report_file) - except OSError: + with contextlib.suppress(OSError): # the file might not exist if the test fails prematurely - pass + 
os.remove(report_file) diff --git a/tests/robottelo/test_api_utils.py b/tests/robottelo/test_api_utils.py deleted file mode 100644 index 3a78416366c..00000000000 --- a/tests/robottelo/test_api_utils.py +++ /dev/null @@ -1 +0,0 @@ -"""Unit tests for :mod:`robottelo.api.utils`.""" diff --git a/tests/robottelo/test_cli.py b/tests/robottelo/test_cli.py index 5c2ad05baa4..a2568f5a584 100644 --- a/tests/robottelo/test_cli.py +++ b/tests/robottelo/test_cli.py @@ -1,14 +1,16 @@ -import unittest from functools import partial +import unittest from unittest import mock import pytest from robottelo.cli.base import Base -from robottelo.cli.base import CLIBaseError -from robottelo.cli.base import CLIDataBaseError -from robottelo.cli.base import CLIError -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import ( + CLIBaseError, + CLIDataBaseError, + CLIError, + CLIReturnCodeError, +) class CLIClass(Base): @@ -144,11 +146,11 @@ def assert_response_error(self, expected_error, stderr='some error'): def test_add_operating_system(self, construct, execute): """Check command_sub edited when executing add_operating_system""" options = {'foo': 'bar'} - assert 'add-operatingsystem' != Base.command_sub + assert Base.command_sub != 'add-operatingsystem' assert execute.return_value == Base.add_operating_system(options) - assert 'add-operatingsystem' == Base.command_sub - construct.called_once_with(options) - execute.called_once_with(construct.return_value) + assert Base.command_sub == 'add-operatingsystem' + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -156,9 +158,9 @@ def test_add_create_with_empty_result(self, construct, execute): """Check command create when result is empty""" execute.return_value = [] assert execute.return_value == Base.create() - assert 'create' == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + assert Base.command_sub == 'create' + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) @mock.patch('robottelo.cli.base.Base.info') @mock.patch('robottelo.cli.base.Base.execute') @@ -167,9 +169,9 @@ def test_add_create_with_result_dct_without_id(self, construct, execute, info): """Check command create when result has dct but dct hasn't id key""" execute.return_value = [{'not_id': 'foo'}] assert execute.return_value == Base.create() - assert 'create' == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + assert Base.command_sub == 'create' + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) assert not info.called @mock.patch('robottelo.cli.base.Base.info') @@ -182,10 +184,10 @@ def test_add_create_with_result_dct_with_id_not_required_org(self, construct, ex execute.return_value = [{'id': 'foo', 'bar': 'bas'}] Base.command_requires_org = False assert execute.return_value == Base.create() - assert 'create' == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') - info.called_once_with({'id': 'foo'}) + assert Base.command_sub == 'create' + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, 
output_format='csv', timeout=None) + info.assert_called_once_with({'id': 'foo'}) @@ -197,10 +199,10 @@ def test_add_create_with_result_dct_with_id_required_org(self, construct, execut execute.return_value = [{'id': 'foo', 'bar': 'bas'}] Base.command_requires_org = True assert execute.return_value == Base.create({'organization-id': 'org-id'}) - assert 'create' == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') - info.called_once_with({'id': 'foo', 'organization-id': 'org-id'}) + assert Base.command_sub == 'create' + construct.assert_called_once_with({'organization-id': 'org-id'}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) + info.assert_called_once_with({'id': 'foo', 'organization-id': 'org-id'}) @@ -212,9 +214,9 @@ def test_add_create_with_result_dct_id_required_org_error(self, construct, execu Base.command_requires_org = True with pytest.raises(CLIError): Base.create() - assert 'create' == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, output_format='csv') + assert Base.command_sub == 'create' + construct.assert_called_once_with({}) + execute.assert_called_once_with(construct.return_value, output_format='csv', timeout=None) def assert_cmd_execution( self, construct, execute, base_method, cmd_sub, ignore_stderr=False, **base_method_kwargs ): """Assert Base class method successfully executed""" assert execute.return_value == base_method(**base_method_kwargs) assert cmd_sub == Base.command_sub - construct.called_once_with({}) - execute.called_once_with(construct.return_value, ignore_stderr=ignore_stderr) + construct.assert_called_once_with(base_method_kwargs.get('options')) + execute.assert_called_once_with( + construct.return_value, ignore_stderr=ignore_stderr, timeout=None + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -296,26 +300,46 @@ def test_exists_with_option_and_no_empty_return(self, lst_method): my_options = {'search': 'foo=bar'} response = Base.exists(my_options, search=['id', 1]) lst_method.assert_called_once_with(my_options) - assert 1 == response + assert response == 1 @mock.patch('robottelo.cli.base.Base.command_requires_org') - def test_info_requires_organization_id(self, _): + def test_info_requires_organization_id(self, _): # noqa: PT019 - not a fixture """Check info raises CLIError when organization-id is not present in options """ with pytest.raises(CLIError): Base.info() + def assert_alt_cmd_execution( + self, + construct, + execute, + base_method, + cmd_sub, + call_kwargs, + command_kwarg=True, + **base_method_kwargs, + ): + """Assert Base class method successfully executed""" + assert execute.return_value == base_method(**base_method_kwargs) + assert cmd_sub == Base.command_sub + construct.assert_called_once_with(base_method_kwargs.get('options')) + if command_kwarg: + execute.assert_called_once_with(command=construct.return_value, **call_kwargs) + else: + execute.assert_called_once_with(construct.return_value, **call_kwargs) + @mock.patch('robottelo.cli.base.hammer.parse_info') @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def
test_info_without_parsing_response(self, construct, execute, parse): """Check info method execution without parsing response""" - self.assert_cmd_execution( + self.assert_alt_cmd_execution( construct, execute, Base.info, 'info', + call_kwargs={'output_format': 'json', 'return_raw_response': None}, output_format='json', options={'organization-id': 1}, ) @@ -327,27 +351,24 @@ def test_info_without_parsing_response(self, construct, execute, parse): def test_info_parsing_response(self, construct, execute, parse): """Check info method execution parsing response""" parse.return_value = execute.return_value = 'some_response' - self.assert_cmd_execution( - construct, execute, Base.info, 'info', options={'organization-id': 1} + self.assert_alt_cmd_execution( + construct, + execute, + Base.info, + 'info', + call_kwargs={'output_format': None, 'return_raw_response': None}, + options={'organization-id': 1}, ) - parse.called_once_with('some_response') - - # @mock.patch('robottelo.cli.base.Base.command_requires_org') - # def test_list_requires_organization_id(self, _): - # """Check list raises CLIError with organization-id is not present in - # options - # """ - # with pytest.raises(CLIError): - # Base.list() + parse.assert_called_once_with('some_response') @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_list_with_default_per_page(self, construct, execute): """Check list method set per_page as 1000 by default""" assert execute.return_value == Base.list(options={'organization-id': 1}) - assert 'list' == Base.command_sub - construct.called_once_with({'per-page': 1000}) - execute.called_once_with(construct.return_value, output_format='csv') + assert Base.command_sub == 'list' + construct.assert_called_once_with({'organization-id': 1, 'per-page': 10000}) + execute.assert_called_once_with(construct.return_value, output_format='csv') @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') @@ -356,39 +377,80 @@ def test_list_without_per_page(self, construct, execute): list_with_per_page_false = partial( Base.list, per_page=False, options={'organization-id': 1} ) - self.assert_cmd_execution(construct, execute, list_with_per_page_false, 'list') + self.assert_alt_cmd_execution( + construct, + execute, + list_with_per_page_false, + 'list', + call_kwargs={'output_format': 'csv'}, + command_kwarg=False, + options={'organization-id': 1}, + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_puppet_classes(self, construct, execute): """Check puppet_classes method execution""" - self.assert_cmd_execution(construct, execute, Base.puppetclasses, 'puppet-classes') + self.assert_alt_cmd_execution( + construct, + execute, + Base.puppetclasses, + 'puppet-classes', + call_kwargs={'output_format': 'csv'}, + command_kwarg=False, + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_remove_operating_system(self, construct, execute): """Check remove_operating_system method execution""" - self.assert_cmd_execution( - construct, execute, Base.remove_operating_system, 'remove-operatingsystem' + self.assert_alt_cmd_execution( + construct, + execute, + Base.remove_operating_system, + 'remove-operatingsystem', + call_kwargs={}, + command_kwarg=False, ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_sc_params(self, construct, 
execute): """Check sc_params method execution""" - self.assert_cmd_execution(construct, execute, Base.sc_params, 'sc-params') + self.assert_alt_cmd_execution( + construct, + execute, + Base.sc_params, + 'sc-params', + call_kwargs={'output_format': 'csv'}, + command_kwarg=False, + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_set_parameter(self, construct, execute): """Check set_parameter method execution""" - self.assert_cmd_execution(construct, execute, Base.set_parameter, 'set-parameter') + self.assert_alt_cmd_execution( + construct, + execute, + Base.set_parameter, + 'set-parameter', + call_kwargs={}, + command_kwarg=False, + ) @mock.patch('robottelo.cli.base.Base.execute') @mock.patch('robottelo.cli.base.Base._construct_command') def test_update(self, construct, execute): """Check update method execution""" - self.assert_cmd_execution(construct, execute, Base.update, 'update') + self.assert_alt_cmd_execution( + construct, + execute, + Base.update, + 'update', + call_kwargs={'output_format': 'csv', 'return_raw_response': None}, + command_kwarg=False, + ) class CLIErrorTests(unittest.TestCase): diff --git a/tests/robottelo/test_cli_method_calls.py b/tests/robottelo/test_cli_method_calls.py index 04d4c65e8cd..765a24d2044 100644 --- a/tests/robottelo/test_cli_method_calls.py +++ b/tests/robottelo/test_cli_method_calls.py @@ -40,8 +40,8 @@ def test_cli_org_method_called(mocker, command_sub): options = {'foo': 'bar'} assert execute.return_value == getattr(Org, command_sub.replace('-', '_'))(options) assert command_sub == Org.command_sub - assert construct.called_once_with(options) - assert execute.called_once_with(construct.return_value) + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value) @pytest.mark.parametrize('command_sub', ['import-classes', 'refresh-features']) @@ -54,11 +54,11 @@ def test_cli_proxy_method_called(mocker, command_sub): options = {'foo': 'bar'} assert execute.return_value == getattr(Proxy, command_sub.replace('-', '_'))(options) assert command_sub == Proxy.command_sub - assert construct.called_once_with(options) - assert execute.called_once_with(construct.return_value) + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value) -@pytest.mark.parametrize('command_sub', ['synchronize', 'remove-content', 'upload-content']) +@pytest.mark.parametrize('command_sub', ['remove-content', 'upload-content']) def test_cli_repository_method_called(mocker, command_sub): """Check Repository methods are called and command_sub edited This is a parametrized test called by Pytest for each of Repository methods @@ -68,8 +68,8 @@ def test_cli_repository_method_called(mocker, command_sub): options = {'foo': 'bar'} assert execute.return_value == getattr(Repository, command_sub.replace('-', '_'))(options) assert command_sub == Repository.command_sub - assert construct.called_once_with(options) - assert execute.called_once_with(construct.return_value) + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value, output_format='csv', ignore_stderr=True) @pytest.mark.parametrize('command_sub', ['info', 'create']) @@ -94,5 +94,5 @@ def test_cli_subscription_method_called(mocker, command_sub): options = {'foo': 'bar'} assert execute.return_value == getattr(Subscription, command_sub.replace('-', '_'))(options) assert command_sub == Subscription.command_sub - assert 
construct.called_once_with(options) - assert execute.called_once_with(construct.return_value) + construct.assert_called_once_with(options) + execute.assert_called_once_with(construct.return_value, ignore_stderr=True, timeout=None) diff --git a/tests/robottelo/test_datafactory.py b/tests/robottelo/test_datafactory.py index 7d5a6c12b7e..b93d401ed89 100644 --- a/tests/robottelo/test_datafactory.py +++ b/tests/robottelo/test_datafactory.py @@ -13,7 +13,7 @@ class TestFilteredDataPoint: """Tests for :meth:`robottelo.utils.datafactory.filtered_datapoint` decorator""" - @pytest.fixture(scope="function") + @pytest.fixture def run_one_datapoint(self, request): # Modify run_one_datapoint on settings singleton based on the indirect param # default to false when not parametrized @@ -127,7 +127,7 @@ def test_return_type(self): ): assert isinstance(item, str) for item in datafactory.invalid_id_list(): - if not (isinstance(item, (str, int)) or item is None): + if not (isinstance(item, str | int) or item is None): pytest.fail('Unexpected data type') diff --git a/tests/robottelo/test_decorators.py b/tests/robottelo/test_decorators.py index ff333fbb7e1..6c5e60f568a 100644 --- a/tests/robottelo/test_decorators.py +++ b/tests/robottelo/test_decorators.py @@ -9,7 +9,7 @@ class TestCacheable: """Tests for :func:`robottelo.utils.decorators.cacheable`.""" - @pytest.fixture(scope="function") + @pytest.fixture def make_foo(self): mocked_object_cache_patcher = mock.patch.dict('robottelo.utils.decorators.OBJECT_CACHE') mocked_object_cache_patcher.start() @@ -28,12 +28,12 @@ def test_create_and_not_add_to_cache(self, make_foo): """ make_foo(cached=False) assert 'foo' not in decorators.OBJECT_CACHE - assert {} == decorators.OBJECT_CACHE + assert decorators.OBJECT_CACHE == {} def test_build_cache(self, make_foo): """Create a new object and add it to the cache.""" obj = make_foo(cached=True) - assert {'foo': {'id': 42}} == decorators.OBJECT_CACHE + assert decorators.OBJECT_CACHE == {'foo': {'id': 42}} assert id(decorators.OBJECT_CACHE['foo']) == id(obj) def test_return_from_cache(self, make_foo): diff --git a/tests/robottelo/test_dependencies.py b/tests/robottelo/test_dependencies.py index 71dbee35466..950cbeebec4 100644 --- a/tests/robottelo/test_dependencies.py +++ b/tests/robottelo/test_dependencies.py @@ -1,12 +1,11 @@ """Test important behavior in robottelo's direct dependencies""" +import contextlib def test_cryptography(): from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives import serialization - from cryptography.hazmat.primitives.asymmetric import padding - from cryptography.hazmat.primitives.asymmetric import rsa + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import padding, rsa fake_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() @@ -83,7 +82,7 @@ def test_productmd(): def test_pyotp(): import pyotp - fake_secret = 'JBSWY3DPEHPK3PXP' + fake_secret = 'JBSWY3DPEHPK3PXP' # notsecret totp = pyotp.TOTP(fake_secret) assert totp.now() @@ -111,18 +110,14 @@ def test_requests(): def test_tenacity(): - from tenacity import retry - from tenacity import stop_after_attempt - from tenacity import wait_fixed + from tenacity import retry, stop_after_attempt, wait_fixed @retry(stop=stop_after_attempt(3), wait=wait_fixed(1)) def test(): raise Exception('test') - try: + with contextlib.suppress(Exception): test() - 
except Exception: - pass def test_testimony(): diff --git a/tests/robottelo/test_func_locker.py b/tests/robottelo/test_func_locker.py index e59f8b913e8..d0da97431e0 100644 --- a/tests/robottelo/test_func_locker.py +++ b/tests/robottelo/test_func_locker.py @@ -1,8 +1,8 @@ import multiprocessing import os +from pathlib import Path import tempfile import time -from pathlib import Path import pytest @@ -35,9 +35,7 @@ def __init__(self): def read(self): with open(self.file_name) as cf: - content = cf.read() - - return content + return cf.read() def write(self, content): with open(self.file_name, 'wb') as cf: @@ -98,9 +96,10 @@ def simple_recursive_locking_function(): """try to trigger the same lock from the same process, an exception should be expected """ - with func_locker.locking_function(simple_locked_function): - with func_locker.locking_function(simple_locked_function): - pass + with func_locker.locking_function(simple_locked_function), func_locker.locking_function( + simple_locked_function + ): + pass return 'I should not be reached' @@ -126,9 +125,10 @@ def simple_function_to_lock(): def simple_with_locking_function(index=None): global counter_file time.sleep(0.05) - with func_locker.locking_function(simple_locked_function): - with open(_get_function_lock_path('simple_locked_function')) as rf: - content = rf.read() + with func_locker.locking_function(simple_locked_function), open( + _get_function_lock_path('simple_locked_function') + ) as rf: + content = rf.read() if index is not None: saved_counter = int(counter_file.read()) @@ -169,7 +169,7 @@ def simple_scoped_lock_function(): """This function does nothing; when called, the lock function must create a lock file """ - return None + return @func_locker.lock_function @@ -182,18 +182,18 @@ def simple_scoped_locking_function(): ): pass - return None + return def simple_function_not_locked(): """This function does nothing; when called with locking, an exception must be raised because this function is not locked """ - return None + return class TestFuncLocker: - @pytest.fixture(scope="function", autouse=True) + @pytest.fixture(autouse=True) def count_and_pool(self): global counter_file counter_file.write('0') @@ -234,9 +234,10 @@ def test_locker_file_location_when_in_class(self): content = '' assert str(os.getpid()) != content - with func_locker.locking_function(SimpleClass.simple_function_to_lock): - with open(file_path) as rf: - content = rf.read() + with func_locker.locking_function(SimpleClass.simple_function_to_lock), open( + file_path + ) as rf: + content = rf.read() assert str(os.getpid()) == content @@ -248,9 +249,10 @@ def test_locker_file_location_when_in_class(self): content = '' assert str(os.getpid()) != content - with func_locker.locking_function(SimpleClass.simple_function_to_lock_cls): - with open(file_path) as rf: - content = rf.read() + with func_locker.locking_function(SimpleClass.simple_function_to_lock_cls), open( + file_path + ) as rf: + content = rf.read() assert str(os.getpid()) == content @@ -294,9 +296,10 @@ def test_locker_file_location_when_in_class(self): else: content = '' assert str(os.getpid()) != content - with func_locker.locking_function(SimpleClass.SubClass.simple_function_to_lock_cls): - with open(file_path) as rf: - content = rf.read() + with func_locker.locking_function(SimpleClass.SubClass.simple_function_to_lock_cls), open( + file_path + ) as rf: + content = rf.read() assert str(os.getpid()) == content @@ -371,7 +374,9 @@ def test_recursive_lock_function(self, count_and_pool, recursive_function): """Ensure that
recursive calls to locked function is detected using lock_function decorator""" res = count_and_pool.apply_async(recursive_function, ()) - with pytest.raises(func_locker.FunctionLockerError, match=r'.*recursion detected.*'): + with pytest.raises( # noqa: PT012 + func_locker.FunctionLockerError, match=r'.*recursion detected.*' + ): try: res.get(timeout=5) except multiprocessing.TimeoutError: @@ -405,7 +410,7 @@ def test_scoped_with_locking(self): assert os.path.exists(lock_file_path) def test_negative_with_locking_not_locked(self): - - with pytest.raises(func_locker.FunctionLockerError, match=r'.*Cannot ensure locking.*'): - with func_locker.locking_function(simple_function_not_locked): - pass + with pytest.raises( + func_locker.FunctionLockerError, match=r'.*Cannot ensure locking.*' + ), func_locker.locking_function(simple_function_not_locked): + pass diff --git a/tests/robottelo/test_func_shared.py b/tests/robottelo/test_func_shared.py index 871a4a01298..4e70550a08d 100644 --- a/tests/robottelo/test_func_shared.py +++ b/tests/robottelo/test_func_shared.py @@ -2,19 +2,22 @@ import os import time +from fauxfactory import gen_integer, gen_string import pytest -from fauxfactory import gen_integer -from fauxfactory import gen_string - -from robottelo.utils.decorators.func_shared.file_storage import get_temp_dir -from robottelo.utils.decorators.func_shared.file_storage import TEMP_FUNC_SHARED_DIR -from robottelo.utils.decorators.func_shared.file_storage import TEMP_ROOT_DIR -from robottelo.utils.decorators.func_shared.shared import _NAMESPACE_SCOPE_KEY_TYPE -from robottelo.utils.decorators.func_shared.shared import _set_configured -from robottelo.utils.decorators.func_shared.shared import enable_shared_function -from robottelo.utils.decorators.func_shared.shared import set_default_scope -from robottelo.utils.decorators.func_shared.shared import shared -from robottelo.utils.decorators.func_shared.shared import SharedFunctionException + +from robottelo.utils.decorators.func_shared.file_storage import ( + TEMP_FUNC_SHARED_DIR, + TEMP_ROOT_DIR, + get_temp_dir, +) +from robottelo.utils.decorators.func_shared.shared import ( + _NAMESPACE_SCOPE_KEY_TYPE, + SharedFunctionException, + _set_configured, + enable_shared_function, + set_default_scope, + shared, +) DEFAULT_POOL_SIZE = 8 SIMPLE_TIMEOUT_VALUE = 3 @@ -160,13 +163,13 @@ def scope(self): # generate a new namespace scope = gen_string('alpha', 10) set_default_scope(scope) - yield scope + return scope - @pytest.fixture(scope='function', autouse=True) + @pytest.fixture(autouse=True) def enable(self): enable_shared_function(True) - @pytest.fixture(scope='function') + @pytest.fixture def pool(self): pool = multiprocessing.Pool(DEFAULT_POOL_SIZE) yield pool @@ -535,7 +538,7 @@ def test_function_kw_scope(self): """ prefixes = [f'pre_{i}' for i in range(10)] suffixes = [f'suf_{i}' for i in range(10)] - for prefix, suffix in zip(prefixes, suffixes): + for prefix, suffix in zip(prefixes, suffixes, strict=True): counter_value = gen_integer(min_value=2, max_value=10000) inc_string = basic_shared_counter_string( prefix=prefix, suffix=suffix, counter=counter_value diff --git a/tests/robottelo/test_helpers.py b/tests/robottelo/test_helpers.py index 8fa11e58836..0b39bb54b9e 100644 --- a/tests/robottelo/test_helpers.py +++ b/tests/robottelo/test_helpers.py @@ -1,8 +1,7 @@ """Tests for module ``robottelo.helpers``.""" import pytest -from robottelo.utils import slugify_component -from robottelo.utils import validate_ssh_pub_key +from robottelo.utils import 
slugify_component, validate_ssh_pub_key class FakeSSHResult: diff --git a/tests/robottelo/test_issue_handlers.py b/tests/robottelo/test_issue_handlers.py index bcdf0e58fba..4da0b6df011 100644 --- a/tests/robottelo/test_issue_handlers.py +++ b/tests/robottelo/test_issue_handlers.py @@ -1,18 +1,14 @@ +from collections import defaultdict import os import subprocess import sys -from collections import defaultdict -import pytest from packaging.version import Version +import pytest from pytest_plugins.issue_handlers import DEFAULT_BZ_CACHE_FILE -from robottelo.constants import CLOSED_STATUSES -from robottelo.constants import OPEN_STATUSES -from robottelo.constants import WONTFIX_RESOLUTIONS -from robottelo.utils.issue_handlers import add_workaround -from robottelo.utils.issue_handlers import is_open -from robottelo.utils.issue_handlers import should_deselect +from robottelo.constants import CLOSED_STATUSES, OPEN_STATUSES, WONTFIX_RESOLUTIONS +from robottelo.utils.issue_handlers import add_workaround, is_open, should_deselect class TestBugzillaIssueHandler: @@ -346,8 +342,8 @@ def test_bz_should_not_deselect(self): @pytest.mark.parametrize('issue', ["BZ123456", "XX:123456", "KK:89456", "123456", 999999]) def test_invalid_handler(self, issue): """Assert is_open w/ invalid handlers raise AttributeError""" + issue_deselect = should_deselect(issue) with pytest.raises(AttributeError): - issue_deselect = should_deselect(issue) is_open(issue) assert issue_deselect is None diff --git a/tests/robottelo/test_report.py b/tests/robottelo/test_report.py index e5f2090b21a..acdc41c6ad4 100644 --- a/tests/robottelo/test_report.py +++ b/tests/robottelo/test_report.py @@ -44,11 +44,13 @@ def test_junit_timestamps(exec_test, property_level): prop = [prop] try: assert 'start_time' in [p['@name'] for p in prop] - except KeyError as e: - raise AssertionError(f'Missing property node: "start_time": {e}') + except KeyError as err: + raise AssertionError(f'Missing property node: "start_time": {err}') from err try: for p in prop: if p['@name'] == 'start_time': datetime.datetime.strptime(p['@value'], XUNIT_TIME_FORMAT) - except ValueError as e: - raise AssertionError(f'Unable to parse datetime for "start_time" property node: {e}') + except ValueError as err: + raise AssertionError( + f'Unable to parse datetime for "start_time" property node: {err}' + ) from err diff --git a/tests/robottelo/test_shared_resource.py b/tests/robottelo/test_shared_resource.py new file mode 100644 index 00000000000..ff2146cd80a --- /dev/null +++ b/tests/robottelo/test_shared_resource.py @@ -0,0 +1,61 @@ +import multiprocessing +from pathlib import Path +import random +from threading import Thread +import time + +from robottelo.utils.shared_resource import SharedResource + + +def upgrade_action(*args, **kwargs): + print(f"Upgrading satellite with {args=} and {kwargs=}") + time.sleep(1) + print("Satellite upgraded!") + + +def run_resource(resource_name): + time.sleep(random.random() * 5) # simulate random pre-setup + with SharedResource(resource_name, upgrade_action) as resource: + assert Path(f"/tmp/{resource_name}.shared").exists() + time.sleep(5) # simulate setup actions + resource.ready() + time.sleep(1) # simulate cleanup actions + + +def test_shared_resource(): + """Test the SharedResource class.""" + with SharedResource("test_resource", upgrade_action, 1, 2, 3, foo="bar") as resource: + assert Path("/tmp/test_resource.shared").exists() + assert resource.is_main + assert not resource.is_recovering + assert resource.action == upgrade_action + 
assert resource.action_args == (1, 2, 3) + assert resource.action_kwargs == {"foo": "bar"} + assert not resource.action_is_recoverable + + resource.ready() + assert resource._check_all_status("ready") + + assert not Path("/tmp/test_resource.shared").exists() + + +def test_shared_resource_multiprocessing(): + """Test the SharedResource class with multiprocessing.""" + with multiprocessing.Pool(2) as pool: + pool.map(run_resource, ["test_resource_mp", "test_resource_mp"]) + + assert not Path("/tmp/test_resource_mp.shared").exists() + + +def test_shared_resource_multithreading(): + """Test the SharedResource class with multithreading.""" + t1 = Thread(target=run_resource, args=("test_resource_th",)) + t2 = Thread(target=run_resource, args=("test_resource_th",)) + + t1.start() + t2.start() + + t1.join() + t2.join() + + assert not Path("/tmp/test_resource_th.shared").exists() diff --git a/tests/upgrades/conftest.py b/tests/upgrades/conftest.py index 16d021445ed..1853efaf073 100644 --- a/tests/upgrades/conftest.py +++ b/tests/upgrades/conftest.py @@ -86,8 +86,8 @@ def test_capsule_post_upgrade_skipped(pre_upgrade_data): import json import os -import pytest from box import Box +import pytest from robottelo.logging import logger from robottelo.utils.decorators.func_locker import lock_function @@ -154,8 +154,7 @@ def get_entity_data(scenario_name): """ with open('scenario_entities') as pref: entity_data = json.load(pref) - entity_data = entity_data.get(scenario_name) - return entity_data + return entity_data.get(scenario_name) def get_all_entity_data(): @@ -171,8 +170,7 @@ def get_all_entity_data(): with scenario_name as keys and corresponding attribute data as values. """ with open('scenario_entities') as pref: - entity_data = json.load(pref) - return entity_data + return json.load(pref) def _read_test_data(test_node_id): @@ -230,7 +228,7 @@ def test_something_post_upgrade(pre_upgrade_data): dependant_on_functions = [] for marker in request.node.iter_markers(POST_UPGRADE_MARK): depend_on = marker.kwargs.get('depend_on') - if isinstance(depend_on, (list, tuple)): + if isinstance(depend_on, list | tuple): dependant_on_functions.extend(depend_on) elif depend_on is not None: dependant_on_functions.append(depend_on) @@ -304,17 +302,18 @@ def __initiate(config): global POST_UPGRADE global PRE_UPGRADE_TESTS_FILE_PATH PRE_UPGRADE_TESTS_FILE_PATH = getattr(config.option, PRE_UPGRADE_TESTS_FILE_OPTION) - if not [ - upgrade_mark - for upgrade_mark in (PRE_UPGRADE_MARK, POST_UPGRADE_MARK) - if upgrade_mark in config.option.markexpr - ]: - # Raise only if the `tests/upgrades` directory is selected - if 'upgrades' in config.args[0]: - pytest.fail( - f'For upgrade scenarios either {PRE_UPGRADE_MARK} or {POST_UPGRADE_MARK} mark ' - 'must be provided' - ) + if ( + not [ + upgrade_mark + for upgrade_mark in (PRE_UPGRADE_MARK, POST_UPGRADE_MARK) + if upgrade_mark in config.option.markexpr + ] + and 'upgrades' in config.args[0] + ): # Raise only if the `tests/upgrades` directory is selected + pytest.fail( + f'For upgrade scenarios either {PRE_UPGRADE_MARK} or {POST_UPGRADE_MARK} mark ' + 'must be provided' + ) if PRE_UPGRADE_MARK in config.option.markexpr: pre_upgrade_failed_tests = [] PRE_UPGRADE = True @@ -373,7 +372,7 @@ def pytest_collection_modifyitems(items, config): dependant_on_functions = [] for marker in item.iter_markers(POST_UPGRADE_MARK): depend_on = marker.kwargs.get('depend_on') - if isinstance(depend_on, (list, tuple)): + if isinstance(depend_on, list | tuple): dependant_on_functions.extend(depend_on) 
elif depend_on is not None: dependant_on_functions.append(depend_on) diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py index 982e9e945a7..11d37ccdab0 100644 --- a/tests/upgrades/test_activation_key.py +++ b/tests/upgrades/test_activation_key.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ActivationKeys :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest from requests.exceptions import HTTPError @@ -26,7 +21,7 @@ class TestActivationKey: operated/modified. """ - @pytest.fixture(scope='function') + @pytest.fixture def activation_key_setup(self, request, target_sat): """ The purpose of this fixture is to setup the activation key based on the provided @@ -44,8 +39,7 @@ def activation_key_setup(self, request, target_sat): ak = target_sat.api.ActivationKey( content_view=cv, organization=org, name=f"{request.param}_ak" ).create() - ak_details = {'org': org, "cv": cv, 'ak': ak, 'custom_repo': custom_repo} - yield ak_details + return {'org': org, "cv": cv, 'ak': ak, 'custom_repo': custom_repo} @pytest.mark.pre_upgrade @pytest.mark.parametrize( @@ -60,14 +54,14 @@ def test_pre_create_activation_key(self, activation_key_setup, target_sat): :steps: 1. Create the activation key. 2. Add subscription in the activation key. - 3: Check the subscription id of the activation key and compare it with custom_repos - product id. + 3. Check the subscription id of the activation key and compare it with custom_repos + product id. 4. Update the host collection in the activation key. :parametrized: yes :expectedresults: Activation key should be created successfully and its subscription id - should be same with custom repos product id. + should be the same as the custom repos product id. """ ak = activation_key_setup['ak'] org_subscriptions = target_sat.api.Subscription( @@ -95,7 +89,7 @@ def test_post_crud_activation_key(self, dependent_scenario_name, target_sat): 3. Delete activation key. :expectedresults: Activation key's entities should be the same after upgrade and activation - key update and delete should work. + key update and delete should work. """ pre_test_name = dependent_scenario_name org = target_sat.api.Organization().search(query={'search': f'name={pre_test_name}_org'}) diff --git a/tests/upgrades/test_bookmarks.py b/tests/upgrades/test_bookmarks.py index dde6b112a3b..ff53bcdb30b 100644 --- a/tests/upgrades/test_bookmarks.py +++ b/tests/upgrades/test_bookmarks.py @@ -4,21 +4,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Search :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from robottelo.constants import BOOKMARK_ENTITIES +from robottelo.constants import BOOKMARK_ENTITIES_SELECTION class TestPublicDisableBookmark: @@ -34,7 +29,7 @@ def test_pre_create_public_disable_bookmark(self, request, target_sat): :id: preupgrade-13904b14-6340-4b85-a56f-98080cf50a92 - :Steps: + :steps: 1. Create public disabled bookmarks before the upgrade for all system entities using available bookmark data. 
@@ -50,7 +45,7 @@ def test_pre_create_public_disable_bookmark(self, request, target_sat): :CaseImportance: Critical """ - for entity in BOOKMARK_ENTITIES: + for entity in BOOKMARK_ENTITIES_SELECTION: book_mark_name = entity["name"] + request.node.name bm = target_sat.api.Bookmark( controller=entity['controller'], @@ -71,18 +66,18 @@ def test_post_create_public_disable_bookmark(self, dependent_scenario_name, targ :id: postupgrade-13904b14-6340-4b85-a56f-98080cf50a92 - :Steps: + :steps: 1. Check the bookmark status after post-upgrade. 2. Remove the bookmark. :expectedresults: Public disabled bookmarks details for all the system entities - should be unchanged after upgrade. + should be unchanged after upgrade. :CaseImportance: Critical """ pre_test_name = dependent_scenario_name - for entity in BOOKMARK_ENTITIES: + for entity in BOOKMARK_ENTITIES_SELECTION: book_mark_name = entity["name"] + pre_test_name bm = target_sat.api.Bookmark().search(query={'search': f'name="{book_mark_name}"'})[0] assert bm.controller == entity['controller'] @@ -105,8 +100,7 @@ def test_pre_create_public_enable_bookmark(self, request, target_sat): :id: preupgrade-93c419db-66b4-4c9a-a82a-a6a68703881f - :Steps: - + :steps: 1. Create public enabled bookmarks before the upgrade for all system entities using available bookmark data. 2. Check the bookmark attribute(controller, name, query public) status @@ -121,7 +115,7 @@ def test_pre_create_public_enable_bookmark(self, request, target_sat): :customerscenario: true """ - for entity in BOOKMARK_ENTITIES: + for entity in BOOKMARK_ENTITIES_SELECTION: book_mark_name = entity["name"] + request.node.name bm = target_sat.api.Bookmark( controller=entity['controller'], @@ -141,18 +135,17 @@ def test_post_create_public_enable_bookmark(self, dependent_scenario_name, targe :id: postupgrade-93c419db-66b4-4c9a-a82a-a6a68703881f - :Steps: - + :steps: 1. Check the bookmark status after post-upgrade. 2. Remove the bookmark. :expectedresults: Public enabled bookmarks details for all the system entities - should be unchanged after upgrade. + should be unchanged after upgrade. :CaseImportance: Critical """ pre_test_name = dependent_scenario_name - for entity in BOOKMARK_ENTITIES: + for entity in BOOKMARK_ENTITIES_SELECTION: book_mark_name = entity["name"] + pre_test_name bm = target_sat.api.Bookmark().search(query={'search': f'name="{book_mark_name}"'})[0] assert bm.controller == entity['controller'] diff --git a/tests/upgrades/test_capsule.py b/tests/upgrades/test_capsule.py index fb6fc994ca4..c7cf63e9977 100644 --- a/tests/upgrades/test_capsule.py +++ b/tests/upgrades/test_capsule.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance +:CaseComponent: ForemanProxy -:CaseComponent: Capsule - -:Team: Endeavour - -:TestType: Functional +:Team: Platform :CaseImportance: High -:Upstream: No """ import os @@ -59,8 +54,7 @@ def test_pre_user_scenario_capsule_sync(self, target_sat, default_org, save_test :expectedresults: 1. The repo/rpm should be synced to satellite 2.
Activation key's environment id should be available in the content views environment - id's list - + id's list ak_name = ( settings.upgrade.capsule_ak[settings.upgrade.os] @@ -112,8 +106,7 @@ def test_post_user_scenario_capsule_sync( ak_env = ak.environment.read() org = ak.organization.read() content_view = target_sat.api.ContentView(id=pre_upgrade_data.get('content_view_id')).read() - pre_configured_capsule.nailgun_capsule.content_sync(timeout=3600) - pre_configured_capsule.wait_for_sync(timeout=9000) + pre_configured_capsule.nailgun_capsule.content_sync(timeout=7200) sat_repo_url = target_sat.get_published_repo_url( org=org.label, @@ -131,11 +124,16 @@ def test_post_user_scenario_capsule_sync( ) sat_files_urls = get_repo_files_urls_by_url(sat_repo_url) cap_files_urls = get_repo_files_urls_by_url(cap_repo_url) - assert len(sat_files_urls) == len(cap_files_urls) == constants.FAKE_1_YUM_REPOS_COUNT + assert ( + len(sat_files_urls) == constants.FAKE_1_YUM_REPOS_COUNT + ), 'upstream and satellite repo rpm counts are different' + assert len(sat_files_urls) == len( + cap_files_urls + ), 'satellite and capsule repo rpm counts are different' sat_files = {os.path.basename(f) for f in sat_files_urls} cap_files = {os.path.basename(f) for f in cap_files_urls} - assert sat_files == cap_files + assert sat_files == cap_files, 'satellite and capsule rpm basenames are different' for pkg in constants.FAKE_1_YUM_REPO_RPMS: assert pkg in sat_files, f'{pkg=} is not in the {repo=} on satellite' @@ -143,7 +141,7 @@ def test_post_user_scenario_capsule_sync( sat_files_md5 = [target_sat.md5_by_url(url) for url in sat_files_urls] cap_files_md5 = [target_sat.md5_by_url(url) for url in cap_files_urls] - assert sat_files_md5 == cap_files_md5 + assert sat_files_md5 == cap_files_md5, 'satellite and capsule rpm md5sums are different' class TestCapsuleSyncNewRepo: @@ -188,8 +186,7 @@ def test_post_user_scenario_capsule_sync_yum_repo( content_view.read().version[0].promote(data={'environment_ids': ak_env.id}) content_view_env = [env.id for env in content_view.read().environment] assert ak_env.id in content_view_env - pre_configured_capsule.nailgun_capsule.content_sync() - pre_configured_capsule.wait_for_sync(timeout=9000) + pre_configured_capsule.nailgun_capsule.content_sync(timeout=7200) sat_repo_url = target_sat.get_published_repo_url( org=default_org.label, @@ -208,11 +205,16 @@ def test_post_user_scenario_capsule_sync_yum_repo( sat_files_urls = get_repo_files_urls_by_url(sat_repo_url) cap_files_urls = get_repo_files_urls_by_url(cap_repo_url) - assert len(sat_files_urls) == len(cap_files_urls) == constants.FAKE_1_YUM_REPOS_COUNT + assert ( + len(sat_files_urls) == constants.FAKE_1_YUM_REPOS_COUNT + ), 'upstream and satellite repo rpm counts are different' + assert len(sat_files_urls) == len( + cap_files_urls + ), 'satellite and capsule repo rpm counts are different' sat_files = {os.path.basename(f) for f in sat_files_urls} cap_files = {os.path.basename(f) for f in cap_files_urls} - assert sat_files == cap_files + assert sat_files == cap_files, 'satellite and capsule rpm basenames are different' for pkg in constants.FAKE_1_YUM_REPO_RPMS: assert pkg in sat_files, f'{pkg=} is not in the {repo=} on satellite' @@ -220,4 +222,4 @@ def test_post_user_scenario_capsule_sync_yum_repo( sat_files_md5 = [target_sat.md5_by_url(url) for url in sat_files_urls] cap_files_md5 = [target_sat.md5_by_url(url) for url in cap_files_urls] - assert sat_files_md5 == cap_files_md5 + assert sat_files_md5 == cap_files_md5, 'satellite and
capsule rpm md5sums are different' diff --git a/tests/upgrades/test_classparameter.py b/tests/upgrades/test_classparameter.py index c22d39c48b1..b7f8d1c6ed1 100644 --- a/tests/upgrades/test_classparameter.py +++ b/tests/upgrades/test_classparameter.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Parameters +:CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import json @@ -51,7 +46,7 @@ class TestScenarioPositivePuppetParameterAndDatatypeIntact: """ @pytest.fixture(scope="class") - def _setup_scenario(self, class_target_sat): + def setup_scenario(self, class_target_sat): """Import some parametrized puppet classes. This is required to make sure that we have smart class variable available. Read all available smart class parameters for imported puppet class to @@ -87,7 +82,7 @@ def _validate_value(self, data, sc_param): :param sc_param: The Actual Value of parameter """ if data['sc_type'] == 'boolean': - assert sc_param.default_value == (True if data['value'] == '1' else False) + assert sc_param.default_value == (data['value'] == '1') elif data['sc_type'] == 'array': string_list = [str(element) for element in sc_param.default_value] assert str(string_list) == data['value'] @@ -100,7 +95,7 @@ def _validate_value(self, data, sc_param): @pytest.mark.pre_upgrade @pytest.mark.parametrize('count', list(range(1, 10))) def test_pre_puppet_class_parameter_data_and_type( - self, class_target_sat, count, _setup_scenario, save_test_data + self, class_target_sat, count, setup_scenario, save_test_data ): """Puppet Class parameters with different data type are created @@ -116,7 +111,7 @@ def test_pre_puppet_class_parameter_data_and_type( :expectedresults: The parameters are updated with different data types """ - save_test_data(_setup_scenario) + save_test_data(setup_scenario) data = _valid_sc_parameters_data()[count - 1] sc_param = class_target_sat.api.SmartClassParameters().search( query={'search': f'parameter="api_classparameters_scp_00{count}"'} @@ -130,9 +125,10 @@ def test_pre_puppet_class_parameter_data_and_type( self._validate_value(data, sc_param) @pytest.mark.post_upgrade(depend_on=test_pre_puppet_class_parameter_data_and_type) + @pytest.mark.usefixtures('_clean_scenario') @pytest.mark.parametrize('count', list(range(1, 10))) def test_post_puppet_class_parameter_data_and_type( - self, count, _clean_scenario, class_pre_upgrade_data, class_target_sat + self, count, class_pre_upgrade_data, class_target_sat ): """Puppet Class Parameters value and type is intact post upgrade diff --git a/tests/upgrades/test_client.py b/tests/upgrades/test_client.py index 95c93aebcae..8ca57aa4586 100644 --- a/tests/upgrades/test_client.py +++ b/tests/upgrades/test_client.py @@ -7,23 +7,16 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts-Content :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from broker import Broker -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE_NAME +from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME from robottelo.hosts import ContentHost @@ -93,17 +86,12 @@ def test_post_scenario_pre_client_package_installation( :expectedresults: The package is installed on client """ - client_name = pre_upgrade_data.get('rhel_client') - client_id = ( - module_target_sat.api.Host().search(query={'search':
f'name={client_name}'})[0].id - ) + client_hostname = pre_upgrade_data.get('rhel_client') + rhel_client = ContentHost.get_host_by_hostname(client_hostname) module_target_sat.cli.Host.package_install( - {'host-id': client_id, 'packages': FAKE_0_CUSTOM_PACKAGE_NAME} + {'host-id': rhel_client.nailgun_host.id, 'packages': FAKE_0_CUSTOM_PACKAGE_NAME} ) - # Verifies that package is really installed - rhel_client = Broker(host_class=ContentHost).from_inventory( - filter=f'@inv.hostname == "{client_name}"' - )[0] + # Verify that package is really installed result = rhel_client.execute(f"rpm -q {FAKE_0_CUSTOM_PACKAGE_NAME}") assert FAKE_0_CUSTOM_PACKAGE_NAME in result.stdout diff --git a/tests/upgrades/test_contentview.py b/tests/upgrades/test_contentview.py index 68e1b5918b0..2b581297b1e 100644 --- a/tests/upgrades/test_contentview.py +++ b/tests/upgrades/test_contentview.py @@ -4,24 +4,18 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ContentViews :Team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_alpha +import pytest from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants import RPM_TO_UPLOAD +from robottelo.constants import RPM_TO_UPLOAD, DataFile class TestContentView: @@ -93,6 +87,7 @@ def test_cv_postupgrade_scenario(self, request, target_sat, pre_upgrade_data): cv = target_sat.api.ContentView(organization=org.id).search( query={'search': f'name="{cv_name}"'} )[0] + request.addfinalizer(cv.delete) yum_repo = target_sat.api.Repository(organization=org.id).search( query={'search': f'name="{pre_test_name}_yum_repo"'} )[0] @@ -101,7 +96,6 @@ def test_cv_postupgrade_scenario(self, request, target_sat, pre_upgrade_data): query={'search': f'name="{pre_test_name}_file_repo"'} )[0] request.addfinalizer(file_repo.delete) - request.addfinalizer(cv.delete) cv.repository = [] cv.update(['repository']) assert len(cv.read_json()['repositories']) == 0 diff --git a/tests/upgrades/test_discovery.py b/tests/upgrades/test_discovery.py new file mode 100644 index 00000000000..c85e02e01f4 --- /dev/null +++ b/tests/upgrades/test_discovery.py @@ -0,0 +1,63 @@ +"""Test Discovery Plugin related Upgrade Scenarios + +:Requirement: UpgradedSatellite + +:CaseAutomation: Automated + +:CaseComponent: DiscoveryImage + +:Team: Rocket + +:CaseImportance: High + +""" +import re + +from packaging.version import Version +import pytest + + +class TestDiscoveryImage: + """Pre-upgrade and post-upgrade scenarios to test Foreman Discovery Image version. + + Test Steps: + 1. Before Satellite upgrade, check the FDI version on the Satellite + 2. Upgrade satellite. + 3. Check the FDI version on the Satellite after upgrade to make sure it's the same or greater. + """ + + @pytest.mark.pre_upgrade + def test_pre_upgrade_fdi_version(self, target_sat, save_test_data, request): + """Test FDI version before upgrade. + + :id: preupgrade-8c94841c-6791-4af0-aa9c-e54c8d8b9a92 + + :steps: + 1.
Check installed version of FDI + + :expectedresults: Version should be saved and checked post-upgrade + """ + target_sat.register_to_cdn() + target_sat.execute('foreman-maintain packages install -y foreman-discovery-image') + fdi_package = target_sat.execute('rpm -qa *foreman-discovery-image*').stdout + # Note: The regular expression handles the format digit.digit.digit or digit.digit.digit-digit in the output + pre_upgrade_version = Version(re.search(r'\d+\.\d+\.\d+(-\d+)?', fdi_package).group()) + save_test_data({'pre_upgrade_version': pre_upgrade_version}) + + @pytest.mark.post_upgrade(depend_on=test_pre_upgrade_fdi_version) + def test_post_upgrade_fdi_version(self, target_sat, pre_upgrade_data): + """Test FDI version post upgrade. + + :id: postupgrade-38bdecaa-2b50-434b-90b1-4aa2b600d04e + + :steps: + 1. Check installed version of FDI + + :expectedresults: Version should be greater than or equal to pre_upgrade version + """ + pre_upgrade_version = pre_upgrade_data.get('pre_upgrade_version') + fdi_package = target_sat.execute('rpm -qa *foreman-discovery-image*').stdout + # Note: The regular expression handles the format digit.digit.digit or digit.digit.digit-digit in the output + post_upgrade_version = Version(re.search(r'\d+\.\d+\.\d+(-\d+)?', fdi_package).group()) + assert post_upgrade_version >= pre_upgrade_version + target_sat.unregister() diff --git a/tests/upgrades/test_errata.py b/tests/upgrades/test_errata.py index c8d8d229b6b..ed2407d67c6 100644 --- a/tests/upgrades/test_errata.py +++ b/tests/upgrades/test_errata.py @@ -4,20 +4,14 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: ErrataManagement :Team: Phoenix-content -:TestType: Functional - :CaseImportance: Critical -:Upstream: No """ import pytest -from broker import Broker from wait_for import wait_for from robottelo import constants @@ -116,17 +110,15 @@ def test_pre_scenario_generate_errata_for_client( :id: preupgrade-88fd28e6-b4df-46c0-91d6-784859fd1c21 :steps: - 1. Create Product and Custom Yum Repo 2. Create custom tools, rhel repos and sync them 3. Create content view and publish it 4. Create activation key and add subscription 5. Register RHEL host to Satellite - 7. Generate Errata by installing outdated/older packages - 8. Check that errata applicability generated expected errata list for the given client. + 6. Generate Errata by installing outdated/older packages + 7. Check that errata applicability generated expected errata list for the given client. :expectedresults: - 1. The content host is created 2. errata count, erratum list will be generated to Satellite content host 3. All the expected errata are ready-to-be-applied on the client @@ -204,10 +196,10 @@ def test_post_scenario_errata_count_installation(self, target_sat, pre_upgrade_d 1. Recover pre_upgrade data for post_upgrade verification 2. Verify errata count has not changed on Satellite - 4. Verify the errata_ids - 5. Verify installation of errata is successfull - 6. Verify that the errata application updated packages on client - 7. Verify that all expected erratas were installed on client. + 3. Verify the errata_ids + 4. Verify installation of errata is successful + 5. Verify that the errata application updated packages on client + 6. Verify that all expected errata were installed on client. :expectedresults: 1.
errata count and erratum list should be the same after Satellite upgrade @@ -221,9 +213,7 @@ def test_post_scenario_errata_count_installation(self, target_sat, pre_upgrade_d custom_repo_id = pre_upgrade_data.get('custom_repo_id') activation_key = pre_upgrade_data.get('activation_key') organization_id = pre_upgrade_data.get('organization_id') - rhel_client = Broker(host_class=ContentHost).from_inventory( - filter=f'@inv.hostname == "{client_hostname}"' - )[0] + rhel_client = ContentHost.get_host_by_hostname(client_hostname) custom_yum_repo = target_sat.api.Repository(id=custom_repo_id).read() host = target_sat.api.Host().search(query={'search': f'activation_key={activation_key}'})[0] assert host.id == rhel_client.nailgun_host.id, 'Host not found in Satellite' diff --git a/tests/upgrades/test_host.py b/tests/upgrades/test_host.py index a3a50fa334f..b60585d63bd 100644 --- a/tests/upgrades/test_host.py +++ b/tests/upgrades/test_host.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Hosts :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest class TestScenarioPositiveGCEHostComputeResource: @@ -77,7 +72,7 @@ def class_host( Later in tests this host will be used to perform assertions """ - host = sat_gce.api.Host( + return sat_gce.api.Host( architecture=sat_gce_default_architecture, compute_attributes=self.compute_attrs, domain=sat_gce_domain, @@ -90,7 +85,6 @@ def class_host( image=module_gce_finishimg, root_pass=gen_string('alphanumeric'), ).create() - yield host def google_host(self, googleclient): """Returns the Google Client Host object to perform the assertions""" @@ -114,9 +108,7 @@ def test_pre_create_gce_cr_and_host( :id: 889975f2-56ca-4584-95a7-21c513969630 - :CaseLevel: Component - - ::CaseImportance: Critical + :CaseImportance: Critical :steps: 1.
Create a GCE Compute Resource @@ -169,6 +161,7 @@ def test_post_create_gce_cr_and_host( pre_upgrade_host = sat_gce.api.Host().search( query={'search': f'name={pre_upgrade_data.provision_host_name}'} )[0] + request.addfinalizer(pre_upgrade_host.delete) org = sat_gce.api.Organization(id=pre_upgrade_host.organization.id).read() loc = sat_gce.api.Location(id=pre_upgrade_host.location.id).read() domain = sat_gce.api.Domain(id=pre_upgrade_host.domain.id).read() @@ -193,7 +186,6 @@ def test_post_create_gce_cr_and_host( image=image, root_pass=gen_string('alphanumeric'), ).create() - request.addfinalizer(pre_upgrade_host.delete) request.addfinalizer(host.delete) assert host.name == f"{self.hostname.lower()}.{domain.name}" assert host.build_status_label == 'Installed' diff --git a/tests/upgrades/test_hostcontent.py b/tests/upgrades/test_hostcontent.py index 54b102679b9..0a6a60e2a9e 100644 --- a/tests/upgrades/test_hostcontent.py +++ b/tests/upgrades/test_hostcontent.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Host-Content +:CaseComponent: Hosts-Content :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/upgrades/test_hostgroup.py b/tests/upgrades/test_hostgroup.py index dc6c4d8d91b..a6d67a106db 100644 --- a/tests/upgrades/test_hostgroup.py +++ b/tests/upgrades/test_hostgroup.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: HostGroup :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest class TestHostgroup: diff --git a/tests/upgrades/test_performance_tuning.py b/tests/upgrades/test_performance_tuning.py index ba5f79d1b8b..33237e8f561 100644 --- a/tests/upgrades/test_performance_tuning.py +++ b/tests/upgrades/test_performance_tuning.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: Installer +:CaseComponent: Installation :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import filecmp @@ -31,7 +26,7 @@ class TestScenarioPerformanceTuning: Test Steps:: 1. Before satellite upgrade. - - Apply the medium tune size using satellite-installer. + - Apply non-default tuning size using satellite-installer. - Check the tuning status and their set tuning parameters after applying the new size. 2. Upgrade the satellite. 3. Verify the following points. @@ -49,17 +44,17 @@ class TestScenarioPerformanceTuning: @pytest.mark.pre_upgrade def test_pre_performance_tuning_apply(self, target_sat): - """In preupgrade scenario we apply the medium tuning size. + """In preupgrade scenario we apply non-default tuning size. :id: preupgrade-83404326-20b7-11ea-a370-48f17f1fc2e1 :steps: 1. collect the custom_hira.yaml file before upgrade. - 2. Update the tuning size to medium. + 2. Update the tuning size to non-default. 3. Check the updated tuning size. 4. If something gets wrong with updated tune size then restore the default tune size. - :expectedresults: Medium tuning parameter should be applied. + :expectedresults: Non-default tuning parameter should be applied. 
""" try: @@ -67,12 +62,12 @@ def test_pre_performance_tuning_apply(self, target_sat): local_path="custom-hiera-before-upgrade.yaml", remote_path="/etc/foreman-installer/custom-hiera.yaml", ) - installer_obj = InstallerCommand(tuning='medium') + installer_obj = InstallerCommand(tuning='development') command_output = target_sat.execute(installer_obj.get_command(), timeout='30m') assert 'Success!' in command_output.stdout installer_obj = InstallerCommand(help='tuning') command_output = target_sat.execute(installer_obj.get_command()) - assert 'default: "medium"' in command_output.stdout + assert 'default: "development"' in command_output.stdout except Exception as exp: logger.critical(exp) @@ -92,17 +87,17 @@ def test_post_performance_tuning_apply(self, target_sat): :steps: 1. Check the tuning size. 2. Compare the custom-hiera.yaml file. - 3. Change the tuning size from medium to default. + 3. Change the tuning size from non-default to default. :expectedresults: - 1. medium tune parameter should be unchanged after upgrade. + 1. non-default tuning parameter should be set after upgrade. 2. custom-hiera.yaml file should be unchanged after upgrade. 3. tuning parameter update should work after upgrade. """ installer_obj = InstallerCommand(help='tuning') command_output = target_sat.execute(installer_obj.get_command(), timeout='30m') - assert 'default: "medium"' in command_output.stdout + assert 'default: "development"' in command_output.stdout target_sat.get( local_path="custom-hiera-after-upgrade.yaml", remote_path="/etc/foreman-installer/custom-hiera.yaml", diff --git a/tests/upgrades/test_provisioningtemplate.py b/tests/upgrades/test_provisioningtemplate.py new file mode 100644 index 00000000000..7c603c30a53 --- /dev/null +++ b/tests/upgrades/test_provisioningtemplate.py @@ -0,0 +1,136 @@ +"""Test for ProvisioningTemplates related Upgrade Scenario's + +:Requirement: UpgradedSatellite + +:CaseAutomation: Automated + +:CaseComponent: ProvisioningTemplates + +:Team: Rocket + +:CaseImportance: High + +""" +from fauxfactory import gen_string +import pytest + +from robottelo.config import settings + +provisioning_template_kinds = ['provision', 'PXEGrub', 'PXEGrub2', 'PXELinux', 'iPXE'] + + +class TestScenarioPositiveProvisioningTemplates: + """Provisioning Templates can be rendered correctly on host created in previous and upgraded versions + + :steps: + 1. Create host on Satellite and trying rendering provisioning templates + 2. Upgrade the Satellite to the next or latest version. + 3. After the upgrade, verify provisioning templates can be rendered on existing host + 4. Create host on upgraded Satellite and trying rendering provisioning templates. + + :expectedresults: + 1. Provisioning templates for host are able to render in previous and upgraded versions + """ + + @pytest.mark.pre_upgrade + @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) + def test_pre_scenario_provisioning_templates( + self, + module_target_sat, + module_org, + module_location, + default_os, + default_domain, + default_architecture, + default_partitiontable, + pxe_loader, + save_test_data, + ): + """Verify Host created Read the Provision template + + :id: preupgrade-3f338475-fa69-43ef-ac86-f00f4d324b21 + + :steps: + 1. Create host on Satellite and trying rendering provisioning templates + + :expectedresults: + 1. 
Provisioning templates for the host render successfully before upgrading to the new version + + :parametrized: yes + """ + host = module_target_sat.api.Host( + organization=module_org, + location=module_location, + name=gen_string('alpha'), + operatingsystem=default_os, + architecture=default_architecture, + domain=default_domain, + root_pass=settings.provisioning.host_root_password, + ptable=default_partitiontable, + pxe_loader=pxe_loader.pxe_loader, + ).create() + + for kind in provisioning_template_kinds: + assert host.read_template(data={'template_kind': kind}) + + save_test_data( + { + 'provision_host_id': host.id, + 'pxe_loader': pxe_loader.pxe_loader, + } + ) + + @pytest.mark.post_upgrade(depend_on=test_pre_scenario_provisioning_templates) + @pytest.mark.parametrize('pre_upgrade_data', ['bios', 'uefi'], indirect=True) + def test_post_scenario_provisioning_templates( + self, + request, + pre_upgrade_data, + module_target_sat, + ): + """Verify provisioning templates render for existing and new hosts after upgrade + + :id: postupgrade-ef82143d-efef-49b2-9702-93d67ef6805e + + :steps: + 1. Post upgrade, verify provisioning template rendering for the existing host + 2. Create a new host on Satellite and try rendering provisioning templates + + :expectedresults: + 1. Provisioning templates for the existing and new hosts are able to render. + + :parametrized: yes + """ + pxe_loader = pre_upgrade_data.pxe_loader + pre_upgrade_host = module_target_sat.api.Host().search( + query={'search': f'id={pre_upgrade_data.provision_host_id}'} + )[0] + request.addfinalizer(pre_upgrade_host.delete) + org = module_target_sat.api.Organization(id=pre_upgrade_host.organization.id).read() + loc = module_target_sat.api.Location(id=pre_upgrade_host.location.id).read() + domain = module_target_sat.api.Domain(id=pre_upgrade_host.domain.id).read() + architecture = module_target_sat.api.Architecture( + id=pre_upgrade_host.architecture.id + ).read() + os = module_target_sat.api.OperatingSystem(id=pre_upgrade_host.operatingsystem.id).read() + ptable = module_target_sat.api.PartitionTable(id=pre_upgrade_host.ptable.id).read() + + for kind in provisioning_template_kinds: + assert pre_upgrade_host.read_template(data={'template_kind': kind}) + + new_host_name = gen_string('alpha') + new_host = module_target_sat.api.Host( + name=new_host_name, + organization=org, + location=loc, + architecture=architecture, + domain=domain, + operatingsystem=os, + ptable=ptable, + root_pass=settings.provisioning.host_root_password, + pxe_loader=pxe_loader, + ).create() + request.addfinalizer(new_host.delete) + + for kind in provisioning_template_kinds: + assert new_host.read_template(data={'template_kind': kind}) diff --git a/tests/upgrades/test_puppet.py b/tests/upgrades/test_puppet.py index 96af4c82de6..b0c96394cc8 100644 --- a/tests/upgrades/test_puppet.py +++ b/tests/upgrades/test_puppet.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Puppet :Team: Rocket -:TestType: Functional - :CaseImportance: Medium -:Upstream: No """ import pytest @@ -23,8 +18,7 @@ def upgrade_server(request, module_target_sat, pre_configured_capsule): if request.param: return module_target_sat - else: - return pre_configured_capsule + return pre_configured_capsule class TestPuppet: diff --git a/tests/upgrades/test_remoteexecution.py b/tests/upgrades/test_remoteexecution.py index 78af475c8da..62d469e5bc9 100644 --- a/tests/upgrades/test_remoteexecution.py +++ b/tests/upgrades/test_remoteexecution.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance -
:CaseComponent: RemoteExecution :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/upgrades/test_repository.py b/tests/upgrades/test_repository.py index c221336d925..31233d820c6 100644 --- a/tests/upgrades/test_repository.py +++ b/tests/upgrades/test_repository.py @@ -4,24 +4,22 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Repositories :Team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest -from broker import Broker from robottelo.config import settings -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME -from robottelo.constants import FAKE_4_CUSTOM_PACKAGE_NAME +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + FAKE_0_CUSTOM_PACKAGE_NAME, + FAKE_4_CUSTOM_PACKAGE_NAME, + REPOS, +) from robottelo.hosts import ContentHost UPSTREAM_USERNAME = 'rTtest123' @@ -198,8 +196,178 @@ def test_post_scenario_custom_repo_check(self, target_sat, pre_upgrade_data): data={'environment_ids': lce_id} ) - rhel_client = Broker(host_class=ContentHost).from_inventory( - filter=f'@inv.hostname == "{client_hostname}"' - )[0] + rhel_client = ContentHost.get_host_by_hostname(client_hostname) result = rhel_client.execute(f'yum install -y {FAKE_4_CUSTOM_PACKAGE_NAME}') assert result.status == 0 + + +class TestScenarioCustomRepoOverrideCheck: + """Scenario test to verify that repositories in a non-SCA org set to "Enabled" + should be overridden to "Enabled(Override)" when upgrading to 6.14. + + Test Steps: + + 1. Before Satellite upgrade. + 2. Create a new Organization and Location. + 3. Create Product, Custom Repository, Content view. + 4. Create Activation Key and add Subscription. + 5. Create a Content Host, register it, and check Repository Sets for the enabled Repository. + 6. Upgrade Satellite. + 7. Search Host to verify Repository Set is set to Enabled(Override). + + BZ: 1265120 + """ + + @pytest.mark.pre_upgrade + def test_pre_scenario_custom_repo_sca_toggle( + self, + target_sat, + function_org, + function_product, + function_lce, + sat_upgrade_chost, + save_test_data, + default_location, + ): + """This is a pre-upgrade scenario test to verify that repositories in a non-SCA org + set to "Enabled" should be overridden to "Enabled(Override)" when upgrading to 6.14. + + :id: preupgrade-65e1e312-a743-4605-b226-f580f523377f + + :steps: + 1. Before Satellite upgrade. + 2. Create a new Organization and Location. + 3. Create Product, Custom Repository, Content view. + 4. Create Activation Key and add Subscription. + 5. Create a Content Host, register it, and check Repository Sets for the enabled Repository. + + :expectedresults: + + 1. Custom Repository is created. + 2. Custom Repository is enabled on Host.
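The post-upgrade half of this scenario (next hunk) greps subscription-manager's override listing for a repository id composed from the org, product, and repo names. A self-contained sketch of that check, with purely illustrative names and sample output:

# Illustrative only: names and stdout below are invented to mirror the assertion.
org_name, product_name, repo_name = 'ExampleOrg', 'ExampleProduct', 'ExampleRepo'
sample_stdout = 'repo: ExampleOrg_ExampleProduct_ExampleRepo\nenabled: 1\n'
assert 'enabled: 1' in sample_stdout
assert f'{org_name}_{product_name}_{repo_name}' in sample_stdout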
+ + """ + repo = target_sat.api.Repository( + product=function_product.id, url=settings.repos.yum_1.url + ).create() + repo.sync() + content_view = target_sat.publish_content_view(function_org, repo) + content_view.version[0].promote(data={'environment_ids': function_lce.id}) + ak = target_sat.api.ActivationKey( + content_view=content_view, organization=function_org.id, environment=function_lce + ).create() + if not target_sat.is_sca_mode_enabled(function_org.id): + subscription = target_sat.api.Subscription(organization=function_org).search( + query={'search': f'name={function_product.name}'} + )[0] + ak.add_subscriptions(data={'subscription_id': subscription.id}) + sat_upgrade_chost.register(function_org, default_location, ak.name, target_sat) + product_details = sat_upgrade_chost.execute('subscription-manager repos --list') + assert 'Enabled: 1' in product_details.stdout + + save_test_data( + { + 'rhel_client': sat_upgrade_chost.hostname, + 'org_name': function_org.name, + 'product_name': function_product.name, + 'repo_name': repo.name, + 'product_details': product_details.stdout, + } + ) + + @pytest.mark.post_upgrade(depend_on=test_pre_scenario_custom_repo_sca_toggle) + def test_post_scenario_custom_repo_sca_toggle(self, pre_upgrade_data): + """This is a post-upgrade scenario test to verify that repositories in a non-sca + Organization set to "Enabled" should be overridden to "Enabled(Override)" + when upgrading to 6.14. + + :id: postupgrade-cc392ce3-f3bb-4cf3-afd5-c062e3a5d109 + + :steps: + 1. After upgrade, search Host to verify Repository Set is set to + Enabled(Override). + + + :expectedresults: Repository on Host should be overridden. + + """ + client_hostname = pre_upgrade_data.get('rhel_client') + org_name = pre_upgrade_data.get('org_name') + product_name = pre_upgrade_data.get('product_name') + repo_name = pre_upgrade_data.get('repo_name') + rhel_client = ContentHost.get_host_by_hostname(client_hostname) + result = rhel_client.execute('subscription-manager repo-override --list') + assert 'enabled: 1' in result.stdout + assert f'{org_name}_{product_name}_{repo_name}' in result.stdout + + +class TestScenarioLargeRepoSyncCheck: + """Scenario test to verify that large repositories can be synced without + failure after an upgrade. + + Test Steps: + + 1. Before Satellite upgrade. + 2. Enable and sync large RH repository. + 3. Upgrade Satellite. + 4. Enable and sync a second large repository. + + BZ: 2043144 + + :customerscenario: true + """ + + @pytest.mark.pre_upgrade + def test_pre_scenario_sync_large_repo( + self, target_sat, module_entitlement_manifest_org, save_test_data + ): + """This is a pre-upgrade scenario to verify that users can sync large repositories + before an upgrade + + :id: afb957dc-c509-4009-ac85-4b71b64d3c74 + + :steps: + 1. Enable a large redhat repository + 2. 
Sync the repository and assert the sync succeeds + + :expectedresults: Large repositories should sync successfully + """ + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=REPOS['rhel8_bos']['product'], + repo=REPOS['rhel8_bos']['name'], + reposet=REPOS['rhel8_bos']['reposet'], + releasever=REPOS['rhel8_bos']['releasever'], + ) + repo = target_sat.api.Repository(id=rh_repo_id).read() + res = repo.sync(timeout=2000) + assert res['result'] == 'success' + save_test_data({'org_id': module_entitlement_manifest_org.id}) + + @pytest.mark.post_upgrade(depend_on=test_pre_scenario_sync_large_repo) + def test_post_scenario_sync_large_repo(self, target_sat, pre_upgrade_data): + """This is a post-upgrade scenario to verify that large repositories can be + synced after an upgrade + + :id: 7bdbb2ac-7197-4e1a-8163-5852943eb49b + + :steps: + 1. Sync large repository + 2. Upgrade satellite + 3. Sync a second large repository in that same organization + + :expectedresults: Large repository syncs should succeed after an upgrade + """ + org_id = pre_upgrade_data.get('org_id') + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=org_id, + product=REPOS['rhel8_aps']['product'], + repo=REPOS['rhel8_aps']['name'], + reposet=REPOS['rhel8_aps']['reposet'], + releasever=REPOS['rhel8_aps']['releasever'], + ) + repo = target_sat.api.Repository(id=rh_repo_id).read() + res = repo.sync(timeout=4000) + assert res['result'] == 'success' diff --git a/tests/upgrades/test_role.py b/tests/upgrades/test_role.py index 2202ade883d..c2ee4b25d20 100644 --- a/tests/upgrades/test_role.py +++ b/tests/upgrades/test_role.py @@ -4,17 +4,12 @@ :CaseAutomation: NotAutomated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/upgrades/test_satellite_maintain.py b/tests/upgrades/test_satellite_maintain.py index fe7a3bb10af..03abb569e94 100644 --- a/tests/upgrades/test_satellite_maintain.py +++ b/tests/upgrades/test_satellite_maintain.py @@ -1,27 +1,23 @@ -"""Test for Satellite-maintain related Upgrade Scenario's +"""satellite-maintain Upgrade Scenarios :Requirement: UpgradedSatellite :CaseAutomation: Automated -:CaseLevel: Acceptance - -:CaseComponent: ForemanMaintain +:CaseComponent: SatelliteMaintain :Team: Platform -:TestType: Functional - :CaseImportance: High -:Upstream: No """ +import re + import pytest class TestSatelliteMaintain: - """The test class contains pre-upgrade and post-upgrade scenarios to test - satellite-maintain utility + """Pre-upgrade and post-upgrade scenarios to test satellite-maintain utility. Test Steps: 1. Before Satellite upgrade, Perform test for "satellite-maintain upgrade list-versions" @@ -32,74 +28,18 @@ class TestSatelliteMaintain: @staticmethod def satellite_upgradable_version_list(sat_obj): - """ - This function is used to collect the details of satellite version and upgradable - version list. - - :return: satellite_version, upgradeable_version, major_version_change - """ + """Obtain the list of upgradable versions from satellite-maintain.
- cmd = "rpm -q satellite > /dev/null && rpm -q satellite --queryformat=%{VERSION}" - # leaving this section as-is for now but this could be refactored to use sat_obj.version - satellite_version = sat_obj.execute(cmd) - if satellite_version.status == 0: - satellite_version = satellite_version.stdout - else: - return [], [], None, None - satellite_maintain_version = sat_obj.execute( - "satellite-maintain upgrade list-versions --disable-self-upgrade" - ) - upgradeable_version = [ - version for version in satellite_maintain_version.stdout if version != '' - ] - version_change = 0 - for version in upgradeable_version: - version_change += int(version.split('.')[0]) - if version_change % 2 == 0: - major_version_change = False - y_version = '' - else: - major_version_change = True - y_version = list(set(satellite_maintain_version) - set(satellite_version))[0].split( - '.' - )[-1] - - return satellite_version, upgradeable_version, major_version_change, y_version - - @staticmethod - def version_details( - satellite_version, major_version_change, y_version, upgrade_stage="pre-upgrade" - ): - """ - This function is used to update the details of zstream upgrade and - next version upgrade - :param str satellite_version: satellite version would be like 6.5.0, 6.6.0, 6.7.0 - :param bool major_version_change: For major version upgrade like 6.8 to 7.0, 7.0 - to 8.0 etc, then major_version_change would be True. - :param str y_version: y_version change depends on major_version_change - :param str upgrade_stage: upgrade stage would be pre or post. - :return: zstream_version, next_version + :return: upgradeable_versions """ - - major_version = satellite_version.split('.')[0:1] - if major_version_change: - major_version = [int(major_version[0]) + 1].append(y_version) - else: - y_version = int(satellite_version.split('.')[0:2][-1]) - zstream_version = '' - if upgrade_stage == "pre-upgrade": - major_version.append(str(y_version + 1)) - zstream_version = ".".join(satellite_version.split('.')[0:2]) + ".z" - else: - major_version.append(str(y_version)) - major_version.append("z") - next_version = ".".join(major_version) - return zstream_version, next_version + cmd = 'satellite-maintain upgrade list-versions --disable-self-upgrade' + list_versions = sat_obj.execute(cmd).stdout + regex = re.compile(r'^\d+\.\d+') + return [version for version in list_versions if regex.match(version)] @pytest.mark.pre_upgrade def test_pre_satellite_maintain_upgrade_list_versions(self, target_sat): - """Pre-upgrade sceanrio that tests list of satellite version - which satellite can be upgraded. + """Test list of satellite target versions before upgrade. :id: preupgrade-fc2c54b2-2663-11ea-b47c-48f17f1fc2e1 @@ -107,29 +47,15 @@ def test_pre_satellite_maintain_upgrade_list_versions(self, target_sat): 1. Run satellite-maintain upgrade list-versions :expectedresults: Versions should be current z-stream. - """ - ( - satellite_version, - upgradable_version, - major_version_change, - y_version, - ) = self.satellite_upgradable_version_list(target_sat) - if satellite_version: - # In future If satellite-maintain packages update add before - # pre-upgrade test case execution then next version kind of - # stuff check we can add it here. 
- zstream_version, next_version = self.version_details( - satellite_version[0], major_version_change, y_version - ) - else: - zstream_version = -1 - assert zstream_version in upgradable_version + zstream = '.'.join(target_sat.version.split('.')[0:2]) + '.z' + upgradable_versions = self.satellite_upgradable_version_list(target_sat) + # only possible target should be appropriate zstream + assert set(upgradable_versions) - {zstream} == set() @pytest.mark.post_upgrade def test_post_satellite_maintain_upgrade_list_versions(self, target_sat): - """Post-upgrade sceanrio that tests list of satellite version - which satellite can be upgraded. + """Test list of satellite target versions after upgrade. :id: postupgrade-0bce689c-2664-11ea-b47c-48f17f1fc2e1 @@ -137,18 +63,8 @@ def test_post_satellite_maintain_upgrade_list_versions(self, target_sat): 1. Run satellite-maintain upgrade list-versions. :expectedresults: Versions should be next z-stream. - """ - ( - satellite_version, - upgradable_version, - major_version_change, - y_version, - ) = self.satellite_upgradable_version_list(target_sat) - if satellite_version: - zstream_version, next_version = self.version_details( - satellite_version[0], major_version_change, y_version, upgrade_stage="post-upgrade" - ) - else: - next_version = -1 - assert next_version in upgradable_version + zstream = '.'.join(target_sat.version.split('.')[0:2]) + '.z' + upgradable_versions = self.satellite_upgradable_version_list(target_sat) + # only possible target should be appropriate zstream + assert set(upgradable_versions) - {zstream} == set() diff --git a/tests/upgrades/test_satellitesync.py b/tests/upgrades/test_satellitesync.py index 5024cf93c18..ad9356d2f97 100644 --- a/tests/upgrades/test_satellitesync.py +++ b/tests/upgrades/test_satellitesync.py @@ -4,17 +4,12 @@ :CaseAutomation: Automated -:CaseLevel: Component - :CaseComponent: InterSatelliteSync :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest @@ -33,13 +28,12 @@ def test_pre_version_cv_export_import(self, module_org, target_sat, save_test_da :id: preupgrade-f19e4928-94db-4df6-8ce8-b5e4afe34258 :steps: - - 1. Create a ContentView - 2. Publish and promote the Content View - 3. Check the package count of promoted content view. + 1. Create a ContentView + 2. Publish and promote the Content View + 3. Check the package count of promoted content view. :expectedresults: Before the upgrade, Content view published and promoted, and package - count should be greater than 0. + count should be greater than 0. 
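The rewritten satellite-maintain checks above reduce to two small pieces of logic: keep only stdout lines that begin with a dotted version number, then compare them against the z-stream derived from the Satellite version. A standalone sketch with hypothetical values:

import re

# Hypothetical list-versions stdout: headers and blanks mixed in with versions.
list_versions = ['Looking for versions to upgrade to...', '6.14.z', '']
regex = re.compile(r'^\d+\.\d+')
upgradable_versions = [version for version in list_versions if regex.match(version)]

# Derive the expected z-stream from a hypothetical target_sat.version string.
zstream = '.'.join('6.14.2'.split('.')[0:2]) + '.z'

# An empty set difference means every reported target is the expected z-stream.
assert set(upgradable_versions) - {zstream} == set()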
""" product = target_sat.api.Product(organization=module_org).create() repo = target_sat.api.Repository( diff --git a/tests/upgrades/test_subnet.py b/tests/upgrades/test_subnet.py index 63813c06de2..0fbc2f38277 100644 --- a/tests/upgrades/test_subnet.py +++ b/tests/upgrades/test_subnet.py @@ -4,17 +4,12 @@ :CaseAutomation: NotAutomated -:CaseLevel: Acceptance - :CaseComponent: Networking :Team: Rocket -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/upgrades/test_subscription.py b/tests/upgrades/test_subscription.py index 75a3f49fee5..b1dd84597a1 100644 --- a/tests/upgrades/test_subscription.py +++ b/tests/upgrades/test_subscription.py @@ -4,24 +4,19 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SubscriptionManagement :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest -from broker import Broker from manifester import Manifester +import pytest from robottelo import constants from robottelo.config import settings +from robottelo.hosts import ContentHost class TestManifestScenarioRefresh: @@ -75,7 +70,7 @@ def test_post_manifest_scenario_refresh(self, request, target_sat, pre_upgrade_d history = target_sat.api.Subscription(organization=org).manifest_history( data={'organization_id': org.id} ) - assert "Subscriptions deleted by foreman_admin" == history[0]['statusMessage'] + assert history[0]['statusMessage'] == "Subscriptions deleted by foreman_admin" class TestSubscriptionAutoAttach: @@ -157,9 +152,7 @@ def test_post_subscription_scenario_auto_attach(self, request, target_sat, pre_u 1. Pre-upgrade content host should get subscribed. 2. All the cleanup should be completed successfully. """ - rhel_contenthost = Broker().from_inventory( - filter=f'@inv.hostname == "{pre_upgrade_data.rhel_client}"' - )[0] + rhel_contenthost = ContentHost.get_host_by_hostname(pre_upgrade_data.rhel_client) host = target_sat.api.Host().search(query={'search': f'name={rhel_contenthost.hostname}'})[ 0 ] @@ -174,5 +167,5 @@ def test_post_subscription_scenario_auto_attach(self, request, target_sat, pre_u sub.delete_manifest(data={'organization_id': org.id}) assert len(sub.search()) == 0 manifester = Manifester(manifest_category=settings.manifest.entitlement) - manifester.allocation_uuid = pre_upgrade_data.allocation_uuid request.addfinalizer(manifester.delete_subscription_allocation) + manifester.allocation_uuid = pre_upgrade_data.allocation_uuid diff --git a/tests/upgrades/test_syncplan.py b/tests/upgrades/test_syncplan.py index b4310a0a871..5939c8e16dc 100644 --- a/tests/upgrades/test_syncplan.py +++ b/tests/upgrades/test_syncplan.py @@ -4,20 +4,15 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: SyncPlans :Team: Phoenix-content -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_choice +import pytest from robottelo.constants import SYNC_INTERVAL from robottelo.utils.datafactory import valid_cron_expressions @@ -41,7 +36,6 @@ def test_pre_sync_plan_migration(self, request, target_sat): 3. Assign sync plan to product and sync the repo :expectedresults: Run sync plan create, get, assign and verify it should pass - """ org = target_sat.api.Organization(name=f'{request.node.name}_org').create() sync_plan = target_sat.api.SyncPlan( @@ -71,7 +65,7 @@ def test_pre_disabled_sync_plan_logic(self, request, target_sat): 5. Re enable the sync plan :expectedresults: Sync plan is created and assigned to a product. 
The associated recurring - logic is cancelled and then the plan is re-enabled so that it gets a new recurring logic. + logic is cancelled and then the plan is re-enabled so that it gets a new recurring logic. :BZ: 1887511 @@ -114,8 +108,7 @@ def test_post_sync_plan_migration(self, request, dependent_scenario_name, target 2. Check the all available sync_interval type update with pre-created sync_plan :expectedresults: After upgrade, the sync plan should remain the same with their all - target_sat.api and sync_interval updated with their all supported sync interval type. - + target_sat.api and sync_interval updated with their all supported sync interval type. """ pre_test_name = dependent_scenario_name org = target_sat.api.Organization().search(query={'search': f'name="{pre_test_name}_org"'})[ @@ -156,7 +149,7 @@ def test_post_disabled_sync_plan_logic(self, request, dependent_scenario_name, t 2. Check the all available sync_interval type update with pre-created sync_plan. :expectedresults: Update proceedes without any errors. After upgrade, the sync plan - should remain the same with all entities + should remain the same with all entities :BZ: 1887511 diff --git a/tests/upgrades/test_user.py b/tests/upgrades/test_user.py index cd3a8e55297..b21da9b5ba1 100644 --- a/tests/upgrades/test_user.py +++ b/tests/upgrades/test_user.py @@ -4,17 +4,12 @@ :CaseAutomation: NotAutomated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ import pytest diff --git a/tests/upgrades/test_usergroup.py b/tests/upgrades/test_usergroup.py index c18b8904aea..11ac95e2af8 100644 --- a/tests/upgrades/test_usergroup.py +++ b/tests/upgrades/test_usergroup.py @@ -4,23 +4,17 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: UsersRoles :Team: Endeavour -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.constants import LDAP_ATTR -from robottelo.constants import LDAP_SERVER_TYPE +from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE class TestUserGroupMembership: @@ -44,7 +38,7 @@ def test_pre_create_user_group_with_ldap_user(self, ad_data, target_sat, save_te ad_data = ad_data() member_group = 'foobargroup' LOGEDIN_MSG = "Using configured credentials for user '{0}'." - auth_source = target_sat.cli_factory.make_ldap_auth_source( + auth_source = target_sat.cli_factory.ldap_auth_source( { 'name': gen_string('alpha'), 'onthefly-register': 'true', @@ -60,8 +54,8 @@ def test_pre_create_user_group_with_ldap_user(self, ad_data, target_sat, save_te } ) viewer_role = target_sat.cli.Role.info({'name': 'Viewer'}) - user_group = target_sat.cli_factory.make_usergroup() - target_sat.cli_factory.make_usergroup_external( + user_group = target_sat.cli_factory.usergroup() + target_sat.cli_factory.usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], @@ -102,22 +96,22 @@ def test_post_verify_user_group_membership( 2. Update ldap auth. :expectedresults: After upgrade, user group membership should remain the same and LDAP - auth update should work. + auth update should work. 
""" ad_data = ad_data() user_group = target_sat.api.UserGroup().search( query={'search': f'name={pre_upgrade_data["user_group_name"]}'} ) + request.addfinalizer(user_group[0].delete) auth_source = target_sat.api.AuthSourceLDAP().search( query={'search': f'name={pre_upgrade_data["auth_source_name"]}'} )[0] request.addfinalizer(auth_source.delete) - request.addfinalizer(user_group[0].delete) user = target_sat.api.User().search(query={'search': f'login={ad_data["ldap_user_name"]}'})[ 0 ] - assert user.read().id == user_group[0].read().user[0].id request.addfinalizer(user.delete) + assert user.read().id == user_group[0].read().user[0].id role_list = target_sat.cli.Role.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).list() diff --git a/tests/upgrades/test_virtwho.py b/tests/upgrades/test_virtwho.py index a0d7654badc..95b36ec5749 100644 --- a/tests/upgrades/test_virtwho.py +++ b/tests/upgrades/test_virtwho.py @@ -4,31 +4,24 @@ :CaseAutomation: Automated -:CaseLevel: Acceptance - :CaseComponent: Virt-whoConfigurePlugin :Team: Phoenix-subscriptions -:TestType: Functional - :CaseImportance: High -:Upstream: No """ -import pytest from fauxfactory import gen_string +import pytest -from robottelo.cli.host import Host -from robottelo.cli.subscription import Subscription -from robottelo.cli.virt_who_config import VirtWhoConfig from robottelo.config import settings -from robottelo.constants import DEFAULT_LOC from robottelo.utils.issue_handlers import is_open -from robottelo.utils.virtwho import deploy_configure_by_command -from robottelo.utils.virtwho import get_configure_command -from robottelo.utils.virtwho import get_configure_file -from robottelo.utils.virtwho import get_configure_option +from robottelo.utils.virtwho import ( + deploy_configure_by_command, + get_configure_command, + get_configure_file, + get_configure_option, +) @pytest.fixture @@ -44,11 +37,11 @@ def form_data(target_sat): 'satellite_url': target_sat.hostname, 'hypervisor_username': esx.hypervisor_username, 'hypervisor_password': esx.hypervisor_password, - 'name': 'preupgrade_virt_who', + 'name': f'preupgrade_virt_who_{gen_string("alpha")}', } -ORG_DATA = {'name': 'virtwho_upgrade_org_name'} +ORG_DATA = {'name': f'virtwho_upgrade_{gen_string("alpha")}'} class TestScenarioPositiveVirtWho: @@ -78,13 +71,8 @@ def test_pre_create_virt_who_configuration( 3. Report is sent to satellite. 4. Virtual sku can be generated and attached. 
""" - default_loc_id = ( - target_sat.api.Location().search(query={'search': f'name="{DEFAULT_LOC}"'})[0].id - ) - default_loc = target_sat.api.Location(id=default_loc_id).read() org = target_sat.api.Organization(name=ORG_DATA['name']).create() - default_loc.organization.append(target_sat.api.Organization(id=org.id)) - default_loc.update(['organization']) + target_sat.api.Location(organization=[org]).create() org.sca_disable() target_sat.upload_manifest(org.id, function_entitlement_manifest.content) form_data.update({'organization_id': org.id}) @@ -105,8 +93,10 @@ def test_pre_create_virt_who_configuration( (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), ] for hostname, sku in hosts: - host = Host.list({'search': hostname})[0] - subscriptions = Subscription.list({'organization-id': org.id, 'search': sku}) + host = target_sat.cli.Host.list({'search': hostname})[0] + subscriptions = target_sat.cli.Subscription.list( + {'organization-id': org.id, 'search': sku} + ) vdc_id = subscriptions[0]['id'] if 'type=STACK_DERIVED' in sku: for item in subscriptions: @@ -127,6 +117,10 @@ def test_pre_create_virt_who_configuration( { 'hypervisor_name': hypervisor_name, 'guest_name': guest_name, + 'org_id': org.id, + 'org_name': org.name, + 'org_label': org.label, + 'name': vhd.name, } ) @@ -140,25 +134,35 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar 1. Post upgrade, Verify virt-who exists and has same status. 2. Verify the connection of the guest on Content host. 3. Verify the virt-who config-file exists. - 4. Update virt-who config with new name. - 5. Delete virt-who config. + 4. Verify Report is sent to satellite. + 5. Update virt-who config with new name. + 6. Delete virt-who config. :expectedresults: 1. virt-who config is intact post upgrade. 2. the config and guest connection have the same status. - 3. virt-who config should update and delete successfully. + 3. Report is sent to satellite. + 4. virt-who config should update and delete successfully. """ - org = target_sat.api.Organization().search(query={'search': f'name={ORG_DATA["name"]}'})[0] + org_id = pre_upgrade_data.get('org_id') + org_name = pre_upgrade_data.get('org_name') + org_label = pre_upgrade_data.get('org_label') + name = pre_upgrade_data.get('name') # Post upgrade, Verify virt-who exists and has same status. 
- vhd = target_sat.api.VirtWhoConfig(organization_id=org.id).search( - query={'search': f'name={form_data["name"]}'} + vhd = target_sat.api.VirtWhoConfig(organization_id=org_id).search( + query={'search': f'name={name}'} )[0] if not is_open('BZ:1802395'): assert vhd.status == 'ok' # Verify virt-who status via CLI as we cannot check it via API now - vhd_cli = VirtWhoConfig.exists(search=('name', form_data['name'])) - assert VirtWhoConfig.info({'id': vhd_cli['id']})['general-information']['status'] == 'OK' + vhd_cli = target_sat.cli.VirtWhoConfig.exists(search=('name', name)) + assert ( + target_sat.cli.VirtWhoConfig.info({'id': vhd_cli['id']})['general-information'][ + 'status' + ] + == 'OK' + ) # Verify the connection of the guest on the Content host hypervisor_name = pre_upgrade_data.get('hypervisor_name') @@ -166,7 +170,7 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar hosts = [hypervisor_name, guest_name] for hostname in hosts: result = ( - target_sat.api.Host(organization=org.id) + target_sat.api.Host(organization=org_id) .search(query={'search': hostname})[0] .read_json() ) @@ -176,6 +180,18 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar config_file = get_configure_file(vhd.id) get_configure_option('hypervisor_id', config_file), + # Verify Report is sent to satellite. + command = get_configure_command(vhd.id, org=org_name) + deploy_configure_by_command( + command, form_data['hypervisor_type'], debug=True, org=org_label + ) + virt_who_instance = ( + target_sat.api.VirtWhoConfig(organization_id=org_id) + .search(query={'search': f'name={name}'})[0] + .status + ) + assert virt_who_instance == 'ok' + # Update virt-who config modify_name = gen_string('alpha') vhd.name = modify_name @@ -183,6 +199,6 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar # Delete virt-who config vhd.delete() - assert not target_sat.api.VirtWhoConfig(organization_id=org.id).search( + assert not target_sat.api.VirtWhoConfig(organization_id=org_id).search( query={'search': f'name={modify_name}'} )
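Throughout these scenarios the pre- and post-upgrade halves hand state across the upgrade via save_test_data and pre_upgrade_data, as with the org_id/org_name/name keys added above. A schematic, runnable stand-in for that round trip (the real fixtures persist the dict across the Satellite upgrade):

saved = {}

def save_test_data(data):
    # Stand-in for the upgrade framework fixture; the real one persists
    # the payload between the pre- and post-upgrade pytest sessions.
    saved.update(data)

save_test_data({'org_id': 42, 'name': 'example_config'})  # pre-upgrade half
assert saved.get('org_id') == 42  # post-upgrade half reads it back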