diff --git a/pytest_fixtures/component/host.py b/pytest_fixtures/component/host.py index 4f070a86a64..11645f89f84 100644 --- a/pytest_fixtures/component/host.py +++ b/pytest_fixtures/component/host.py @@ -3,7 +3,6 @@ from nailgun import entities import pytest -from robottelo.cli.factory import setup_org_for_a_rh_repo from robottelo.constants import DEFAULT_CV, ENVIRONMENT, PRDS, REPOS, REPOSET @@ -24,7 +23,7 @@ def module_model(): @pytest.mark.skip_if_not_set('clients', 'fake_manifest') @pytest.fixture(scope="module") -def setup_rhst_repo(): +def setup_rhst_repo(module_target_sat): """Prepare Satellite tools repository for usage in specified organization""" org = entities.Organization().create() cv = entities.ContentView(organization=org).create() @@ -34,7 +33,7 @@ def setup_rhst_repo(): organization=org, ).create() repo_name = 'rhst7' - setup_org_for_a_rh_repo( + module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET[repo_name], diff --git a/pytest_fixtures/component/oscap.py b/pytest_fixtures/component/oscap.py index 356050f94a7..6e72eee1d6b 100644 --- a/pytest_fixtures/component/oscap.py +++ b/pytest_fixtures/component/oscap.py @@ -4,7 +4,6 @@ from nailgun import entities import pytest -from robottelo.cli.factory import make_scapcontent from robottelo.config import robottelo_tmp_dir, settings from robottelo.constants import OSCAP_PROFILE, OSCAP_TAILORING_FILE, DataFile @@ -29,9 +28,11 @@ def oscap_content_path(session_target_sat): @pytest.fixture(scope="module") -def scap_content(import_ansible_roles): +def scap_content(import_ansible_roles, module_target_sat): title = f"rhel-content-{gen_string('alpha')}" - scap_info = make_scapcontent({'title': title, 'scap-file': f'{settings.oscap.content_path}'}) + scap_info = module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': f'{settings.oscap.content_path}'} + ) scap_id = scap_info['id'] scap_info = entities.ScapContents(id=scap_id).read() diff --git a/robottelo/cli/base.py b/robottelo/cli/base.py index d5ce2ae224f..2d3d7974696 100644 --- a/robottelo/cli/base.py +++ b/robottelo/cli/base.py @@ -6,59 +6,11 @@ from robottelo import ssh from robottelo.cli import hammer from robottelo.config import settings +from robottelo.exceptions import CLIDataBaseError, CLIError, CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.ssh import get_client -class CLIError(Exception): - """Indicates that a CLI command could not be run.""" - - -class CLIBaseError(Exception): - """Indicates that a CLI command has finished with return code different - from zero. - - :param status: CLI command return code - :param stderr: contents of the ``stderr`` - :param msg: explanation of the error - - """ - - def __init__(self, status, stderr, msg): - self.status = status - self.stderr = stderr - self.msg = msg - super().__init__(msg) - self.message = msg - - def __str__(self): - """Include class name, status, stderr and msg to string repr so - assertRaisesRegexp can be used to assert error present on any - attribute - """ - return repr(self) - - def __repr__(self): - """Include class name status, stderr and msg to improve logging""" - return '{}(status={!r}, stderr={!r}, msg={!r}'.format( - type(self).__name__, self.status, self.stderr, self.msg - ) - - -class CLIReturnCodeError(CLIBaseError): - """Error to be raised when an error occurs due to some validation error - when execution hammer cli. 
- See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details - """ - - -class CLIDataBaseError(CLIBaseError): - """Error to be raised when an error occurs due to some missing parameter - which cause a data base error on hammer - See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details - """ - - class Base: """Base class for hammer CLI interaction @@ -84,7 +36,7 @@ def _handle_response(cls, response, ignore_stderr=None): :param ignore_stderr: indicates whether to throw a warning in logs if ``stderr`` is not empty. :returns: contents of ``stdout``. - :raises robottelo.cli.base.CLIReturnCodeError: If return code is + :raises robottelo.exceptions.CLIReturnCodeError: If return code is different from zero. """ if isinstance(response.stderr, tuple): diff --git a/robottelo/cli/report_template.py b/robottelo/cli/report_template.py index e8a60b9beb6..962c5929bc7 100644 --- a/robottelo/cli/report_template.py +++ b/robottelo/cli/report_template.py @@ -25,8 +25,9 @@ from tempfile import mkstemp from robottelo import ssh -from robottelo.cli.base import Base, CLIError +from robottelo.cli.base import Base from robottelo.constants import REPORT_TEMPLATE_FILE, DataFile +from robottelo.exceptions import CLIError class ReportTemplate(Base): diff --git a/robottelo/content_info.py b/robottelo/content_info.py index 1e8fd01ed64..fd404f5a7d9 100644 --- a/robottelo/content_info.py +++ b/robottelo/content_info.py @@ -5,7 +5,7 @@ import requests from robottelo import ssh -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError def get_repo_files(repo_path, extension='rpm', hostname=None): diff --git a/robottelo/exceptions.py b/robottelo/exceptions.py index d1f9886dce0..83022dfcd6e 100644 --- a/robottelo/exceptions.py +++ b/robottelo/exceptions.py @@ -65,3 +65,56 @@ class ProxyError(Exception): class DownloadFileError(Exception): """Indicates an error when failure in downloading file from server.""" + + +class CLIFactoryError(Exception): + """Indicates an error occurred while creating an entity using hammer""" + + +class CLIError(Exception): + """Indicates that a CLI command could not be run.""" + + +class CLIBaseError(Exception): + """Indicates that a CLI command has finished with return code different + from zero. + + :param status: CLI command return code + :param stderr: contents of the ``stderr`` + :param msg: explanation of the error + + """ + + def __init__(self, status, stderr, msg): + self.status = status + self.stderr = stderr + self.msg = msg + super().__init__(msg) + self.message = msg + + def __str__(self): + """Include class name, status, stderr and msg to string repr so + assertRaisesRegexp can be used to assert error present on any + attribute + """ + return repr(self) + + def __repr__(self): + """Include class name status, stderr and msg to improve logging""" + return '{}(status={!r}, stderr={!r}, msg={!r}'.format( + type(self).__name__, self.status, self.stderr, self.msg + ) + + +class CLIReturnCodeError(CLIBaseError): + """Error to be raised when an error occurs due to some validation error + when execution hammer cli. 
+ See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details + """ + + +class CLIDataBaseError(CLIBaseError): + """Error to be raised when an error occurs due to some missing parameter + which cause a data base error on hammer + See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details + """ diff --git a/robottelo/host_helpers/cli_factory.py b/robottelo/host_helpers/cli_factory.py index cf7d8a28058..b4461e6b844 100644 --- a/robottelo/host_helpers/cli_factory.py +++ b/robottelo/host_helpers/cli_factory.py @@ -26,17 +26,13 @@ ) from robottelo import constants -from robottelo.cli.base import CLIReturnCodeError from robottelo.cli.proxy import CapsuleTunnelError from robottelo.config import settings +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.host_helpers.repository_mixins import initiate_repo_helpers from robottelo.utils.manifest import clone -class CLIFactoryError(Exception): - """Indicates an error occurred while creating an entity using hammer""" - - def create_object(cli_object, options, values=None, credentials=None): """ Creates with dictionary of arguments. diff --git a/robottelo/host_helpers/satellite_mixins.py b/robottelo/host_helpers/satellite_mixins.py index 263510f6783..b17325a8492 100644 --- a/robottelo/host_helpers/satellite_mixins.py +++ b/robottelo/host_helpers/satellite_mixins.py @@ -7,7 +7,6 @@ import requests -from robottelo.cli.base import CLIReturnCodeError from robottelo.cli.proxy import CapsuleTunnelError from robottelo.config import settings from robottelo.constants import ( @@ -16,6 +15,7 @@ PUPPET_COMMON_INSTALLER_OPTS, PUPPET_SATELLITE_INSTALLER, ) +from robottelo.exceptions import CLIReturnCodeError from robottelo.host_helpers.api_factory import APIFactory from robottelo.host_helpers.cli_factory import CLIFactory from robottelo.host_helpers.ui_factory import UIFactory diff --git a/robottelo/hosts.py b/robottelo/hosts.py index d8838a9606b..14cf458e3a2 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -30,7 +30,6 @@ from robottelo import constants from robottelo.cli.base import Base -from robottelo.cli.factory import CLIFactoryError from robottelo.config import ( configure_airgun, configure_nailgun, @@ -53,7 +52,7 @@ RHSSO_USER_UPDATE, SATELLITE_VERSION, ) -from robottelo.exceptions import DownloadFileError, HostPingFailed +from robottelo.exceptions import CLIFactoryError, DownloadFileError, HostPingFailed from robottelo.host_helpers import CapsuleMixins, ContentHostMixins, SatelliteMixins from robottelo.logging import logger from robottelo.utils import validate_ssh_pub_key @@ -1248,18 +1247,13 @@ def virt_who_hypervisor_config( :param bool upload_manifest: whether to upload the organization manifest :param list extra_repos: (Optional) repositories dict options to setup additionally. 
""" - from robottelo.cli import factory as cli_factory - from robottelo.cli.lifecycleenvironment import LifecycleEnvironment - from robottelo.cli.org import Org - from robottelo.cli.subscription import Subscription - from robottelo.cli.virt_who_config import VirtWhoConfig - org = cli_factory.make_org() if org_id is None else Org.info({'id': org_id}) + org = satellite.cli_factory.make_org() if org_id is None else satellite.cli.Org.info({'id': org_id}) if lce_id is None: - lce = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + lce = satellite.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) else: - lce = LifecycleEnvironment.info({'id': lce_id, 'organization-id': org['id']}) + lce = satellite.cli.LifecycleEnvironment.info({'id': lce_id, 'organization-id': org['id']}) extra_repos = extra_repos or [] repos = [ # Red Hat Satellite Tools @@ -1273,7 +1267,7 @@ def virt_who_hypervisor_config( } ] repos.extend(extra_repos) - content_setup_data = cli_factory.setup_cdn_and_custom_repos_content( + content_setup_data = satellite.cli_factory.setup_cdn_and_custom_repos_content( org[id], lce[id], repos, @@ -1317,7 +1311,7 @@ def virt_who_hypervisor_config( # create the virt-who directory on satellite satellite = Satellite() satellite.execute(f'mkdir -p {virt_who_deploy_directory}') - VirtWhoConfig.fetch({'id': config_id, 'output': virt_who_deploy_file}) + satellite.cli.VirtWhoConfig.fetch({'id': config_id, 'output': virt_who_deploy_file}) # remote_copy from satellite to self satellite.session.remote_copy(virt_who_deploy_file, self) @@ -1383,7 +1377,7 @@ def virt_who_hypervisor_config( virt_who_hypervisor_host = org_hosts[0] subscription_id = None if hypervisor_hostname and subscription_name: - subscriptions = Subscription.list({'organization-id': org_id}, per_page=False) + subscriptions = satellite.cli.Subscription.list({'organization-id': org_id}, per_page=False) for subscription in subscriptions: if subscription['name'] == subscription_name: subscription_id = subscription['id'] diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index a20e38e7a90..88b359f3b1c 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -23,8 +23,6 @@ import pytest from robottelo import constants -from robottelo.cli.factory import setup_org_for_a_custom_repo, setup_org_for_a_rh_repo -from robottelo.cli.host import Host from robottelo.config import settings from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME @@ -48,8 +46,10 @@ def activation_key(module_org, module_lce): @pytest.fixture(scope='module') -def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, activation_key): - return setup_org_for_a_rh_repo( +def rh_repo( + module_entitlement_manifest_org, module_lce, module_cv, activation_key, module_target_sat +): + return module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': constants.PRDS['rhel'], 'repository-set': constants.REPOSET['rhst7'], @@ -63,8 +63,8 @@ def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, activation_k @pytest.fixture(scope='module') -def custom_repo(module_org, module_lce, module_cv, activation_key): - return setup_org_for_a_custom_repo( +def custom_repo(module_org, module_lce, module_cv, activation_key, module_target_sat): + return module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_9.url, 'organization-id': module_org.id, @@ -467,7 +467,7 @@ def test_positive_get_applicable_for_host(setup_content_rhel6, 
rhel6_contenthost @pytest.mark.tier3 -def test_positive_get_diff_for_cv_envs(): +def test_positive_get_diff_for_cv_envs(module_target_sat): """Generate a difference in errata between a set of environments for a content view @@ -490,7 +490,7 @@ def test_positive_get_diff_for_cv_envs(): content_view = entities.ContentView(organization=org).create() activation_key = entities.ActivationKey(environment=env, organization=org).create() for repo_url in [settings.repos.yum_9.url, CUSTOM_REPO_URL]: - setup_org_for_a_custom_repo( + module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': repo_url, 'organization-id': org.id, @@ -677,7 +677,7 @@ def test_errata_installation_with_swidtags( _run_remote_command_on_content_host( module_org, f'dnf -y module install {module_name}:0:{version}', rhel8_contenthost ) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) # validate swid tags Installed before_errata_apply_result = _run_remote_command_on_content_host( module_org, @@ -694,7 +694,7 @@ def test_errata_installation_with_swidtags( module_org, f'dnf -y module update {module_name}', rhel8_contenthost ) _run_remote_command_on_content_host(module_org, 'dnf -y upload-profile', rhel8_contenthost) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) applicable_errata_count -= 1 assert rhel8_contenthost.applicable_errata_count == applicable_errata_count after_errata_apply_result = _run_remote_command_on_content_host( @@ -732,9 +732,9 @@ def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): @pytest.fixture(scope='module') -def rhel8_custom_repo_cv(module_entitlement_manifest_org): +def rhel8_custom_repo_cv(module_entitlement_manifest_org, module_target_sat): """Create repo and publish CV so that packages are in Library""" - return setup_org_for_a_custom_repo( + return module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.module_stream_1.url, 'organization-id': module_entitlement_manifest_org.id, @@ -832,7 +832,7 @@ def test_apply_modular_errata_using_default_content_view( assert result.status == 0 # Check that there is now two errata applicable errata = _fetch_available_errata(module_entitlement_manifest_org, host, 2) - Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': rhel8_contenthost.nailgun_host.id}) assert len(errata) == 2 # Assert that errata package is required assert constants.FAKE_3_CUSTOM_PACKAGE in errata[0]['module_streams'][0]['packages'] diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 1303de40c27..b2e1a055e2f 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -23,9 +23,9 @@ from requests.exceptions import HTTPError from robottelo import constants -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings from robottelo.constants import DEFAULT_ARCHITECTURE, MIRRORING_POLICIES, REPOS +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import parametrized diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index 4b42408114d..f45ea8ef1f8 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -25,7 +25,6 @@ import pytest from 
requests.exceptions import HTTPError -from robottelo.cli.ldapauthsource import LDAPAuthSource from robottelo.config import settings from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE from robottelo.utils.datafactory import gen_string, generate_strings_list, parametrized @@ -207,7 +206,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): query={'search': f'login={ad_data["ldap_user_name"]}'} ): user.delete() - LDAPAuthSource.delete({'name': authsource_name}) + target_sat.cli.LDAPAuthSource.delete({'name': authsource_name}) @pytest.mark.tier1 def test_positive_create_role_with_taxonomies(self, role_taxonomies): diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 377d43555dd..0ac86adaf37 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -27,7 +27,6 @@ import pytest from requests.exceptions import HTTPError -from robottelo.cli.subscription import Subscription from robottelo.config import settings from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME, PRDS, REPOS, REPOSET @@ -215,7 +214,7 @@ def test_positive_delete_manifest_as_another_user( entities.Subscription(sc2, organization=function_org).delete_manifest( data={'organization_id': function_org.id} ) - assert len(Subscription.list({'organization-id': function_org.id})) == 0 + assert len(target_sat.cli.Subscription.list({'organization-id': function_org.id})) == 0 @pytest.mark.tier2 diff --git a/tests/foreman/cli/test_acs.py b/tests/foreman/cli/test_acs.py index 39ba1701dee..37750aed9bf 100644 --- a/tests/foreman/cli/test_acs.py +++ b/tests/foreman/cli/test_acs.py @@ -19,8 +19,8 @@ from fauxfactory import gen_alphanumeric import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.constants.repos import PULP_FIXTURE_ROOT, PULP_SUBPATHS_COMBINED +from robottelo.exceptions import CLIReturnCodeError ACS_UPDATED = 'Alternate Content Source updated.' ACS_DELETED = 'Alternate Content Source deleted.' 
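Note (reading aid, not part of the patch): the remaining hunks, starting with tests/foreman/cli/test_activationkey.py below, repeat the same migration over and over, so here is a minimal sketch of the convention every caller is moved to. Factory helpers and hammer wrappers are reached through the Satellite fixture (target_sat / module_target_sat) instead of module-level imports from robottelo.cli.factory and robottelo.cli.<entity>, and the CLI exceptions are imported from robottelo.exceptions instead of robottelo.cli.base. The test function name in the sketch is illustrative only; the individual calls are taken from the hunks in this patch.

import pytest

# new home of the CLI exceptions (previously robottelo.cli.base)
from robottelo.exceptions import CLIReturnCodeError


def test_example_delete_activation_key(module_org, module_target_sat):
    # previously: make_activation_key(...) imported from robottelo.cli.factory
    new_ak = module_target_sat.cli_factory.make_activation_key(
        {'organization-id': module_org.id}
    )
    # previously: ActivationKey.delete(...) imported from robottelo.cli.activationkey
    module_target_sat.cli.ActivationKey.delete(
        {'name': new_ak['name'], 'organization-id': module_org.id}
    )
    # reading the deleted key should now fail with the relocated exception
    with pytest.raises(CLIReturnCodeError):
        module_target_sat.cli.ActivationKey.info({'id': new_ak['id']})
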
diff --git a/tests/foreman/cli/test_activationkey.py b/tests/foreman/cli/test_activationkey.py index 126b4144d3c..c3064499f4f 100644 --- a/tests/foreman/cli/test_activationkey.py +++ b/tests/foreman/cli/test_activationkey.py @@ -23,28 +23,10 @@ from fauxfactory import gen_alphanumeric, gen_string import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView from robottelo.cli.defaults import Defaults -from robottelo.cli.factory import ( - CLIFactoryError, - add_role_permissions, - make_activation_key, - make_content_view, - make_host_collection, - make_lifecycle_environment, - make_role, - make_user, - setup_org_for_a_custom_repo, - setup_org_for_a_rh_repo, -) -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.repository import Repository -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost from robottelo.utils.datafactory import ( invalid_values_list, @@ -55,9 +37,11 @@ @pytest.fixture(scope='module') -def get_default_env(module_org): +def get_default_env(module_org, module_target_sat): """Get default lifecycle environment""" - return LifecycleEnvironment.info({'organization-id': module_org.id, 'name': 'Library'}) + return module_target_sat.cli.LifecycleEnvironment.info( + {'organization-id': module_org.id, 'name': 'Library'} + ) @pytest.mark.tier1 @@ -82,7 +66,7 @@ def test_positive_create_with_name(module_target_sat, module_entitlement_manifes @pytest.mark.tier1 @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) -def test_positive_create_with_description(desc, module_org): +def test_positive_create_with_description(desc, module_org, module_target_sat): """Create Activation key for all variations of Description :id: 5a5ca7f9-1449-4365-ac8a-978605620bf2 @@ -93,12 +77,14 @@ def test_positive_create_with_description(desc, module_org): :parametrized: yes """ - new_ak = make_activation_key({'organization-id': module_org.id, 'description': desc}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'description': desc} + ) assert new_ak['description'] == desc @pytest.mark.tier1 -def test_positive_create_with_default_lce_by_id(module_org, get_default_env): +def test_positive_create_with_default_lce_by_id(module_org, get_default_env, target_sat): """Create Activation key with associated default environment :id: 9171adb2-c9ac-4cda-978f-776826668aa3 @@ -108,14 +94,14 @@ def test_positive_create_with_default_lce_by_id(module_org, get_default_env): :CaseImportance: Critical """ lce = get_default_env - new_ak_env = make_activation_key( + new_ak_env = target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': lce['id']} ) assert new_ak_env['lifecycle-environment'] == lce['name'] @pytest.mark.tier1 -def test_positive_create_with_non_default_lce(module_org): +def test_positive_create_with_non_default_lce(module_org, module_target_sat): """Create Activation key with associated custom environment :id: ad4d4611-3fb5-4449-ae47-305f9931350e @@ -125,15 +111,17 @@ def test_positive_create_with_non_default_lce(module_org): :CaseImportance: Critical """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - 
new_ak_env = make_activation_key( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + new_ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': env['id']} ) assert new_ak_env['lifecycle-environment'] == env['name'] @pytest.mark.tier1 -def test_positive_create_with_default_lce_by_name(module_org, get_default_env): +def test_positive_create_with_default_lce_by_name(module_org, get_default_env, module_target_sat): """Create Activation key with associated environment by name :id: 7410f7c4-e8b5-4080-b6d2-65dbcedffe8a @@ -143,7 +131,7 @@ def test_positive_create_with_default_lce_by_name(module_org, get_default_env): :CaseImportance: Critical """ lce = get_default_env - new_ak_env = make_activation_key( + new_ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment': lce['name']} ) assert new_ak_env['lifecycle-environment'] == lce['name'] @@ -151,7 +139,7 @@ def test_positive_create_with_default_lce_by_name(module_org, get_default_env): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_cv(name, module_org, get_default_env): +def test_positive_create_with_cv(name, module_org, get_default_env, module_target_sat): """Create Activation key for all variations of Content Views :id: ec7b1af5-c3f4-40c3-b1df-c69c02a3b9a7 @@ -163,8 +151,10 @@ def test_positive_create_with_cv(name, module_org, get_default_env): :parametrized: yes """ - new_cv = make_content_view({'name': name, 'organization-id': module_org.id}) - new_ak_cv = make_activation_key( + new_cv = module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': module_org.id} + ) + new_ak_cv = module_target_sat.cli_factory.make_activation_key( { 'content-view': new_cv['name'], 'environment': get_default_env['name'], @@ -175,7 +165,7 @@ def test_positive_create_with_cv(name, module_org, get_default_env): @pytest.mark.tier1 -def test_positive_create_with_usage_limit_default(module_org): +def test_positive_create_with_usage_limit_default(module_org, module_target_sat): """Create Activation key with default Usage limit (Unlimited) :id: cba13c72-9845-486d-beff-e0fb55bb762c @@ -184,12 +174,12 @@ def test_positive_create_with_usage_limit_default(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) assert new_ak['host-limit'] == 'Unlimited' @pytest.mark.tier1 -def test_positive_create_with_usage_limit_finite(module_org): +def test_positive_create_with_usage_limit_finite(module_org, module_target_sat): """Create Activation key with finite Usage limit :id: 529a0f9e-977f-4e9d-a1af-88bb98c28a6a @@ -198,13 +188,15 @@ def test_positive_create_with_usage_limit_finite(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id, 'max-hosts': '10'}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': '10'} + ) assert new_ak['host-limit'] == '10' @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_create_content_and_check_enabled(module_org): +def test_positive_create_content_and_check_enabled(module_org, module_target_sat): """Create activation key and 
add content to it. Check enabled state. :id: abfc6c6e-acd1-4761-b309-7e68e1d17172 @@ -216,10 +208,10 @@ def test_positive_create_content_and_check_enabled(module_org): :CaseLevel: Integration """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['default-enabled?'] == 'true' @@ -227,7 +219,7 @@ def test_positive_create_content_and_check_enabled(module_org): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_invalid_name(name, module_org): +def test_negative_create_with_invalid_name(name, module_org, module_target_sat): """Create Activation key with invalid Name :id: d9b7e3a9-1d24-4e47-bd4a-dce75772d829 @@ -240,7 +232,9 @@ def test_negative_create_with_invalid_name(name, module_org): :parametrized: yes """ with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'name': name}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) if name in ['', ' ', '\t']: assert 'Name must contain at least 1 character' in str(raise_ctx) if len(name) > 255: @@ -252,7 +246,7 @@ def test_negative_create_with_invalid_name(name, module_org): 'limit', **parametrized([value for value in invalid_values_list() if not value.isdigit()] + [0.5]), ) -def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): +def test_negative_create_with_usage_limit_with_not_integers(module_org, limit, module_target_sat): """Create Activation key with non integers Usage Limit :id: 247ebc2e-c80f-488b-aeaf-6bf5eba55375 @@ -268,7 +262,9 @@ def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): # invalid_values = [value for value in invalid_values_list() if not value.isdigit()] # invalid_values.append(0.5) with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'max-hosts': limit}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': limit} + ) if type(limit) is int: if limit < 1: assert 'Max hosts cannot be less than one' in str(raise_ctx) @@ -278,7 +274,9 @@ def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): @pytest.mark.tier3 @pytest.mark.parametrize('invalid_values', ('-1', '-500', 0)) -def test_negative_create_with_usage_limit_with_invalid_integers(module_org, invalid_values): +def test_negative_create_with_usage_limit_with_invalid_integers( + module_org, invalid_values, module_target_sat +): """Create Activation key with invalid integers Usage Limit :id: 9089f756-fda8-4e28-855c-cf8273f7c6cd @@ -291,13 +289,15 @@ def test_negative_create_with_usage_limit_with_invalid_integers(module_org, inva :parametrized: yes """ with pytest.raises(CLIFactoryError) as raise_ctx: - make_activation_key({'organization-id': module_org.id, 'max-hosts': invalid_values}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': invalid_values} + ) assert 'Failed to create ActivationKey with data:' in str(raise_ctx) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete_by_name(name, 
module_org): +def test_positive_delete_by_name(name, module_org, module_target_sat): """Create Activation key and delete it for all variations of Activation key name @@ -309,14 +309,18 @@ def test_positive_delete_by_name(name, module_org): :parametrized: yes """ - new_ak = make_activation_key({'name': name, 'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'name': name, 'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 -def test_positive_delete_by_org_name(module_org): +def test_positive_delete_by_org_name(module_org, module_target_sat): """Create Activation key and delete it using organization name for which that key was created @@ -326,14 +330,16 @@ def test_positive_delete_by_org_name(module_org): :CaseImportance: High """ - new_ak = make_activation_key({'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 -def test_positive_delete_by_org_label(module_org): +def test_positive_delete_by_org_label(module_org, module_target_sat): """Create Activation key and delete it using organization label for which that key was created @@ -343,15 +349,17 @@ def test_positive_delete_by_org_label(module_org): :CaseImportance: High """ - new_ak = make_activation_key({'organization-id': module_org.id}) - ActivationKey.delete({'name': new_ak['name'], 'organization-label': module_org.label}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.delete( + {'name': new_ak['name'], 'organization-label': module_org.label} + ) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_with_cv(module_org): +def test_positive_delete_with_cv(module_org, module_target_sat): """Create activation key with content view assigned to it and delete it using activation key id @@ -361,15 +369,17 @@ def test_positive_delete_with_cv(module_org): :CaseLevel: Integration """ - new_cv = make_content_view({'organization-id': module_org.id}) - new_ak = make_activation_key({'organization-id': module_org.id, 'content-view': new_cv['name']}) - ActivationKey.delete({'id': new_ak['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view': new_cv['name']} + ) + module_target_sat.cli.ActivationKey.delete({'id': new_ak['id']}) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier2 -def 
test_positive_delete_with_lce(module_org, get_default_env): +def test_positive_delete_with_lce(module_org, get_default_env, module_target_sat): """Create activation key with lifecycle environment assigned to it and delete it using activation key id @@ -379,17 +389,17 @@ def test_positive_delete_with_lce(module_org, get_default_env): :CaseLevel: Integration """ - new_ak = make_activation_key( + new_ak = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment': get_default_env['name']} ) - ActivationKey.delete({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.delete({'id': new_ak['id']}) with pytest.raises(CLIReturnCodeError): - ActivationKey.info({'id': new_ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_update_name_by_id(module_org, name): +def test_positive_update_name_by_id(module_org, name, module_target_sat): """Update Activation Key Name in Activation key searching by ID :id: bc304894-fd9b-4622-96e3-57c2257e26ca @@ -400,16 +410,18 @@ def test_positive_update_name_by_id(module_org, name): :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( {'id': activation_key['id'], 'new-name': name, 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert updated_ak['name'] == name @pytest.mark.tier1 -def test_positive_update_name_by_name(module_org): +def test_positive_update_name_by_name(module_org, module_target_sat): """Update Activation Key Name in an Activation key searching by name @@ -420,17 +432,19 @@ def test_positive_update_name_by_name(module_org): :CaseImportance: Critical """ new_name = gen_string('alpha') - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( {'name': activation_key['name'], 'new-name': new_name, 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert updated_ak['name'] == new_name @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_update_description(description, module_org): +def test_positive_update_description(description, module_org, module_target_sat): """Update Description in an Activation key :id: 60a4e860-d99c-431e-b70b-9b0fa90d839b @@ -441,20 +455,22 @@ def test_positive_update_description(description, module_org): :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - ActivationKey.update( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ActivationKey.update( { 'description': description, 'name': activation_key['name'], 'organization-id': module_org.id, } ) - updated_ak = ActivationKey.info({'id': activation_key['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': 
activation_key['id']}) assert updated_ak['description'] == description @pytest.mark.tier2 -def test_positive_update_lce(module_org, get_default_env): +def test_positive_update_lce(module_org, get_default_env, module_target_sat): """Update Environment in an Activation key :id: 55aaee60-b8c8-49f0-995a-6c526b9b653b @@ -463,15 +479,19 @@ def test_positive_update_lce(module_org, get_default_env): :CaseLevel: Integration """ - ak_env = make_activation_key( + ak_env = module_target_sat.cli_factory.make_activation_key( {'organization-id': module_org.id, 'lifecycle-environment-id': get_default_env['id']} ) - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - ActivationKey.update( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + module_target_sat.cli.ActivationKey.update( { 'id': ak_env['id'], 'lifecycle-environment-id': env['id'], @@ -479,12 +499,12 @@ def test_positive_update_lce(module_org, get_default_env): 'organization-id': module_org.id, } ) - updated_ak = ActivationKey.info({'id': ak_env['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': ak_env['id']}) assert updated_ak['lifecycle-environment'] == env['name'] @pytest.mark.tier2 -def test_positive_update_cv(module_org): +def test_positive_update_cv(module_org, module_target_sat): """Update Content View in an Activation key :id: aa94997d-fc9b-4532-aeeb-9f27b9834914 @@ -493,18 +513,20 @@ def test_positive_update_cv(module_org): :CaseLevel: Integration """ - cv = make_content_view({'organization-id': module_org.id}) - ak_cv = make_activation_key({'organization-id': module_org.id, 'content-view-id': cv['id']}) - new_cv = make_content_view({'organization-id': module_org.id}) - ActivationKey.update( + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + ak_cv = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view-id': cv['id']} + ) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ActivationKey.update( {'content-view': new_cv['name'], 'name': ak_cv['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': ak_cv['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': ak_cv['id']}) assert updated_ak['content-view'] == new_cv['name'] @pytest.mark.tier1 -def test_positive_update_usage_limit_to_finite_number(module_org): +def test_positive_update_usage_limit_to_finite_number(module_org, module_target_sat): """Update Usage limit from Unlimited to a finite number :id: a55bb8dc-c7d8-4a6a-ac0f-1d5a377da543 @@ -513,17 +535,17 @@ def test_positive_update_usage_limit_to_finite_number(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = 
module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) assert new_ak['host-limit'] == 'Unlimited' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'max-hosts': '2147483647', 'name': new_ak['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['host-limit'] == '2147483647' @pytest.mark.tier1 -def test_positive_update_usage_limit_to_unlimited(module_org): +def test_positive_update_usage_limit_to_unlimited(module_org, module_target_sat): """Update Usage limit from definite number to Unlimited :id: 0b83657b-41d1-4fb2-9c23-c36011322b83 @@ -532,18 +554,20 @@ def test_positive_update_usage_limit_to_unlimited(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id, 'max-hosts': '10'}) + new_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'max-hosts': '10'} + ) assert new_ak['host-limit'] == '10' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'unlimited-hosts': True, 'name': new_ak['name'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['host-limit'] == 'Unlimited' @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_update_name(module_org, name): +def test_negative_update_name(module_org, name, module_target_sat): """Try to update Activation Key using invalid value for its name :id: b75e7c38-fde2-4110-ba65-4157319fc159 @@ -555,16 +579,16 @@ def test_negative_update_name(module_org, name): :parametrized: yes """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'id': new_ak['id'], 'new-name': name, 'organization-id': module_org.id} ) assert 'Could not update the activation key:' in raise_ctx.value.message @pytest.mark.tier2 -def test_negative_update_usage_limit(module_org): +def test_negative_update_usage_limit(module_org, module_target_sat): """Try to update Activation Key using invalid value for its usage limit attribute @@ -575,9 +599,9 @@ def test_negative_update_usage_limit(module_org): :CaseImportance: Low """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'max-hosts': int('9' * 20), 'id': new_ak['id'], 'organization-id': module_org.id} ) assert 'Validation failed: Max hosts must be less than 2147483648' in raise_ctx.value.message @@ -606,12 +630,14 @@ def test_positive_usage_limit(module_org, target_sat): :CaseLevel: System """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - new_ak = make_activation_key( + env = 
target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + new_cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + new_ak = target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': env['id'], 'content-view': new_cv['name'], @@ -633,7 +659,7 @@ def test_positive_usage_limit(module_org, target_sat): @pytest.mark.tier2 @pytest.mark.parametrize('host_col_name', **parametrized(valid_data_list())) -def test_positive_update_host_collection(module_org, host_col_name): +def test_positive_update_host_collection(module_org, host_col_name, module_target_sat): """Test that host collections can be associated to Activation Keys @@ -648,26 +674,28 @@ def test_positive_update_host_collection(module_org, host_col_name): :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col_name = make_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col_name = module_target_sat.cli_factory.make_host_collection( {'name': host_col_name, 'organization-id': module_org.id} )['name'] # Assert that name matches data passed assert new_host_col_name == host_col_name - ActivationKey.add_host_collection( + module_target_sat.cli.ActivationKey.add_host_collection( { 'host-collection': new_host_col_name, 'name': activation_key['name'], 'organization-id': module_org.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['host-collections'][0]['name'] == host_col_name @pytest.mark.run_in_one_thread @pytest.mark.tier2 -def test_positive_update_host_collection_with_default_org(module_org): +def test_positive_update_host_collection_with_default_org(module_org, module_target_sat): """Test that host collection can be associated to Activation Keys with specified default organization setting in config @@ -680,12 +708,14 @@ def test_positive_update_host_collection_with_default_org(module_org): """ Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id}) try: - activation_key = make_activation_key({'organization-id': module_org.id}) - host_col = make_host_collection() - ActivationKey.add_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + host_col = module_target_sat.cli_factory.make_host_collection() + module_target_sat.cli.ActivationKey.add_host_collection( {'host-collection': host_col['name'], 'name': activation_key['name']} ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['host-collections'][0]['name'] == host_col['name'] finally: Defaults.delete({'param-name': 'organization_id'}) @@ -693,7 +723,7 @@ def test_positive_update_host_collection_with_default_org(module_org): @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_add_redhat_product(function_entitlement_manifest_org): +def test_positive_add_redhat_product(function_entitlement_manifest_org, target_sat): """Test that RH product can be associated to 
Activation Keys :id: 7b15de8e-edde-41aa-937b-ad6aa529891a @@ -707,7 +737,7 @@ def test_positive_add_redhat_product(function_entitlement_manifest_org): # Using CDN as we need this repo to be RH one no matter are we in # downstream or cdn - result = setup_org_for_a_rh_repo( + result = target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -716,7 +746,7 @@ def test_positive_add_redhat_product(function_entitlement_manifest_org): }, force_use_cdn=True, ) - content = ActivationKey.product_content( + content = target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': org.id} ) assert content[0]['name'] == REPOSET['rhst7'] @@ -724,7 +754,7 @@ def test_positive_add_redhat_product(function_entitlement_manifest_org): @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_add_custom_product(module_org): +def test_positive_add_custom_product(module_org, module_target_sat): """Test that custom product can be associated to Activation Keys :id: 96ace967-e165-4069-8ff7-f54c4c822de0 @@ -736,11 +766,11 @@ def test_positive_add_custom_product(module_org): :BZ: 1426386 """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - repo = Repository.info({'id': result['repository-id']}) - content = ActivationKey.product_content( + repo = module_target_sat.cli.Repository.info({'id': result['repository-id']}) + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['name'] == repo['name'] @@ -773,7 +803,7 @@ def test_positive_add_redhat_and_custom_products( org = function_entitlement_manifest_org # Using CDN as we need this repo to be RH one no matter are we in # downstream or cdn - result = setup_org_for_a_rh_repo( + result = module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -782,7 +812,7 @@ def test_positive_add_redhat_and_custom_products( }, force_use_cdn=True, ) - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_0.url, 'organization-id': org.id, @@ -818,18 +848,18 @@ def test_positive_delete_manifest(function_entitlement_manifest_org, target_sat) :CaseAutomation: Automated """ org = function_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) + new_ak = target_sat.cli_factory.make_activation_key({'organization-id': org.id}) ak_subs = target_sat.cli.ActivationKey.subscriptions( {'id': new_ak['id'], 'organization-id': org.id} ) - subscription_result = Subscription.list( + subscription_result = target_sat.cli.Subscription.list( {'organization-id': org.id, 'order': 'id desc'}, per_page=False ) result = target_sat.cli.ActivationKey.add_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} ) assert 'Subscription added to activation key.' 
in result - Subscription.delete_manifest({'organization-id': org.id}) + target_sat.cli.Subscription.delete_manifest({'organization-id': org.id}) ak_subs_info = target_sat.cli.ActivationKey.subscriptions( {'id': new_ak['id'], 'organization-id': org.id} ) @@ -850,15 +880,17 @@ def test_positive_delete_subscription(function_entitlement_manifest_org, module_ :CaseLevel: Integration """ org = function_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) - subscription_result = Subscription.list( + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) + subscription_result = module_target_sat.cli.Subscription.list( {'organization-id': org.id, 'order': 'id desc'}, per_page=False ) result = module_target_sat.cli.ActivationKey.add_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} ) assert 'Subscription added to activation key.' in result - ak_subs_info = ActivationKey.subscriptions({'id': new_ak['id'], 'organization-id': org.id}) + ak_subs_info = module_target_sat.cli.ActivationKey.subscriptions( + {'id': new_ak['id'], 'organization-id': org.id} + ) assert subscription_result[-1]['name'] in ak_subs_info result = module_target_sat.cli.ActivationKey.remove_subscription( {'id': new_ak['id'], 'subscription-id': subscription_result[-1]['id']} @@ -886,13 +918,15 @@ def test_positive_update_aks_to_chost(module_org, rhel7_contenthost, target_sat) :CaseLevel: System """ - env = make_lifecycle_environment({'organization-id': module_org.id}) - new_cv = make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - cvv = ContentView.info({'id': new_cv['id']})['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + new_cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + target_sat.cli.ContentView.publish({'id': new_cv['id']}) + cvv = target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][0] + target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) new_aks = [ - make_activation_key( + target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': env['id'], 'content-view': new_cv['name'], @@ -948,7 +982,9 @@ def test_positive_list_by_name(module_org, name, module_target_sat): :parametrized: yes """ - make_activation_key({'organization-id': module_org.id, 'name': name}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) result = module_target_sat.cli.ActivationKey.list( {'name': name, 'organization-id': module_org.id} ) @@ -966,8 +1002,10 @@ def test_positive_list_by_cv_id(module_org, module_target_sat): :CaseImportance: High """ - cv = make_content_view({'organization-id': module_org.id}) - make_activation_key({'organization-id': module_org.id, 'content-view-id': cv['id']}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'content-view-id': cv['id']} + ) result = module_target_sat.cli.ActivationKey.list( {'content-view-id': cv['id'], 'organization-id': module_org.id} ) @@ -987,14 +1025,18 @@ def test_positive_create_using_old_name(module_org, module_target_sat): :CaseImportance: High """ name = gen_string('utf8') - activation_key = 
make_activation_key({'organization-id': module_org.id, 'name': name}) + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id, 'name': name} + ) new_name = gen_string('utf8') module_target_sat.cli.ActivationKey.update( {'id': activation_key['id'], 'new-name': new_name, 'organization-id': module_org.id} ) activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['name'] == new_name - new_activation_key = make_activation_key({'name': name, 'organization-id': module_org.id}) + new_activation_key = module_target_sat.cli_factory.make_activation_key( + {'name': name, 'organization-id': module_org.id} + ) assert new_activation_key['name'] == name @@ -1022,8 +1064,10 @@ def test_positive_remove_host_collection_by_id(module_org, module_target_sat): :BZ: 1336716 """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col = make_host_collection( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col = module_target_sat.cli_factory.make_host_collection( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) module_target_sat.cli.ActivationKey.add_host_collection( @@ -1035,7 +1079,7 @@ def test_positive_remove_host_collection_by_id(module_org, module_target_sat): ) activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert len(activation_key['host-collections']) == 1 - ActivationKey.remove_host_collection( + module_target_sat.cli.ActivationKey.remove_host_collection( { 'host-collection-id': new_host_col['id'], 'name': activation_key['name'], @@ -1071,8 +1115,12 @@ def test_positive_remove_host_collection_by_name(module_org, host_col, module_ta :parametrized: yes """ - activation_key = make_activation_key({'organization-id': module_org.id}) - new_host_col = make_host_collection({'name': host_col, 'organization-id': module_org.id}) + activation_key = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'name': host_col, 'organization-id': module_org.id} + ) # Assert that name matches data passed assert new_host_col['name'] == host_col module_target_sat.cli.ActivationKey.add_host_collection( @@ -1113,7 +1161,7 @@ def test_create_ak_with_syspurpose_set(module_entitlement_manifest_org, module_t :BZ: 1789028 """ # Requires Cls org and manifest. Manifest is for self-support values. - new_ak = make_activation_key( + new_ak = module_target_sat.cli_factory.make_activation_key( { 'purpose-addons': "test-addon1, test-addon2", 'purpose-role': "test-role", @@ -1169,7 +1217,7 @@ def test_update_ak_with_syspurpose_values(module_entitlement_manifest_org, modul # Requires Cls org and manifest. Manifest is for self-support values. # Create an AK with no system purpose values set org = module_entitlement_manifest_org - new_ak = make_activation_key({'organization-id': org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) # Assert system purpose values are null after creating the AK and adding the manifest. 
assert new_ak['system-purpose']['purpose-addons'] == '' assert new_ak['system-purpose']['purpose-role'] == '' @@ -1236,7 +1284,7 @@ def test_positive_add_subscription_by_id(module_entitlement_manifest_org, module :BZ: 1463685 """ org_id = module_entitlement_manifest_org.id - ackey_id = make_activation_key({'organization-id': org_id})['id'] + ackey_id = module_target_sat.cli_factory.make_activation_key({'organization-id': org_id})['id'] subs_id = module_target_sat.cli.Subscription.list({'organization-id': org_id}, per_page=False) result = module_target_sat.cli.ActivationKey.add_subscription( {'id': ackey_id, 'subscription-id': subs_id[0]['id']} @@ -1246,7 +1294,7 @@ def test_positive_add_subscription_by_id(module_entitlement_manifest_org, module @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_copy_by_parent_id(module_org, new_name): +def test_positive_copy_by_parent_id(module_org, new_name, module_target_sat): """Copy Activation key for all valid Activation Key name variations @@ -1258,15 +1306,17 @@ def test_positive_copy_by_parent_id(module_org, new_name): :parametrized: yes """ - parent_ak = make_activation_key({'organization-id': module_org.id}) - result = ActivationKey.copy( + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + result = module_target_sat.cli.ActivationKey.copy( {'id': parent_ak['id'], 'new-name': new_name, 'organization-id': module_org.id} ) assert 'Activation key copied.' in result @pytest.mark.tier1 -def test_positive_copy_by_parent_name(module_org): +def test_positive_copy_by_parent_name(module_org, module_target_sat): """Copy Activation key by passing name of parent :id: 5d5405e6-3b26-47a3-96ff-f6c0f6c32607 @@ -1275,8 +1325,10 @@ def test_positive_copy_by_parent_name(module_org): :CaseImportance: Critical """ - parent_ak = make_activation_key({'organization-id': module_org.id}) - result = ActivationKey.copy( + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) + result = module_target_sat.cli.ActivationKey.copy( { 'name': parent_ak['name'], 'new-name': gen_string('alpha'), @@ -1287,7 +1339,7 @@ def test_positive_copy_by_parent_name(module_org): @pytest.mark.tier1 -def test_negative_copy_with_same_name(module_org): +def test_negative_copy_with_same_name(module_org, module_target_sat): """Copy activation key with duplicate name :id: f867c468-4155-495c-a1e5-c04d9868a2e0 @@ -1295,9 +1347,11 @@ def test_negative_copy_with_same_name(module_org): :expectedresults: Activation key is not successfully copied """ - parent_ak = make_activation_key({'organization-id': module_org.id}) + parent_ak = module_target_sat.cli_factory.make_activation_key( + {'organization-id': module_org.id} + ) with pytest.raises(CLIReturnCodeError) as raise_ctx: - ActivationKey.copy( + module_target_sat.cli.ActivationKey.copy( { 'name': parent_ak['name'], 'new-name': parent_ak['name'], @@ -1312,7 +1366,7 @@ def test_negative_copy_with_same_name(module_org): @pytest.mark.skip_if_not_set('fake_manifest') @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_copy_subscription(module_entitlement_manifest_org): +def test_positive_copy_subscription(module_entitlement_manifest_org, module_target_sat): """Copy Activation key and verify contents :id: f4ee8096-4120-4d06-8c9a-57ac1eaa8f68 @@ -1329,24 +1383,28 @@ def test_positive_copy_subscription(module_entitlement_manifest_org): """ # Begin test setup org = module_entitlement_manifest_org 
- parent_ak = make_activation_key({'organization-id': org.id}) - subscription_result = Subscription.list({'organization-id': org.id}, per_page=False) - ActivationKey.add_subscription( + parent_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': org.id}) + subscription_result = module_target_sat.cli.Subscription.list( + {'organization-id': org.id}, per_page=False + ) + module_target_sat.cli.ActivationKey.add_subscription( {'id': parent_ak['id'], 'subscription-id': subscription_result[0]['id']} ) # End test setup new_name = gen_string('utf8') - result = ActivationKey.copy( + result = module_target_sat.cli.ActivationKey.copy( {'id': parent_ak['id'], 'new-name': new_name, 'organization-id': org.id} ) assert 'Activation key copied.' in result - result = ActivationKey.subscriptions({'name': new_name, 'organization-id': org.id}) + result = module_target_sat.cli.ActivationKey.subscriptions( + {'name': new_name, 'organization-id': org.id} + ) # Verify that the subscription copied over assert subscription_result[0]['name'] in result # subscription name # subscription list @pytest.mark.tier1 -def test_positive_update_autoattach_toggle(module_org): +def test_positive_update_autoattach_toggle(module_org, module_target_sat): """Update Activation key with inverse auto-attach value :id: de3b5fb7-7963-420a-b4c9-c66e78a111dc @@ -1361,19 +1419,19 @@ def test_positive_update_autoattach_toggle(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) attach_value = new_ak['auto-attach'] # invert value new_value = 'false' if attach_value == 'true' else 'true' - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( {'auto-attach': new_value, 'id': new_ak['id'], 'organization-id': module_org.id} ) - updated_ak = ActivationKey.info({'id': new_ak['id']}) + updated_ak = module_target_sat.cli.ActivationKey.info({'id': new_ak['id']}) assert updated_ak['auto-attach'] == new_value @pytest.mark.tier1 -def test_positive_update_autoattach(module_org): +def test_positive_update_autoattach(module_org, module_target_sat): """Update Activation key with valid auto-attach values :id: 9e18b950-6f0f-4f82-a3ac-ef6aba950a78 @@ -1382,16 +1440,16 @@ def test_positive_update_autoattach(module_org): :CaseImportance: Critical """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) for new_value in ('1', '0', 'true', 'false', 'yes', 'no'): - result = ActivationKey.update( + result = module_target_sat.cli.ActivationKey.update( {'auto-attach': new_value, 'id': new_ak['id'], 'organization-id': module_org.id} ) assert 'Activation key updated.' 
== result[0]['message'] @pytest.mark.tier2 -def test_negative_update_autoattach(module_org): +def test_negative_update_autoattach(module_org, module_target_sat): """Attempt to update Activation key with bad auto-attach value :id: 54b6f808-ff54-4e69-a54d-e1f99a4652f9 @@ -1406,9 +1464,9 @@ def test_negative_update_autoattach(module_org): :CaseImportance: Low """ - new_ak = make_activation_key({'organization-id': module_org.id}) + new_ak = module_target_sat.cli_factory.make_activation_key({'organization-id': module_org.id}) with pytest.raises(CLIReturnCodeError) as exe: - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'auto-attach': gen_string('utf8'), 'id': new_ak['id'], @@ -1420,7 +1478,7 @@ def test_negative_update_autoattach(module_org): @pytest.mark.tier3 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_content_override(module_org): +def test_positive_content_override(module_org, module_target_sat): """Positive content override :id: a4912cc0-3bf7-4e90-bb51-ec88b2fad227 @@ -1436,14 +1494,14 @@ def test_positive_content_override(module_org): :CaseLevel: System """ - result = setup_org_for_a_custom_repo( + result = module_target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_0.url, 'organization-id': module_org.id} ) - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) for override_value in (True, False): - ActivationKey.content_override( + module_target_sat.cli.ActivationKey.content_override( { 'content-label': content[0]['label'], 'id': result['activationkey-id'], @@ -1452,14 +1510,14 @@ def test_positive_content_override(module_org): } ) # Retrieve the product content enabled flag - content = ActivationKey.product_content( + content = module_target_sat.cli.ActivationKey.product_content( {'id': result['activationkey-id'], 'organization-id': module_org.id} ) assert content[0]['override'] == f'enabled:{int(override_value)}' @pytest.mark.tier2 -def test_positive_remove_user(module_org): +def test_positive_remove_user(module_org, module_target_sat): """Delete any user who has previously created an activation key and check that activation key still exists @@ -1470,20 +1528,22 @@ def test_positive_remove_user(module_org): :BZ: 1291271 """ password = gen_string('alpha') - user = make_user({'password': password, 'admin': 'true'}) - ak = ActivationKey.with_user(username=user['login'], password=password).create( - {'name': gen_string('alpha'), 'organization-id': module_org.id} - ) - User.delete({'id': user['id']}) + user = module_target_sat.cli_factory.make_user({'password': password, 'admin': 'true'}) + ak = module_target_sat.cli.ActivationKey.with_user( + username=user['login'], password=password + ).create({'name': gen_string('alpha'), 'organization-id': module_org.id}) + module_target_sat.cli.User.delete({'id': user['id']}) try: - ActivationKey.info({'id': ak['id']}) + module_target_sat.cli.ActivationKey.info({'id': ak['id']}) except CLIReturnCodeError: pytest.fail("Activation key can't be read") @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manifest_org): +def test_positive_view_subscriptions_by_non_admin_user( + module_entitlement_manifest_org, module_target_sat +): """Attempt to read activation key subscriptions by non admin user :id: af75b640-97be-431b-8ac0-a6367f8f1996 @@ 
-1525,18 +1585,24 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife f'Test_*_{gen_string("alpha")}', ) ak_name = f'{ak_name_like}_{gen_string("alpha")}' - available_subscriptions = Subscription.list({'organization-id': org.id}, per_page=False) + available_subscriptions = module_target_sat.cli.Subscription.list( + {'organization-id': org.id}, per_page=False + ) assert len(available_subscriptions) > 0 available_subscription_ids = [subscription['id'] for subscription in available_subscriptions] subscription_id = choice(available_subscription_ids) - activation_key = make_activation_key({'name': ak_name, 'organization-id': org.id}) - ActivationKey.add_subscription({'id': activation_key['id'], 'subscription-id': subscription_id}) - subscriptions = ActivationKey.subscriptions( + activation_key = module_target_sat.cli_factory.make_activation_key( + {'name': ak_name, 'organization-id': org.id} + ) + module_target_sat.cli.ActivationKey.add_subscription( + {'id': activation_key['id'], 'subscription-id': subscription_id} + ) + subscriptions = module_target_sat.cli.ActivationKey.subscriptions( {'organization-id': org.id, 'id': activation_key['id']}, output_format='csv', ) assert len(subscriptions) == 1 - role = make_role({'organization-id': org.id}) + role = module_target_sat.cli_factory.make_role({'organization-id': org.id}) resource_permissions = { 'Katello::ActivationKey': { 'permissions': [ @@ -1560,8 +1626,8 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife ] }, } - add_role_permissions(role['id'], resource_permissions) - user = make_user( + module_target_sat.cli_factory.add_role_permissions(role['id'], resource_permissions) + user = module_target_sat.cli_factory.make_user( { 'admin': False, 'default-organization-id': org.id, @@ -1570,8 +1636,8 @@ def test_positive_view_subscriptions_by_non_admin_user(module_entitlement_manife 'password': user_password, } ) - User.add_role({'id': user['id'], 'role-id': role['id']}) - ak_user_cli_session = ActivationKey.with_user(user_name, user_password) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) + ak_user_cli_session = module_target_sat.cli.ActivationKey.with_user(user_name, user_password) subscriptions = ak_user_cli_session.subscriptions( {'organization-id': org.id, 'id': activation_key['id']}, output_format='csv', @@ -1602,7 +1668,7 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost :BZ: 1633094 """ org = function_org - result = setup_org_for_a_rh_repo( + result = target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -1611,8 +1677,8 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost }, force_use_cdn=True, ) - ak = ActivationKey.info({'id': result['activationkey-id']}) - setup_org_for_a_custom_repo( + ak = target_sat.cli.ActivationKey.info({'id': result['activationkey-id']}) + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_0.url, 'organization-id': org['id'], @@ -1621,13 +1687,13 @@ def test_positive_subscription_quantity_attached(function_org, rhel7_contenthost 'lifecycle-environment-id': result['lifecycle-environment-id'], } ) - subs = Subscription.list({'organization-id': org['id']}, per_page=False) + subs = target_sat.cli.Subscription.list({'organization-id': org['id']}, per_page=False) subs_lookup = {s['id']: s for s in subs} rhel7_contenthost.install_katello_ca(target_sat) 
rhel7_contenthost.register_contenthost(org['label'], activation_key=ak['name']) assert rhel7_contenthost.subscribed - ak_subs = ActivationKey.subscriptions( + ak_subs = target_sat.cli.ActivationKey.subscriptions( {'activation-key': ak['name'], 'organization-id': org['id']}, output_format='json' ) assert len(ak_subs) == 2 # one for #rh product, one for custom product diff --git a/tests/foreman/cli/test_architecture.py b/tests/foreman/cli/test_architecture.py index ec212894310..4f0fe73061b 100644 --- a/tests/foreman/cli/test_architecture.py +++ b/tests/foreman/cli/test_architecture.py @@ -19,9 +19,7 @@ from fauxfactory import gen_choice import pytest -from robottelo.cli.architecture import Architecture -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_architecture +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_id_list, invalid_values_list, @@ -34,12 +32,12 @@ class TestArchitecture: """Architecture CLI related tests.""" @pytest.fixture(scope='class') - def class_architecture(self): + def class_architecture(self, class_target_sat): """Shared architecture for tests""" - return make_architecture() + return class_target_sat.cli_factory.make_architecture() @pytest.mark.tier1 - def test_positive_CRUD(self): + def test_positive_CRUD(self, module_target_sat): """Create a new Architecture, update the name and delete the Architecture itself. :id: cd8654b8-e603-11ea-adc1-0242ac120002 @@ -52,18 +50,18 @@ def test_positive_CRUD(self): name = gen_choice(list(valid_data_list().values())) new_name = gen_choice(list(valid_data_list().values())) - architecture = make_architecture({'name': name}) + architecture = module_target_sat.cli_factory.make_architecture({'name': name}) assert architecture['name'] == name - Architecture.update({'id': architecture['id'], 'new-name': new_name}) - architecture = Architecture.info({'id': architecture['id']}) + module_target_sat.cli.Architecture.update({'id': architecture['id'], 'new-name': new_name}) + architecture = module_target_sat.cli.Architecture.info({'id': architecture['id']}) assert architecture['name'] == new_name - Architecture.delete({'id': architecture['id']}) + module_target_sat.cli.Architecture.delete({'id': architecture['id']}) with pytest.raises(CLIReturnCodeError): - Architecture.info({'id': architecture['id']}) + module_target_sat.cli.Architecture.info({'id': architecture['id']}) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, name, module_target_sat): """Don't create an Architecture with invalid data. 
:id: cfed972e-9b09-4852-bdd2-b5a8a8aed170 @@ -76,13 +74,13 @@ def test_negative_create_with_name(self, name): """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.create({'name': name}) + module_target_sat.cli.Architecture.create({'name': name}) assert 'Could not create the architecture:' in error.value.message @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, class_architecture, new_name): + def test_negative_update_name(self, class_architecture, new_name, module_target_sat): """Create Architecture then fail to update its name :id: 037c4892-5e62-46dd-a2ed-92243e870e40 @@ -95,16 +93,18 @@ def test_negative_update_name(self, class_architecture, new_name): """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.update({'id': class_architecture['id'], 'new-name': new_name}) + module_target_sat.cli.Architecture.update( + {'id': class_architecture['id'], 'new-name': new_name} + ) assert 'Could not update the architecture:' in error.value.message - result = Architecture.info({'id': class_architecture['id']}) + result = module_target_sat.cli.Architecture.info({'id': class_architecture['id']}) assert class_architecture['name'] == result['name'] @pytest.mark.tier1 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) - def test_negative_delete_by_id(self, entity_id): + def test_negative_delete_by_id(self, entity_id, module_target_sat): """Delete architecture by invalid ID :id: 78bae664-6493-4c74-a587-94170f20746e @@ -116,6 +116,6 @@ def test_negative_delete_by_id(self, entity_id): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError) as error: - Architecture.delete({'id': entity_id}) + module_target_sat.cli.Architecture.delete({'id': entity_id}) assert 'Could not delete the architecture' in error.value.message diff --git a/tests/foreman/cli/test_auth.py b/tests/foreman/cli/test_auth.py index 51945a6445a..2f6a52a6ffd 100644 --- a/tests/foreman/cli/test_auth.py +++ b/tests/foreman/cli/test_auth.py @@ -21,14 +21,9 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.auth import Auth, AuthLogin -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_user -from robottelo.cli.org import Org -from robottelo.cli.settings import Settings -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import HAMMER_CONFIG +from robottelo.exceptions import CLIReturnCodeError LOGEDIN_MSG = "Session exists, currently logged in as '{0}'" NOTCONF_MSG = "Credentials are not configured." 
@@ -52,18 +47,20 @@ def configure_sessions(satellite, enable=True, add_default_creds=False): @pytest.fixture(scope='module') -def admin_user(): +def admin_user(module_target_sat): """create the admin role user for tests""" uname_admin = gen_string('alpha') - return make_user({'login': uname_admin, 'password': password, 'admin': '1'}) + return module_target_sat.cli_factory.make_user( + {'login': uname_admin, 'password': password, 'admin': '1'} + ) @pytest.fixture(scope='module') -def non_admin_user(): +def non_admin_user(module_target_sat): """create the non-admin role user for tests""" uname_viewer = gen_string('alpha') - user = make_user({'login': uname_viewer, 'password': password}) - User.add_role({'login': uname_viewer, 'role': 'Viewer'}) + user = module_target_sat.cli_factory.make_user({'login': uname_viewer, 'password': password}) + module_target_sat.cli.User.add_role({'login': uname_viewer, 'role': 'Viewer'}) return user @@ -85,24 +82,24 @@ def test_positive_create_session(admin_user, target_sat): expires after specified time """ try: - idle_timeout = Settings.list({'search': 'name=idle_timeout'})[0]['value'] - Settings.set({'name': 'idle_timeout', 'value': 1}) + idle_timeout = target_sat.cli.Settings.list({'search': 'name=idle_timeout'})[0]['value'] + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': 1}) result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() + assert target_sat.cli.Org.with_user().list() # wait until session expires sleep(70) with pytest.raises(CLIReturnCodeError): - Org.with_user().list() - result = Auth.with_user().status() + target_sat.cli.Org.with_user().list() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] finally: # reset timeout to default - Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) @pytest.mark.tier1 @@ -124,18 +121,18 @@ def test_positive_disable_session(admin_user, target_sat): """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() + assert target_sat.cli.Org.with_user().list() # disabling sessions result = configure_sessions(satellite=target_sat, enable=False) assert result == 0, 'Failed to configure hammer sessions' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -156,16 +153,16 @@ def test_positive_log_out_from_session(admin_user, target_sat): """ result = configure_sessions(target_sat) 
assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert Org.with_user().list() - Auth.logout() - result = Auth.with_user().status() + assert target_sat.cli.Org.with_user().list() + target_sat.cli.Auth.logout() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -188,15 +185,15 @@ def test_positive_change_session(admin_user, non_admin_user, target_sat): """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - assert User.with_user().list() - AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) - result = Auth.with_user().status() + assert target_sat.cli.User.with_user().list() + target_sat.cli.AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(non_admin_user['login']) in result[0]['message'] - assert User.with_user().list() + assert target_sat.cli.User.with_user().list() @pytest.mark.tier1 @@ -219,16 +216,16 @@ def test_positive_session_survives_unauthenticated_call(admin_user, target_sat): """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials - Org.with_user().list() + target_sat.cli.Org.with_user().list() result = target_sat.execute('hammer ping') assert result.status == 0, 'Failed to run hammer ping' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -251,17 +248,19 @@ def test_positive_session_survives_failed_login(admin_user, non_admin_user, targ """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() # using invalid password with 
pytest.raises(CLIReturnCodeError): - AuthLogin.basic({'username': non_admin_user['login'], 'password': gen_string('alpha')}) + target_sat.cli.AuthLogin.basic( + {'username': non_admin_user['login'], 'password': gen_string('alpha')} + ) # checking the session status again - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.e2e @@ -289,20 +288,20 @@ def test_positive_session_preceeds_saved_credentials(admin_user, target_sat): """ try: - idle_timeout = Settings.list({'search': 'name=idle_timeout'})[0]['value'] - Settings.set({'name': 'idle_timeout', 'value': 1}) + idle_timeout = target_sat.cli.Settings.list({'search': 'name=idle_timeout'})[0]['value'] + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': 1}) result = configure_sessions(satellite=target_sat, add_default_creds=True) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(admin_user['login']) in result[0]['message'] # list organizations without supplying credentials sleep(70) with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() finally: # reset timeout to default - Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) + target_sat.cli.Settings.set({'name': 'idle_timeout', 'value': f'{idle_timeout}'}) @pytest.mark.tier1 @@ -317,10 +316,10 @@ def test_negative_no_credentials(target_sat): """ result = configure_sessions(satellite=target_sat, enable=False) assert result == 0, 'Failed to configure hammer sessions' - result = Auth.with_user().status() + result = target_sat.cli.Auth.with_user().status() assert NOTCONF_MSG in result[0]['message'] with pytest.raises(CLIReturnCodeError): - Org.with_user().list() + target_sat.cli.Org.with_user().list() @pytest.mark.tier1 @@ -336,9 +335,11 @@ def test_negative_no_permissions(admin_user, non_admin_user, target_sat): """ result = configure_sessions(target_sat) assert result == 0, 'Failed to configure hammer sessions' - AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) - result = Auth.with_user().status() + target_sat.cli.AuthLogin.basic({'username': non_admin_user['login'], 'password': password}) + result = target_sat.cli.Auth.with_user().status() assert LOGEDIN_MSG.format(non_admin_user['login']) in result[0]['message'] # try to update user from viewer's session with pytest.raises(CLIReturnCodeError): - User.with_user().update({'login': admin_user['login'], 'new-login': gen_string('alpha')}) + target_sat.cli.User.with_user().update( + {'login': admin_user['login'], 'new-login': gen_string('alpha')} + ) diff --git a/tests/foreman/cli/test_capsule.py b/tests/foreman/cli/test_capsule.py index 8dee2e6a3dc..94d8fdb8f0c 100644 --- a/tests/foreman/cli/test_capsule.py +++ b/tests/foreman/cli/test_capsule.py @@ -18,14 +18,12 @@ """ import pytest -from robottelo.cli.proxy import Proxy - pytestmark = [pytest.mark.run_in_one_thread] @pytest.mark.skip_if_not_set('fake_capsules') @pytest.mark.tier1 -def test_positive_import_puppet_classes(session_puppet_enabled_sat, puppet_proxy_port_range): +def 
test_positive_import_puppet_classes(session_puppet_enabled_sat): """Import puppet classes from proxy :id: 42e3a9c0-62e1-4049-9667-f3c0cdfe0b04 @@ -38,8 +36,8 @@ def test_positive_import_puppet_classes(session_puppet_enabled_sat, puppet_proxy port = puppet_sat.available_capsule_port with puppet_sat.default_url_on_new_port(9090, port) as url: proxy = puppet_sat.cli_factory.make_proxy({'url': url}) - Proxy.import_classes({'id': proxy['id']}) - Proxy.delete({'id': proxy['id']}) + puppet_sat.cli.Proxy.import_classes({'id': proxy['id']}) + puppet_sat.cli.Proxy.delete({'id': proxy['id']}) @pytest.mark.stubbed diff --git a/tests/foreman/cli/test_classparameters.py b/tests/foreman/cli/test_classparameters.py index f48019c4963..bfa07d4b8c2 100644 --- a/tests/foreman/cli/test_classparameters.py +++ b/tests/foreman/cli/test_classparameters.py @@ -18,8 +18,8 @@ """ import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import gen_string diff --git a/tests/foreman/cli/test_computeresource_azurerm.py b/tests/foreman/cli/test_computeresource_azurerm.py index 998175e48ed..e2814110372 100644 --- a/tests/foreman/cli/test_computeresource_azurerm.py +++ b/tests/foreman/cli/test_computeresource_azurerm.py @@ -19,8 +19,6 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.host import Host from robottelo.config import settings from robottelo.constants import ( AZURERM_FILE_URI, @@ -117,7 +115,7 @@ def test_positive_crud_azurerm_cr( # Delete CR sat_azure.cli.ComputeResource.delete({'name': result['name']}) - assert not ComputeResource.exists(search=('name', result['name'])) + assert not sat_azure.cli.ComputeResource.exists(search=('name', result['name'])) @pytest.mark.upgrade @pytest.mark.tier2 @@ -377,8 +375,8 @@ def class_host_ft( ) yield host with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): - if Host.exists(search=('name', host['name'])): - Host.delete({'name': self.fullhostname}, timeout=1800000) + if sat_azure.cli.Host.exists(search=('name', host['name'])): + sat_azure.cli.Host.delete({'name': self.fullhostname}, timeout=1800000) @pytest.fixture(scope='class') def azureclient_host(self, azurermclient, class_host_ft): @@ -505,8 +503,8 @@ def class_host_ud( ) yield host with sat_azure.api_factory.satellite_setting('destroy_vm_on_host_delete=True'): - if Host.exists(search=('name', host['name'])): - Host.delete({'name': self.fullhostname}, timeout=1800000) + if sat_azure.cli.Host.exists(search=('name', host['name'])): + sat_azure.cli.Host.delete({'name': self.fullhostname}, timeout=1800000) @pytest.fixture(scope='class') def azureclient_host(self, azurermclient, class_host_ud): diff --git a/tests/foreman/cli/test_computeresource_ec2.py b/tests/foreman/cli/test_computeresource_ec2.py index e0f3d8cc4c3..3331f582f30 100644 --- a/tests/foreman/cli/test_computeresource_ec2.py +++ b/tests/foreman/cli/test_computeresource_ec2.py @@ -16,18 +16,16 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.factory import make_compute_resource, make_location, make_org -from robottelo.cli.org import Org from robottelo.config import settings from robottelo.constants import EC2_REGION_CA_CENTRAL_1, FOREMAN_PROVIDERS @pytest.fixture(scope='module') -def aws(): +def aws(module_target_sat): aws = type('rhev', (object,), {})() - aws.org = make_org() - aws.loc = make_location() 
- Org.add_location({'id': aws.org['id'], 'location-id': aws.loc['id']}) + aws.org = module_target_sat.cli_factory.make_org() + aws.loc = module_target_sat.cli_factory.make_location() + module_target_sat.cli.Org.add_location({'id': aws.org['id'], 'location-id': aws.loc['id']}) aws.aws_access_key = settings.ec2.access_key aws.aws_secret_key = settings.ec2.secret_key aws.aws_region = settings.ec2.region @@ -41,7 +39,7 @@ def aws(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_create_ec2_with_custom_region(aws): +def test_positive_create_ec2_with_custom_region(aws, module_target_sat): """Create a new ec2 compute resource with custom region :id: 28eb592d-ebf0-4659-900a-87112b3b2ad7 @@ -60,7 +58,7 @@ def test_positive_create_ec2_with_custom_region(aws): """ cr_name = gen_string(str_type='alpha') cr_description = gen_string(str_type='alpha') - cr = make_compute_resource( + cr = module_target_sat.cli_factory.make_compute_resource( { 'name': cr_name, 'description': cr_description, diff --git a/tests/foreman/cli/test_computeresource_libvirt.py b/tests/foreman/cli/test_computeresource_libvirt.py index e5e1997fa3b..12f332559f1 100644 --- a/tests/foreman/cli/test_computeresource_libvirt.py +++ b/tests/foreman/cli/test_computeresource_libvirt.py @@ -39,11 +39,9 @@ import pytest from wait_for import wait_for -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.factory import make_compute_resource, make_location from robottelo.config import settings from robottelo.constants import FOREMAN_PROVIDERS, LIBVIRT_RESOURCE_URL +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import parametrized LIBVIRT_URL = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname @@ -113,7 +111,7 @@ def libvirt_url(): @pytest.mark.tier1 -def test_positive_create_with_name(libvirt_url): +def test_positive_create_with_name(libvirt_url, module_target_sat): """Create Compute Resource :id: 6460bcc7-d7f7-406a-aecb-b3d54d51e697 @@ -124,7 +122,7 @@ def test_positive_create_with_name(libvirt_url): :CaseLevel: Component """ - ComputeResource.create( + module_target_sat.cli.ComputeResource.create( { 'name': f'cr {gen_string("alpha")}', 'provider': 'Libvirt', @@ -134,7 +132,7 @@ def test_positive_create_with_name(libvirt_url): @pytest.mark.tier1 -def test_positive_info(libvirt_url): +def test_positive_info(libvirt_url, module_target_sat): """Test Compute Resource Info :id: f54af041-4471-4d8e-9429-45d821df0440 @@ -146,7 +144,7 @@ def test_positive_info(libvirt_url): :CaseLevel: Component """ name = gen_string('utf8') - compute_resource = make_compute_resource( + compute_resource = module_target_sat.cli_factory.make_compute_resource( { 'name': name, 'provider': FOREMAN_PROVIDERS['libvirt'], @@ -158,7 +156,7 @@ def test_positive_info(libvirt_url): @pytest.mark.tier1 -def test_positive_list(libvirt_url): +def test_positive_list(libvirt_url, module_target_sat): """Test Compute Resource List :id: 11123361-ffbc-4c59-a0df-a4af3408af7a @@ -169,17 +167,21 @@ def test_positive_list(libvirt_url): :CaseLevel: Component """ - comp_res = make_compute_resource({'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}) + comp_res = module_target_sat.cli_factory.make_compute_resource( + {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url} + ) assert comp_res['name'] - result_list = ComputeResource.list({'search': 'name=%s' % comp_res['name']}) + result_list = module_target_sat.cli.ComputeResource.list( + {'search': 
'name=%s' % comp_res['name']} + ) assert len(result_list) > 0 - result = ComputeResource.exists(search=('name', comp_res['name'])) + result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name'])) assert result @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_by_name(libvirt_url): +def test_positive_delete_by_name(libvirt_url, module_target_sat): """Test Compute Resource delete :id: 7fcc0b66-f1c1-4194-8a4b-7f04b1dd439a @@ -190,10 +192,12 @@ def test_positive_delete_by_name(libvirt_url): :CaseLevel: Component """ - comp_res = make_compute_resource({'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url}) + comp_res = module_target_sat.cli_factory.make_compute_resource( + {'provider': FOREMAN_PROVIDERS['libvirt'], 'url': libvirt_url} + ) assert comp_res['name'] - ComputeResource.delete({'name': comp_res['name']}) - result = ComputeResource.exists(search=('name', comp_res['name'])) + module_target_sat.cli.ComputeResource.delete({'name': comp_res['name']}) + result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name'])) assert len(result) == 0 @@ -201,7 +205,7 @@ def test_positive_delete_by_name(libvirt_url): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('options', **parametrized(valid_name_desc_data())) -def test_positive_create_with_libvirt(libvirt_url, options): +def test_positive_create_with_libvirt(libvirt_url, options, target_sat): """Test Compute Resource create :id: adc6f4f8-6420-4044-89d1-c69e0bfeeab9 @@ -214,7 +218,7 @@ def test_positive_create_with_libvirt(libvirt_url, options): :parametrized: yes """ - ComputeResource.create( + target_sat.cli.ComputeResource.create( { 'description': options['description'], 'name': options['name'], @@ -225,7 +229,7 @@ def test_positive_create_with_libvirt(libvirt_url, options): @pytest.mark.tier2 -def test_positive_create_with_loc(libvirt_url): +def test_positive_create_with_loc(libvirt_url, module_target_sat): """Create Compute Resource with location :id: 224c7cbc-6bac-4a94-8141-d6249896f5a2 @@ -236,14 +240,16 @@ def test_positive_create_with_loc(libvirt_url): :CaseLevel: Integration """ - location = make_location() - comp_resource = make_compute_resource({'location-ids': location['id']}) + location = module_target_sat.cli_factory.make_location() + comp_resource = module_target_sat.cli_factory.make_compute_resource( + {'location-ids': location['id']} + ) assert len(comp_resource['locations']) == 1 assert comp_resource['locations'][0] == location['name'] @pytest.mark.tier2 -def test_positive_create_with_locs(libvirt_url): +def test_positive_create_with_locs(libvirt_url, module_target_sat): """Create Compute Resource with multiple locations :id: f665c586-39bf-480a-a0fc-81d9e1eb7c54 @@ -256,8 +262,8 @@ def test_positive_create_with_locs(libvirt_url): :CaseLevel: Integration """ locations_amount = random.randint(3, 5) - locations = [make_location() for _ in range(locations_amount)] - comp_resource = make_compute_resource( + locations = [module_target_sat.cli_factory.make_location() for _ in range(locations_amount)] + comp_resource = module_target_sat.cli_factory.make_compute_resource( {'location-ids': [location['id'] for location in locations]} ) assert len(comp_resource['locations']) == locations_amount @@ -270,7 +276,7 @@ def test_positive_create_with_locs(libvirt_url): @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_create_data())) -def test_negative_create_with_name_url(libvirt_url, options): +def 
test_negative_create_with_name_url(libvirt_url, options, target_sat): """Compute Resource negative create with invalid values :id: cd432ff3-b3b9-49cd-9a16-ed00d81679dd @@ -284,7 +290,7 @@ def test_negative_create_with_name_url(libvirt_url, options): :parametrized: yes """ with pytest.raises(CLIReturnCodeError): - ComputeResource.create( + target_sat.cli.ComputeResource.create( { 'name': options.get('name', gen_string(str_type='alphanumeric')), 'provider': FOREMAN_PROVIDERS['libvirt'], @@ -294,7 +300,7 @@ def test_negative_create_with_name_url(libvirt_url, options): @pytest.mark.tier2 -def test_negative_create_with_same_name(libvirt_url): +def test_negative_create_with_same_name(libvirt_url, module_target_sat): """Compute Resource negative create with the same name :id: ddb5c45b-1ea3-46d0-b248-56c0388d2e4b @@ -305,9 +311,9 @@ def test_negative_create_with_same_name(libvirt_url): :CaseLevel: Component """ - comp_res = make_compute_resource() + comp_res = module_target_sat.cli_factory.make_compute_resource() with pytest.raises(CLIReturnCodeError): - ComputeResource.create( + module_target_sat.cli.ComputeResource.create( { 'name': comp_res['name'], 'provider': FOREMAN_PROVIDERS['libvirt'], @@ -321,7 +327,7 @@ def test_negative_create_with_same_name(libvirt_url): @pytest.mark.tier1 @pytest.mark.parametrize('options', **parametrized(valid_update_data())) -def test_positive_update_name(libvirt_url, options): +def test_positive_update_name(libvirt_url, options, module_target_sat): """Compute Resource positive update :id: 213d7f04-4c54-4985-8ca0-d2a1a9e3b305 @@ -334,12 +340,12 @@ def test_positive_update_name(libvirt_url, options): :parametrized: yes """ - comp_res = make_compute_resource() + comp_res = module_target_sat.cli_factory.make_compute_resource() options.update({'name': comp_res['name']}) # update Compute Resource - ComputeResource.update(options) + module_target_sat.cli.ComputeResource.update(options) # check updated values - result = ComputeResource.info({'id': comp_res['id']}) + result = module_target_sat.cli.ComputeResource.info({'id': comp_res['id']}) assert result['description'] == options.get('description', comp_res['description']) assert result['name'] == options.get('new-name', comp_res['name']) assert result['url'] == options.get('url', comp_res['url']) @@ -351,7 +357,7 @@ def test_positive_update_name(libvirt_url, options): @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_update_data())) -def test_negative_update(libvirt_url, options): +def test_negative_update(libvirt_url, options, module_target_sat): """Compute Resource negative update :id: e7aa9b39-dd01-4f65-8e89-ff5a6f4ee0e3 @@ -364,10 +370,10 @@ def test_negative_update(libvirt_url, options): :parametrized: yes """ - comp_res = make_compute_resource() + comp_res = module_target_sat.cli_factory.make_compute_resource() with pytest.raises(CLIReturnCodeError): - ComputeResource.update(dict({'name': comp_res['name']}, **options)) - result = ComputeResource.info({'id': comp_res['id']}) + module_target_sat.cli.ComputeResource.update(dict({'name': comp_res['name']}, **options)) + result = module_target_sat.cli.ComputeResource.info({'id': comp_res['id']}) # check attributes have not changed assert result['name'] == comp_res['name'] options.pop('new-name', None) @@ -377,7 +383,9 @@ def test_negative_update(libvirt_url, options): @pytest.mark.tier2 @pytest.mark.parametrize('set_console_password', ['true', 'false']) -def test_positive_create_with_console_password_and_name(libvirt_url, 
set_console_password): +def test_positive_create_with_console_password_and_name( + libvirt_url, set_console_password, module_target_sat +): """Create a compute resource with ``--set-console-password``. :id: 5b4c838a-0265-4c71-a73d-305fecbe508a @@ -392,7 +400,7 @@ def test_positive_create_with_console_password_and_name(libvirt_url, set_console :parametrized: yes """ - ComputeResource.create( + module_target_sat.cli.ComputeResource.create( { 'name': gen_string('utf8'), 'provider': 'Libvirt', @@ -404,7 +412,7 @@ def test_positive_create_with_console_password_and_name(libvirt_url, set_console @pytest.mark.tier2 @pytest.mark.parametrize('set_console_password', ['true', 'false']) -def test_positive_update_console_password(libvirt_url, set_console_password): +def test_positive_update_console_password(libvirt_url, set_console_password, module_target_sat): """Update a compute resource with ``--set-console-password``. :id: ef09351e-dcd3-4b4f-8d3b-995e9e5873b3 @@ -420,8 +428,12 @@ def test_positive_update_console_password(libvirt_url, set_console_password): :parametrized: yes """ cr_name = gen_string('utf8') - ComputeResource.create({'name': cr_name, 'provider': 'Libvirt', 'url': gen_url()}) - ComputeResource.update({'name': cr_name, 'set-console-password': set_console_password}) + module_target_sat.cli.ComputeResource.create( + {'name': cr_name, 'provider': 'Libvirt', 'url': gen_url()} + ) + module_target_sat.cli.ComputeResource.update( + {'name': cr_name, 'set-console-password': set_console_password} + ) @pytest.mark.e2e diff --git a/tests/foreman/cli/test_computeresource_osp.py b/tests/foreman/cli/test_computeresource_osp.py index c9018995dc2..42cffe0b7b9 100644 --- a/tests/foreman/cli/test_computeresource_osp.py +++ b/tests/foreman/cli/test_computeresource_osp.py @@ -19,8 +19,8 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.factory import CLIReturnCodeError from robottelo.config import settings +from robottelo.exceptions import CLIReturnCodeError OSP_SETTINGS = Box( username=settings.osp.username, diff --git a/tests/foreman/cli/test_computeresource_rhev.py b/tests/foreman/cli/test_computeresource_rhev.py index 12e2fe9b345..9033ca003de 100644 --- a/tests/foreman/cli/test_computeresource_rhev.py +++ b/tests/foreman/cli/test_computeresource_rhev.py @@ -20,14 +20,8 @@ from wait_for import wait_for from wrapanapi import RHEVMSystem -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.factory import ( - CLIFactoryError, - CLIReturnCodeError, - make_compute_resource, -) -from robottelo.cli.host import Host from robottelo.config import settings +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError @pytest.fixture(scope='module') @@ -47,7 +41,7 @@ def rhev(): @pytest.mark.tier1 -def test_positive_create_rhev_with_valid_name(rhev): +def test_positive_create_rhev_with_valid_name(rhev, module_target_sat): """Create Compute Resource of type Rhev with valid name :id: 92a577db-144e-4761-a52e-e83887464986 @@ -58,7 +52,7 @@ def test_positive_create_rhev_with_valid_name(rhev): :BZ: 1602835 """ - ComputeResource.create( + module_target_sat.cli.ComputeResource.create( { 'name': f'cr {gen_string(str_type="alpha")}', 'provider': 'Ovirt', @@ -71,7 +65,7 @@ def test_positive_create_rhev_with_valid_name(rhev): @pytest.mark.tier1 -def test_positive_rhev_info(rhev): +def test_positive_rhev_info(rhev, module_target_sat): """List the info of RHEV compute resource :id: 1b18f6e8-c431-41ab-ae49-a2bbb74712f2 @@ -83,7 +77,7 @@ def test_positive_rhev_info(rhev): 
:BZ: 1602835 """ name = gen_string('utf8') - compute_resource = make_compute_resource( + compute_resource = module_target_sat.cli_factory.make_compute_resource( { 'name': name, 'provider': 'Ovirt', @@ -97,7 +91,7 @@ def test_positive_rhev_info(rhev): @pytest.mark.tier1 -def test_positive_delete_by_name(rhev): +def test_positive_delete_by_name(rhev, module_target_sat): """Delete the RHEV compute resource by name :id: ac84acbe-3e02-4f49-9695-b668df28b353 @@ -108,7 +102,7 @@ def test_positive_delete_by_name(rhev): :BZ: 1602835 """ - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -118,13 +112,13 @@ def test_positive_delete_by_name(rhev): } ) assert comp_res['name'] - ComputeResource.delete({'name': comp_res['name']}) - result = ComputeResource.exists(search=('name', comp_res['name'])) + module_target_sat.cli.ComputeResource.delete({'name': comp_res['name']}) + result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name'])) assert not result @pytest.mark.tier1 -def test_positive_delete_by_id(rhev): +def test_positive_delete_by_id(rhev, module_target_sat): """Delete the RHEV compute resource by id :id: 4bcd4fa3-df8b-4773-b142-e47458116552 @@ -135,7 +129,7 @@ def test_positive_delete_by_id(rhev): :BZ: 1602835 """ - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -145,13 +139,13 @@ def test_positive_delete_by_id(rhev): } ) assert comp_res['name'] - ComputeResource.delete({'id': comp_res['id']}) - result = ComputeResource.exists(search=('name', comp_res['name'])) + module_target_sat.cli.ComputeResource.delete({'id': comp_res['id']}) + result = module_target_sat.cli.ComputeResource.exists(search=('name', comp_res['name'])) assert not result @pytest.mark.tier2 -def test_negative_create_rhev_with_url(rhev): +def test_negative_create_rhev_with_url(rhev, module_target_sat): """RHEV compute resource negative create with invalid values :id: 1f318a4b-8dca-491b-b56d-cff773ed624e @@ -161,7 +155,7 @@ def test_negative_create_rhev_with_url(rhev): :CaseImportance: High """ with pytest.raises(CLIReturnCodeError): - ComputeResource.create( + module_target_sat.cli.ComputeResource.create( { 'provider': 'Ovirt', 'user': rhev.username, @@ -173,7 +167,7 @@ def test_negative_create_rhev_with_url(rhev): @pytest.mark.tier2 -def test_negative_create_with_same_name(rhev): +def test_negative_create_with_same_name(rhev, module_target_sat): """RHEV compute resource negative create with the same name :id: f00813ef-df31-462c-aa87-479b8272aea3 @@ -188,7 +182,7 @@ def test_negative_create_with_same_name(rhev): :CaseImportance: High """ name = gen_string('alpha') - compute_resource = make_compute_resource( + compute_resource = module_target_sat.cli_factory.make_compute_resource( { 'name': name, 'provider': 'Ovirt', @@ -200,7 +194,7 @@ def test_negative_create_with_same_name(rhev): ) assert compute_resource['name'] == name with pytest.raises(CLIFactoryError): - make_compute_resource( + module_target_sat.cli_factory.make_compute_resource( { 'name': name, 'provider': 'Ovirt', @@ -214,7 +208,7 @@ def test_negative_create_with_same_name(rhev): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_update_name(rhev): +def test_positive_update_name(rhev, module_target_sat): """RHEV compute resource positive update :id: 5ca29b81-d1f0-409f-843d-aa5daf957d7f @@ -231,7 +225,7 @@ def 
test_positive_update_name(rhev): :BZ: 1602835 """ new_name = gen_string('alpha') - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -241,12 +235,12 @@ def test_positive_update_name(rhev): } ) assert comp_res['name'] - ComputeResource.update({'name': comp_res['name'], 'new-name': new_name}) - assert new_name == ComputeResource.info({'id': comp_res['id']})['name'] + module_target_sat.cli.ComputeResource.update({'name': comp_res['name'], 'new-name': new_name}) + assert new_name == module_target_sat.cli.ComputeResource.info({'id': comp_res['id']})['name'] @pytest.mark.tier2 -def test_positive_add_image_rhev_with_name(rhev, module_os): +def test_positive_add_image_rhev_with_name(rhev, module_os, module_target_sat): """Add images to the RHEV compute resource :id: 2da84165-a56f-4282-9343-94828fa69c13 @@ -265,7 +259,7 @@ def test_positive_add_image_rhev_with_name(rhev, module_os): if rhev.image_uuid is None: pytest.skip('Missing configuration for rhev.image_uuid') - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -275,7 +269,7 @@ def test_positive_add_image_rhev_with_name(rhev, module_os): } ) assert comp_res['name'] - ComputeResource.image_create( + module_target_sat.cli.ComputeResource.image_create( { 'compute-resource': comp_res['name'], 'name': f'img {gen_string(str_type="alpha")}', @@ -285,13 +279,15 @@ def test_positive_add_image_rhev_with_name(rhev, module_os): 'username': "root", } ) - result = ComputeResource.image_list({'compute-resource': comp_res['name']}) + result = module_target_sat.cli.ComputeResource.image_list( + {'compute-resource': comp_res['name']} + ) assert result[0]['uuid'] == rhev.image_uuid @pytest.mark.skip_if_open("BZ:1829239") @pytest.mark.tier2 -def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os): +def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os, module_target_sat): """Attempt to add invalid image to the RHEV compute resource :id: e8a653f9-9749-4c76-95ed-2411a7c0a117 @@ -309,7 +305,7 @@ def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os): :BZ: 1829239 """ - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -320,7 +316,7 @@ def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os): ) assert comp_res['name'] with pytest.raises(CLIReturnCodeError): - ComputeResource.image_create( + module_target_sat.cli.ComputeResource.image_create( { 'compute-resource': comp_res['name'], 'name': f'img {gen_string(str_type="alpha")}', @@ -333,7 +329,7 @@ def test_negative_add_image_rhev_with_invalid_uuid(rhev, module_os): @pytest.mark.tier2 -def test_negative_add_image_rhev_with_invalid_name(rhev, module_os): +def test_negative_add_image_rhev_with_invalid_name(rhev, module_os, module_target_sat): """Attempt to add invalid image name to the RHEV compute resource :id: 873a7d79-1e89-4e4f-81ca-b6db1e0246da @@ -353,7 +349,7 @@ def test_negative_add_image_rhev_with_invalid_name(rhev, module_os): if rhev.image_uuid is None: pytest.skip('Missing configuration for rhev.image_uuid') - comp_res = make_compute_resource( + comp_res = module_target_sat.cli_factory.make_compute_resource( { 'provider': 'Ovirt', 'user': rhev.username, @@ -365,7 +361,7 @@ def test_negative_add_image_rhev_with_invalid_name(rhev, module_os): assert comp_res['name'] with 
pytest.raises(CLIReturnCodeError): - ComputeResource.image_create( + module_target_sat.cli.ComputeResource.image_create( { 'compute-resource': comp_res['name'], # too long string (>255 chars) @@ -493,7 +489,7 @@ def test_positive_provision_rhev_with_host_group( # checks hostname = f'{host_name}.{domain_name}' assert hostname == host['name'] - host_info = Host.info({'name': hostname}) + host_info = cli.Host.info({'name': hostname}) # Check on RHV, if VM exists assert rhev.rhv_api.does_vm_exist(hostname) # Get the information of created VM @@ -658,7 +654,7 @@ def test_positive_provision_rhev_image_based_and_disassociate( ) hostname = f'{host_name}.{domain_name}' assert hostname == host['name'] - host_info = Host.info({'name': hostname}) + host_info = cli.Host.info({'name': hostname}) # Check on RHV, if VM exists assert rhev.rhv_api.does_vm_exist(hostname) # Get the information of created VM diff --git a/tests/foreman/cli/test_container_management.py b/tests/foreman/cli/test_container_management.py index 70de242f8ac..9229260475a 100644 --- a/tests/foreman/cli/test_container_management.py +++ b/tests/foreman/cli/test_container_management.py @@ -16,15 +16,6 @@ import pytest from wait_for import wait_for -from robottelo.cli.factory import ( - ContentView, - LifecycleEnvironment, - Repository, - make_content_view, - make_lifecycle_environment, - make_product_wait, - make_repository, -) from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, @@ -34,7 +25,7 @@ from robottelo.logging import logger -def _repo(product_id, name=None, upstream_name=None, url=None): +def _repo(sat, product_id, name=None, upstream_name=None, url=None): """Creates a Docker-based repository. :param product_id: ID of the ``Product``. @@ -46,7 +37,7 @@ def _repo(product_id, name=None, upstream_name=None, url=None): CONTAINER_REGISTRY_HUB constant. :return: A ``Repository`` object. 
""" - return make_repository( + return sat.cli_factory.make_repository( { 'content-type': REPO_TYPE['docker'], 'docker-upstream-name': upstream_name or CONTAINER_UPSTREAM_NAME, @@ -82,10 +73,10 @@ def test_positive_pull_image(self, module_org, container_contenthost, target_sat :parametrized: yes """ - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id']) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id']) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) try: result = container_contenthost.execute( f'docker login -u {settings.server.admin_username}' @@ -155,18 +146,22 @@ def test_positive_container_admin_end_to_end_search( # Satellite setup: create product and add Docker repository; # create content view and add Docker repository; # create lifecycle environment and promote content view to it - lce = make_lifecycle_environment({'organization-id': module_org.id}) - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id'], upstream_name=CONTAINER_UPSTREAM_NAME) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id'], upstream_name=CONTAINER_UPSTREAM_NAME) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': registry_name_pattern, 'registry-unauthenticated-pull': 'false', @@ -207,7 +202,7 @@ def test_positive_container_admin_end_to_end_search( assert docker_repo_uri not in result.stdout # 8. 
Set 'Unauthenticated Pull' option to true - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-unauthenticated-pull': 'true', 'id': lce['id'], @@ -259,18 +254,22 @@ def test_positive_container_admin_end_to_end_pull( # Satellite setup: create product and add Docker repository; # create content view and add Docker repository; # create lifecycle environment and promote content view to it - lce = make_lifecycle_environment({'organization-id': module_org.id}) - product = make_product_wait({'organization-id': module_org.id}) - repo = _repo(product['id'], upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo(target_sat, product['id'], upstream_name=docker_upstream_name) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': registry_name_pattern, 'registry-unauthenticated-pull': 'false', @@ -315,7 +314,7 @@ def test_positive_container_admin_end_to_end_pull( assert result.status == 1 # 8. Set 'Unauthenticated Pull' option to true - LifecycleEnvironment.update( + target_sat.cli.LifecycleEnvironment.update( { 'registry-unauthenticated-pull': 'true', 'id': lce['id'], @@ -364,7 +363,9 @@ def test_negative_pull_content_with_longer_name( {'name': product_name, 'organization-id': module_org.id} ) - repo = _repo(product['id'], name=repo_name, upstream_name=CONTAINER_UPSTREAM_NAME) + repo = _repo( + target_sat, product['id'], name=repo_name, upstream_name=CONTAINER_UPSTREAM_NAME + ) # 2. Sync the repos target_sat.cli.Repository.synchronize({'id': repo['id']}) diff --git a/tests/foreman/cli/test_contentaccess.py b/tests/foreman/cli/test_contentaccess.py index fb8aaca5228..3f044995399 100644 --- a/tests/foreman/cli/test_contentaccess.py +++ b/tests/foreman/cli/test_contentaccess.py @@ -19,8 +19,6 @@ from nailgun import entities import pytest -from robottelo.cli.host import Host -from robottelo.cli.package import Package from robottelo.config import settings from robottelo.constants import ( REAL_0_ERRATA_ID, @@ -95,7 +93,7 @@ def vm( @pytest.mark.tier2 @pytest.mark.pit_client @pytest.mark.pit_server -def test_positive_list_installable_updates(vm): +def test_positive_list_installable_updates(vm, module_target_sat): """Ensure packages applicability is functioning properly. 
:id: 4feb692c-165b-4f96-bb97-c8447bd2cf6e @@ -119,7 +117,7 @@ def test_positive_list_installable_updates(vm): :CaseImportance: Critical """ for _ in range(30): - applicable_packages = Package.list( + applicable_packages = module_target_sat.cli.Package.list( { 'host': vm.hostname, 'packages-restrict-applicable': 'true', @@ -139,7 +137,7 @@ def test_positive_list_installable_updates(vm): @pytest.mark.upgrade @pytest.mark.pit_client @pytest.mark.pit_server -def test_positive_erratum_installable(vm): +def test_positive_erratum_installable(vm, module_target_sat): """Ensure erratum applicability is showing properly, without attaching any subscription. @@ -161,7 +159,9 @@ def test_positive_erratum_installable(vm): """ # check that package errata is applicable for _ in range(30): - erratum = Host.errata_list({'host': vm.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + erratum = module_target_sat.cli.Host.errata_list( + {'host': vm.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'} + ) if erratum: break time.sleep(10) diff --git a/tests/foreman/cli/test_contentcredentials.py b/tests/foreman/cli/test_contentcredentials.py index 269170ffdc1..8541ce0f718 100644 --- a/tests/foreman/cli/test_contentcredentials.py +++ b/tests/foreman/cli/test_contentcredentials.py @@ -23,9 +23,8 @@ from fauxfactory import gen_alphanumeric, gen_choice, gen_integer, gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.constants import DataFile -from robottelo.host_helpers.cli_factory import CLIFactoryError +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_values_list, parametrized, diff --git a/tests/foreman/cli/test_contentview.py b/tests/foreman/cli/test_contentview.py index 4effe29ecf4..8122f53940a 100644 --- a/tests/foreman/cli/test_contentview.py +++ b/tests/foreman/cli/test_contentview.py @@ -24,28 +24,13 @@ from wrapanapi.entities.vm import VmState from robottelo import constants -from robottelo.cli import factory as cli_factory -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.capsule import Capsule -from robottelo.cli.contentview import ContentView -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.location import Location -from robottelo.cli.module_stream import ModuleStream -from robottelo.cli.org import Org -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.role import Role -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import ( FAKE_2_CUSTOM_PACKAGE, FAKE_2_CUSTOM_PACKAGE_NAME, DataFile, ) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( generate_strings_list, invalid_names_list, @@ -55,7 +40,7 @@ @pytest.fixture(scope='module') -def module_rhel_content(module_entitlement_manifest_org): +def module_rhel_content(module_entitlement_manifest_org, module_target_sat): """Returns RH repo after syncing it""" product = entities.Product( name=constants.PRDS['rhel'], organization=module_entitlement_manifest_org @@ -64,14 +49,14 @@ def module_rhel_content(module_entitlement_manifest_org): data = {'basearch': 'x86_64', 'releasever': '6Server', 'product_id': product.id} reposet.enable(data=data) - repo = Repository.info( + 
repo = module_target_sat.cli.Repository.info( { 'name': constants.REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, 'product': product.name, } ) - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': constants.REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, @@ -81,12 +66,12 @@ def module_rhel_content(module_entitlement_manifest_org): return repo -def _get_content_view_version_lce_names_set(content_view_id, version_id): +def _get_content_view_version_lce_names_set(content_view_id, version_id, sat): """returns a set of content view version lifecycle environment names :rtype: set that it belongs under """ - lifecycle_environments = ContentView.version_info( + lifecycle_environments = sat.cli.ContentView.version_info( {'content-view-id': content_view_id, 'id': version_id} )['lifecycle-environments'] return {lce['name'] for lce in lifecycle_environments} @@ -97,7 +82,7 @@ class TestContentView: @pytest.mark.parametrize('name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_create_with_name(self, module_org, name): + def test_positive_create_with_name(self, module_org, module_target_sat, name): """create content views with different names :id: a154308c-3982-4cf1-a236-3051e740970e @@ -108,14 +93,14 @@ def test_positive_create_with_name(self, module_org, name): :parametrized: yes """ - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'name': name, 'organization-id': module_org.id} ) assert content_view['name'] == name.strip() @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, module_org, name): + def test_negative_create_with_invalid_name(self, module_org, module_target_sat, name): """create content views with invalid names :id: 83046271-76f9-4cda-b579-a2fe63493295 @@ -127,11 +112,13 @@ def test_negative_create_with_invalid_name(self, module_org, name): :parametrized: yes """ - with pytest.raises(cli_factory.CLIFactoryError): - cli_factory.make_content_view({'name': name, 'organization-id': module_org.id}) + with pytest.raises(CLIFactoryError): + module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.tier1 - def test_negative_create_with_org_name(self): + def test_negative_create_with_org_name(self, module_target_sat): """Create content view with invalid org name :id: f8b76e98-ccc8-41ac-af04-541650e8f5ba @@ -142,10 +129,10 @@ def test_negative_create_with_org_name(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.create({'organization-id': gen_string('alpha')}) + module_target_sat.cli.ContentView.create({'organization-id': gen_string('alpha')}) @pytest.mark.tier2 - def test_positive_create_with_repo_id(self, module_org, module_product): + def test_positive_create_with_repo_id(self, module_org, module_product, module_target_sat): """Create content view providing repository id :id: bb91affe-f8d4-4724-8b61-41f3cb898fd3 @@ -156,15 +143,17 @@ def test_positive_create_with_repo_id(self, module_org, module_product): :CaseImportance: High :BZ: 1213097 """ - repo = cli_factory.make_repository({'product-id': module_product.id}) - cv = cli_factory.make_content_view( + repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) + cv = module_target_sat.cli_factory.make_content_view( 
{'organization-id': module_org.id, 'repository-ids': [repo['id']]} ) assert cv['yum-repositories'][0]['id'] == repo['id'] @pytest.mark.parametrize('new_name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_update_name_by_id(self, module_org, new_name): + def test_positive_update_name_by_id(self, module_org, module_target_sat, new_name): """Find content view by its id and update its name afterwards :id: 35fccf2c-abc4-4ca8-a565-a7a6adaaf429 @@ -177,16 +166,16 @@ def test_positive_update_name_by_id(self, module_org, new_name): :parametrized: yes """ - cv = cli_factory.make_content_view( + cv = module_target_sat.cli_factory.make_content_view( {'name': gen_string('utf8'), 'organization-id': module_org.id} ) - ContentView.update({'id': cv['id'], 'new-name': new_name}) - cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.update({'id': cv['id'], 'new-name': new_name}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name.strip() @pytest.mark.parametrize('new_name', **parametrized(valid_names_list())) @pytest.mark.tier1 - def test_positive_update_name_by_name(self, module_org, new_name): + def test_positive_update_name_by_name(self, module_org, module_target_sat, new_name): """Find content view by its name and update it :id: aa9bced6-ee6c-4a18-90ac-874ab4979711 @@ -199,16 +188,16 @@ def test_positive_update_name_by_name(self, module_org, new_name): :parametrized: yes """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.update( + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.update( {'name': cv['name'], 'organization-label': module_org.label, 'new-name': new_name} ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name.strip() @pytest.mark.run_in_one_thread @pytest.mark.tier2 - def test_positive_update_filter(self, repo_setup): + def test_positive_update_filter(self, repo_setup, module_target_sat): """Edit content views for a rh content, add a filter and update filter :id: 4beab1e4-fc58-460e-af24-cdd2c3d283e6 @@ -221,38 +210,40 @@ def test_positive_update_filter(self, repo_setup): :CaseImportance: High """ # Create CV - new_cv = cli_factory.make_content_view({'organization-id': repo_setup['org'].id}) + new_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': repo_setup['org'].id} + ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': repo_setup['org'].id, 'repository-id': repo_setup['repo'].id, } ) - Repository.synchronize({'id': repo_setup['repo'].id}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_setup['repo'].id}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == repo_setup['repo'].name - cvf = cli_factory.make_content_view_filter( + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'true', 'type': 'erratum'} ) - cvf_rule = cli_factory.make_content_view_filter_rule( + cvf_rule = module_target_sat.cli_factory.content_view_filter_rule( {'content-view-filter-id': cvf['filter-id'], 'types': ['bugfix', 'enhancement']} ) - cvf = ContentView.filter.info({'id': cvf['filter-id']}) + cvf = 
module_target_sat.cli.ContentView.filter.info({'id': cvf['filter-id']}) assert 'security' not in cvf['rules'][0]['types'] - ContentView.filter.rule.update( + module_target_sat.cli.ContentView.filter.rule.update( { 'id': cvf_rule['rule-id'], 'types': 'security', 'content-view-filter-id': cvf['filter-id'], } ) - cvf = ContentView.filter.info({'id': cvf['filter-id']}) + cvf = module_target_sat.cli.ContentView.filter.info({'id': cvf['filter-id']}) assert 'security' == cvf['rules'][0]['types'] @pytest.mark.tier1 - def test_positive_delete_by_id(self, module_org): + def test_positive_delete_by_id(self, module_org, module_target_sat): """delete content view by its id :id: e96d6d47-8be4-4705-979f-e5c320eca293 @@ -261,13 +252,13 @@ def test_positive_delete_by_id(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.delete({'id': cv['id']}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.delete({'id': cv['id']}) with pytest.raises(CLIReturnCodeError): - ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.info({'id': cv['id']}) @pytest.mark.tier1 - def test_positive_delete_by_name(self, module_org): + def test_positive_delete_by_name(self, module_org, module_target_sat): """delete content view by its name :id: 014b85f3-003b-42d9-bbfe-21620e8eb84b @@ -278,13 +269,15 @@ def test_positive_delete_by_name(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.delete({'name': cv['name'], 'organization': module_org.name}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.delete( + {'name': cv['name'], 'organization': module_org.name} + ) with pytest.raises(CLIReturnCodeError): - ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.info({'id': cv['id']}) @pytest.mark.tier2 - def test_positive_delete_version_by_name(self, module_org): + def test_positive_delete_version_by_name(self, module_org, module_target_sat): """Create content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'remove-from-environment' command and delete content view version from @@ -298,28 +291,30 @@ def test_positive_delete_version_by_name(self, module_org): :CaseLevel: Integration """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cvv = content_view['versions'][0] env_id = content_view['lifecycle-environments'][0]['id'] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': content_view['id'], 'lifecycle-environment-id': env_id} ) - ContentView.version_delete( + module_target_sat.cli.ContentView.version_delete( { 'content-view': content_view['name'], 'organization': module_org.name, 'version': cvv['version'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 0 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_delete_version_by_id(self, module_org, module_product): + def test_positive_delete_version_by_id(self, module_org, module_product, module_target_sat): """Create content view and publish it. After that try to disassociate content view from 'Library' environment through 'remove-from-environment' command and delete content view version from @@ -333,12 +328,14 @@ def test_positive_delete_version_by_id(self, module_org, module_product): :CaseImportance: High """ # Create new organization, product and repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -346,25 +343,25 @@ def test_positive_delete_version_by_id(self, module_org, module_product): } ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Store the associated environment_id env_id = new_cv['lifecycle-environments'][0]['id'] # Store the version1 id version1_id = new_cv['versions'][0]['id'] # Remove the CV from selected environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env_id} ) # Delete the version - ContentView.version_delete({'id': version1_id}) - new_cv = ContentView.info({'id': new_cv['id']}) + 
module_target_sat.cli.ContentView.version_delete({'id': version1_id}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier2 - def test_negative_delete_version_by_id(self, module_org): + def test_negative_delete_version_by_id(self, module_org, module_target_sat): """Create content view and publish it. Try to delete content view version while content view is still associated with lifecycle environment @@ -377,20 +374,22 @@ def test_negative_delete_version_by_id(self, module_org): :CaseLevel: Integration """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cv = content_view['versions'][0] # Try to delete content view version while it is in environment Library with pytest.raises(CLIReturnCodeError): - ContentView.version_delete({'id': cv['id']}) + module_target_sat.cli.ContentView.version_delete({'id': cv['id']}) # Check that version was not actually removed from the cv - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 @pytest.mark.tier1 - def test_positive_remove_lce_by_id(self, module_org): + def test_positive_remove_lce_by_id(self, module_org, module_target_sat): """Remove content view from lifecycle environment :id: 1bf8a647-d82e-4145-b13b-f92bf6642532 @@ -399,16 +398,16 @@ def test_positive_remove_lce_by_id(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) env = new_cv['lifecycle-environments'][0] - ContentView.remove({'id': new_cv['id'], 'environment-ids': env['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove({'id': new_cv['id'], 'environment-ids': env['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['lifecycle-environments']) == 0 @pytest.mark.tier3 - def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): + def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org, module_target_sat): """Remove content view environment and re-assign activation key to another environment and content view @@ -421,23 +420,33 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): :CaseLevel: Integration """ env = [ - cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(2) ] - source_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': source_cv['id']}) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = 
module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) cvv = source_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']} + ) - destination_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': destination_cv['id']}) - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) cvv = destination_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']} + ) - ac_key = cli_factory.make_activation_key( + ac_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': source_cv['id'], 'lifecycle-environment-id': env[0]['id'], @@ -445,12 +454,12 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): 'organization-id': module_org.id, } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['activation-keys'][0] == ac_key['name'] - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert len(destination_cv['activation-keys']) == 0 - ContentView.remove( + module_target_sat.cli.ContentView.remove( { 'id': source_cv['id'], 'environment-ids': env[0]['id'], @@ -458,14 +467,14 @@ def test_positive_remove_lce_by_id_and_reassign_ak(self, module_org): 'key-environment-id': env[1]['id'], } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert len(source_cv['activation-keys']) == 0 - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['activation-keys'][0] == ac_key['name'] @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): + def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org, module_target_sat): """Remove content view environment and re-assign content host to another environment and content view @@ -478,26 +487,36 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): :CaseLevel: Integration """ env = [ - cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) for _ in range(2) ] - source_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': source_cv['id']}) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': 
source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) cvv = source_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[0]['id']} + ) - destination_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': destination_cv['id']}) - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) cvv = destination_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env[1]['id']} + ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': source_cv['id'], 'lifecycle-environment-id': env[0]['id'], @@ -506,12 +525,12 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '1' - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['content-host-count'] == '0' - ContentView.remove( + module_target_sat.cli.ContentView.remove( { 'environment-ids': env[0]['id'], 'id': source_cv['id'], @@ -519,13 +538,13 @@ def test_positive_remove_lce_by_id_and_reassign_chost(self, module_org): 'system-environment-id': env[1]['id'], } ) - source_cv = ContentView.info({'id': source_cv['id']}) + source_cv = module_target_sat.cli.ContentView.info({'id': source_cv['id']}) assert source_cv['content-host-count'] == '0' - destination_cv = ContentView.info({'id': destination_cv['id']}) + destination_cv = module_target_sat.cli.ContentView.info({'id': destination_cv['id']}) assert destination_cv['content-host-count'] == '1' @pytest.mark.tier1 - def test_positive_remove_version_by_id(self, module_org): + def test_positive_remove_version_by_id(self, module_org, module_target_sat): """Delete content view version using 'remove' command by id :id: e8664353-6601-4566-8478-440be20a089d @@ -534,22 +553,24 @@ def test_positive_remove_version_by_id(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 
'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-version-ids': cvv['id'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-version-ids': cvv['id'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier1 - def test_positive_remove_version_by_name(self, module_org): + def test_positive_remove_version_by_name(self, module_org, module_target_sat): """Delete content view version using 'remove' command by name :id: 2c838716-dcd3-4017-bffc-da53727c22a3 @@ -560,21 +581,23 @@ def test_positive_remove_version_by_name(self, module_org): :CaseImportance: Critical """ - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-versions': cvv['version'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-versions': cvv['version'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier1 - def test_positive_remove_repository_by_id(self, module_org, module_product): + def test_positive_remove_repository_by_id(self, module_org, module_product, module_target_sat): """Remove associated repository from content view by id :id: 90703181-b3f8-44f6-959a-b65c79b6b6ee @@ -585,20 +608,28 @@ def test_positive_remove_repository_by_id(self, module_org, module_product): :CaseImportance: Critical """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Remove repository from CV - ContentView.remove_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 @pytest.mark.tier1 - def test_positive_remove_repository_by_name(self, module_org, module_product): + def test_positive_remove_repository_by_name( + self, module_org, 
module_product, module_target_sat + ): """Remove associated repository from content view by name :id: dc952fe7-eb89-4760-889b-6a3fa17c3e75 @@ -609,20 +640,26 @@ def test_positive_remove_repository_by_name(self, module_org, module_product): :CaseImportance: Critical """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Remove repository from CV - ContentView.remove_repository({'id': new_cv['id'], 'repository': new_repo['name']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository': new_repo['name']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 @pytest.mark.tier2 - def test_positive_create_composite(self, module_org): + def test_positive_create_composite(self, module_org, module_target_sat): """create a composite content view :id: bded6acd-8da3-45ea-9e39-19bdc6c06341 @@ -636,31 +673,37 @@ def test_positive_create_composite(self, module_org): :CaseImportance: High """ # Create REPO - new_product = cli_factory.make_product({'organization-id': module_org.id}) - new_repo = cli_factory.make_repository({'product-id': new_product['id']}) + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Create CV - con_view = cli_factory.make_content_view( + con_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': con_view['id']}) + module_target_sat.cli.ContentView.add_version( + {'content-view-version-id': version1_id, 'id': con_view['id']} + ) # Assert whether version was associated to composite CV - con_view = ContentView.info({'id': con_view['id']}) + 
con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']}) assert con_view['components'][0]['id'] == version1_id @pytest.mark.tier2 - def test_positive_create_composite_by_name(self, module_org): + def test_positive_create_composite_by_name(self, module_org, module_target_sat): """Create a composite content view and add non-composite content view by its name @@ -675,24 +718,30 @@ def test_positive_create_composite_by_name(self, module_org): :CaseImportance: High """ - new_product = cli_factory.make_product({'organization-id': module_org.id}) + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) # Create REPO - new_repo = cli_factory.make_repository({'product-id': new_product['id']}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] # Create CV - cv = cli_factory.make_content_view({'composite': True, 'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) assert len(cv['components']) == 0 # Associate version to composite CV - ContentView.add_version( + module_target_sat.cli.ContentView.add_version( { 'content-view-version': cvv['version'], 'content-view': new_cv['name'], @@ -701,12 +750,14 @@ def test_positive_create_composite_by_name(self, module_org): } ) # Assert whether version was associated to composite CV - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['components']) == 1 assert cv['components'][0]['id'] == cvv['id'] @pytest.mark.tier2 - def test_positive_remove_version_by_id_from_composite(self, module_org, module_product): + def test_positive_remove_version_by_id_from_composite( + self, module_org, module_product, module_target_sat + ): """Create a composite content view and remove its content version by id :id: 0ff675d0-45d6-4f15-9e84-3b5ce98ce7de @@ -719,12 +770,14 @@ def test_positive_remove_version_by_id_from_composite(self, module_org, module_p :CaseImportance: High """ # Create new repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + 
module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -732,30 +785,32 @@ def test_positive_remove_version_by_id_from_composite(self, module_org, module_p } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, 'component-ids': new_cv['versions'][0]['id'], } ) - ContentView.publish({'id': comp_cv['id']}) - new_cv = ContentView.info({'id': comp_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) env = new_cv['lifecycle-environments'][0] cvv = new_cv['versions'][0] - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( {'id': new_cv['id'], 'lifecycle-environment-id': env['id']} ) - ContentView.remove({'content-view-version-ids': cvv['id'], 'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove( + {'content-view-version-ids': cvv['id'], 'id': new_cv['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 0 @pytest.mark.tier2 - def test_positive_remove_component_by_name(self, module_org, module_product): + def test_positive_remove_component_by_name(self, module_org, module_product, module_target_sat): """Create a composite content view and remove component from it by name :id: 908f9cad-b985-4bae-96c0-037ea1d395a6 @@ -770,12 +825,14 @@ def test_positive_remove_component_by_name(self, module_org, module_product): :CaseImportance: High """ # Create new repository - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create new content-view and add repository to view - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_org.id, @@ -783,11 +840,11 @@ def test_positive_remove_component_by_name(self, module_org, module_product): } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Get the CV info - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, @@ -795,7 +852,7 @@ def test_positive_remove_component_by_name(self, module_org, module_product): } ) assert len(comp_cv['components']) == 1 - ContentView.remove_version( + module_target_sat.cli.ContentView.remove_version( { 'content-view-version': new_cv['versions'][0]['version'], 
'content-view': new_cv['name'], @@ -803,11 +860,11 @@ def test_positive_remove_component_by_name(self, module_org, module_product): 'name': comp_cv['name'], } ) - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert len(comp_cv['components']) == 0 @pytest.mark.tier3 - def test_positive_create_composite_with_component_ids(self, module_org): + def test_positive_create_composite_with_component_ids(self, module_org, module_target_sat): """Create a composite content view with a component_ids option which ids are from different content views @@ -822,29 +879,29 @@ def test_positive_create_composite_with_component_ids(self, module_org): :CaseImportance: High """ # Create first CV - cv1 = cli_factory.make_content_view({'organization-id': module_org.id}) + cv1 = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV - ContentView.publish({'id': cv1['id']}) - cv1 = ContentView.info({'id': cv1['id']}) + module_target_sat.cli.ContentView.publish({'id': cv1['id']}) + cv1 = module_target_sat.cli.ContentView.info({'id': cv1['id']}) # Create second CV - cv2 = cli_factory.make_content_view({'organization-id': module_org.id}) + cv2 = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV - ContentView.publish({'id': cv2['id']}) - cv2 = ContentView.info({'id': cv2['id']}) + module_target_sat.cli.ContentView.publish({'id': cv2['id']}) + cv2 = module_target_sat.cli.ContentView.info({'id': cv2['id']}) # Let us now store the version ids component_ids = [cv1['versions'][0]['id'], cv2['versions'][0]['id']] # Create CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id, 'component-ids': component_ids} ) # Assert whether the composite content view components IDs are equal # to the component_ids input values - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert {comp['id'] for comp in comp_cv['components']} == set(component_ids) @pytest.mark.tier3 - def test_negative_create_composite_with_component_ids(self, module_org): + def test_negative_create_composite_with_component_ids(self, module_org, module_target_sat): """Attempt to create a composite content view with a component_ids option which ids are from the same content view @@ -859,16 +916,16 @@ def test_negative_create_composite_with_component_ids(self, module_org): :CaseImportance: Low """ # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of CV twice for _ in range(2): - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version ids component_ids = [version['id'] for version in new_cv['versions']] # Try create CV - with pytest.raises(cli_factory.CLIFactoryError) as context: - cli_factory.make_content_view( + with pytest.raises(CLIFactoryError) as context: + module_target_sat.cli_factory.make_content_view( { 'composite': True, 'organization-id': module_org.id, @@ -878,7 +935,7 @@ def test_negative_create_composite_with_component_ids(self, 
module_org): assert 'Failed to create ContentView with data:' in str(context) @pytest.mark.tier3 - def test_positive_update_composite_with_component_ids(module_org): + def test_positive_update_composite_with_component_ids(module_org, module_target_sat): """Update a composite content view with a component_ids option :id: e6106ff6-c526-40f2-bdc0-ae291f7b267e @@ -891,26 +948,30 @@ def test_positive_update_composite_with_component_ids(module_org): :CaseImportance: Low """ # Create a CV to add to the composite one - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Publish a new version of the CV - ContentView.publish({'id': cv['id']}) - new_cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) # Let us now store the version ids component_ids = new_cv['versions'][0]['id'] # Create a composite CV - comp_cv = cli_factory.make_content_view( + comp_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Update a composite content view with a component id version - ContentView.update({'id': comp_cv['id'], 'component-ids': component_ids}) + module_target_sat.cli.ContentView.update( + {'id': comp_cv['id'], 'component-ids': component_ids} + ) # Assert whether the composite content view components IDs are equal # to the component_ids input values - comp_cv = ContentView.info({'id': comp_cv['id']}) + comp_cv = module_target_sat.cli.ContentView.info({'id': comp_cv['id']}) assert comp_cv['components'][0]['id'] == component_ids @pytest.mark.run_in_one_thread @pytest.mark.tier1 - def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, module_rhel_content): + def test_positive_add_rh_repo_by_id( + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat + ): """Associate Red Hat content to a content view :id: b31a85c3-aa56-461b-9e3a-f7754c742573 @@ -924,18 +985,18 @@ def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, modul :CaseImportance: Critical """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] @@ -943,7 +1004,7 @@ def test_positive_add_rh_repo_by_id(self, module_entitlement_manifest_org, modul @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_add_rh_repo_by_id_and_create_filter( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Associate Red Hat content to a content view and create filter @@ -963,24 +1024,24 @@ def test_positive_add_rh_repo_by_id_and_create_filter( :BZ: 1359665 """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + 
module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] name = gen_string('alphanumeric') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( {'content-view-id': new_cv['id'], 'inclusion': 'true', 'name': name, 'type': 'rpm'} ) - ContentView.filter.rule.create( + module_target_sat.cli.ContentView.filter.rule.create( {'content-view-filter': name, 'content-view-id': new_cv['id'], 'name': 'walgrind'} ) @@ -1016,10 +1077,10 @@ def test_positive_add_module_stream_filter_rule(self, module_org, target_sat): 0 ] content_view = entities.ContentView(organization=module_org.id, repository=[repo]).create() - walrus_stream = ModuleStream.list({'search': "name=walrus, stream=5.21"})[0] - content_view = ContentView.info({'id': content_view.id}) + walrus_stream = target_sat.cli.ModuleStream.list({'search': "name=walrus, stream=5.21"})[0] + content_view = target_sat.cli.ContentView.info({'id': content_view.id}) assert content_view['yum-repositories'][0]['name'] == repo.name - content_view_filter = ContentView.filter.create( + content_view_filter = target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -1027,18 +1088,20 @@ def test_positive_add_module_stream_filter_rule(self, module_org, target_sat): 'type': 'modulemd', } ) - content_view_filter_rule = ContentView.filter.rule.create( + content_view_filter_rule = target_sat.cli.ContentView.filter.rule.create( { 'content-view-filter': filter_name, 'content-view-id': content_view['id'], 'module-stream-ids': walrus_stream['id'], } ) - filter_info = ContentView.filter.info({'id': content_view_filter['filter-id']}) + filter_info = target_sat.cli.ContentView.filter.info( + {'id': content_view_filter['filter-id']} + ) assert filter_info['rules'][0]['id'] == content_view_filter_rule['rule-id'] @pytest.mark.tier2 - def test_positive_add_custom_repo_by_id(self, module_org, module_product): + def test_positive_add_custom_repo_by_id(self, module_org, module_product, module_target_sat): """Associate custom content to a Content view :id: b813b222-b984-47e0-8d9b-2daa43f9a221 @@ -1051,18 +1114,22 @@ def test_positive_add_custom_repo_by_id(self, module_org, module_product): :CaseLevel: Integration """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] @pytest.mark.tier1 - def test_positive_add_custom_repo_by_name(self, module_org, 
module_product): + def test_positive_add_custom_repo_by_name(self, module_org, module_product, module_target_sat): """Associate custom content to a content view with name :id: 62431e11-bec6-4444-abb0-e3758ba25fd8 @@ -1073,13 +1140,15 @@ def test_positive_add_custom_repo_by_name(self, module_org, module_product): :BZ: 1343006 """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV with names. - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'name': new_cv['name'], 'organization': module_org.name, @@ -1087,11 +1156,13 @@ def test_positive_add_custom_repo_by_name(self, module_org, module_product): 'repository': new_repo['name'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] @pytest.mark.tier2 - def test_negative_add_component_in_non_composite_cv(self, module_org, module_product): + def test_negative_add_component_in_non_composite_cv( + self, module_org, module_product, module_target_sat + ): """attempt to associate components in a non-composite content view @@ -1104,25 +1175,31 @@ def test_negative_add_component_in_non_composite_cv(self, module_org, module_pro :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create component CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Fetch version id - cv_version = ContentView.version_list({'content-view-id': new_cv['id']}) + cv_version = module_target_sat.cli.ContentView.version_list( + {'content-view-id': new_cv['id']} + ) # Create non-composite CV - with pytest.raises(cli_factory.CLIFactoryError): - cli_factory.make_content_view( + with pytest.raises(CLIFactoryError): + module_target_sat.cli_factory.make_content_view( {'component-ids': cv_version[0]['id'], 'organization-id': module_org.id} ) @pytest.mark.tier2 - def test_negative_add_same_yum_repo_twice(self, module_org, module_product): + def test_negative_add_same_yum_repo_twice(self, module_org, module_product, module_target_sat): """attempt to associate the same repo multiple times within a content view @@ -1134,25 +1211,31 @@ def test_negative_add_same_yum_repo_twice(self, module_org, module_product): :CaseLevel: Integration """ - new_repo = 
cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] repos_length = len(new_cv['yum-repositories']) # Re-associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == repos_length @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_positive_promote_rh_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to promote a content view containing RH content @@ -1167,29 +1250,31 @@ def test_positive_promote_rh_content( :CaseLevel: Integration """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) - env1 = cli_factory.make_lifecycle_environment( + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) + env1 = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': env1['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) environment = {'id': env1['id'], 'name': env1['name']} assert environment in new_cv['lifecycle-environments'] @pytest.mark.run_in_one_thread @pytest.mark.tier3 def test_positive_promote_rh_and_custom_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to promote a content view containing RH content and custom content using filters @@ -1205,26 +1290,31 @@ def test_positive_promote_rh_and_custom_content( :CaseLevel: Integration """ # Create custom repo - new_repo = cli_factory.make_repository( + new_repo = 
module_target_sat.cli_factory.make_repository( { - 'product-id': cli_factory.make_product( + 'content-type': 'yum', + 'product-id': module_target_sat.cli_factory.make_product( {'organization-id': module_entitlement_manifest_org.id} - )['id'] + )['id'], } ) # Sync custom repo - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repos with CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - cvf = cli_factory.make_content_view_filter( + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'false', 'type': 'rpm'} ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'min-version': 5, @@ -1232,22 +1322,22 @@ def test_positive_promote_rh_and_custom_content( } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) - env1 = cli_factory.make_lifecycle_environment( + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) + env1 = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': env1['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) environment = {'id': env1['id'], 'name': env1['name']} assert environment in new_cv['lifecycle-environments'] @pytest.mark.build_sanity @pytest.mark.tier2 - def test_positive_promote_custom_content(self, module_org, module_product): + def test_positive_promote_custom_content(self, module_org, module_product, module_target_sat): """attempt to promote a content view containing custom content :id: 64c2f1c2-7443-4836-a108-060b913ad2b1 @@ -1261,32 +1351,38 @@ def test_positive_promote_custom_content(self, module_org, module_product): :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - 
ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': environment['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': environment['id'], 'name': environment['name']} in new_cv[ 'lifecycle-environments' ] @pytest.mark.tier2 - def test_positive_promote_ccv(self, module_org, module_product): + def test_positive_promote_ccv(self, module_org, module_product, module_target_sat): """attempt to promote a content view containing custom content :id: 9d31113d-39ec-4524-854c-7f03b0f028fe @@ -1302,44 +1398,52 @@ def test_positive_promote_ccv(self, module_org, module_product): :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Create CV - con_view = cli_factory.make_content_view( + con_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': con_view['id']}) + module_target_sat.cli.ContentView.add_version( + {'content-view-version-id': version1_id, 'id': con_view['id']} + ) # Publish a new version of CV - ContentView.publish({'id': con_view['id']}) + module_target_sat.cli.ContentView.publish({'id': con_view['id']}) # As version info is populated after publishing only - con_view = ContentView.info({'id': con_view['id']}) + con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']}) # Promote the Published version of CV to the next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': con_view['versions'][0]['id'], 
'to-lifecycle-environment-id': environment['id'], } )
- con_view = ContentView.info({'id': con_view['id']})
+ con_view = module_target_sat.cli.ContentView.info({'id': con_view['id']})
assert {'id': environment['id'], 'name': environment['name']} in con_view[ 'lifecycle-environments' ]
@pytest.mark.tier2
- def test_negative_promote_default_cv(self, module_org):
+ def test_negative_promote_default_cv(self, module_org, module_target_sat):
"""attempt to promote a default content view
:id: ef25a4d9-8852-4d2c-8355-e9b07eb0560b
@@ -1350,19 +1454,25 @@ def test_negative_promote_default_cv(self, module_org):
:expectedresults: Default content views cannot be promoted
:CaseLevel: Integration
"""
- environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
+ environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + )
- result = ContentView.list({'organization-id': module_org.id}, per_page=False)
+ result = module_target_sat.cli.ContentView.list( + {'organization-id': module_org.id}, per_page=False + )
content_view = random.choice([cv for cv in result if cv['name'] == constants.DEFAULT_CV])
- cvv = ContentView.version_list({'content-view-id': content_view['content-view-id']})[0]
+ cvv = module_target_sat.cli.ContentView.version_list( + {'content-view-id': content_view['content-view-id']} + )[0]
# Promote the Default CV to the next env
with pytest.raises(CLIReturnCodeError):
- ContentView.version_promote(
+ module_target_sat.cli.ContentView.version_promote(
{'id': cvv['id'], 'to-lifecycle-environment-id': environment['id']} )
@pytest.mark.tier2
- def test_negative_promote_with_invalid_lce(self, module_org, module_product):
+ def test_negative_promote_with_invalid_lce(self, module_org, module_product, module_target_sat):
"""attempt to promote a content view using an invalid environment
@@ -1375,20 +1485,24 @@ def test_negative_promote_with_invalid_lce(self, module_org, module_product):
:CaseLevel: Integration
"""
# Create REPO
- new_repo = cli_factory.make_repository({'product-id': module_product.id})
+ new_repo = module_target_sat.cli_factory.make_repository( + {'product-id': module_product.id, 'content-type': 'yum'} + )
# Sync REPO
- Repository.synchronize({'id': new_repo['id']})
+ module_target_sat.cli.Repository.synchronize({'id': new_repo['id']})
# Create CV
- new_cv = cli_factory.make_content_view({'organization-id': module_org.id})
+ new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
# Associate repo to CV
- ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']})
+ module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + )
# Publish a new version of CV
- ContentView.publish({'id': new_cv['id']})
- new_cv = ContentView.info({'id': new_cv['id']})
+ module_target_sat.cli.ContentView.publish({'id': new_cv['id']})
+ new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']})
# Promote the Published version of CV,
# to the previous env which is Library
with pytest.raises(CLIReturnCodeError):
- ContentView.version_promote(
+ module_target_sat.cli.ContentView.version_promote(
{ 'id': new_cv['versions'][0]['id'], 'to-lifecycle-environment-id': new_cv['lifecycle-environments'][0]['id'],
@@ -1401,7 +1515,7 @@ def test_negative_promote_with_invalid_lce(self, module_org, module_product):
@pytest.mark.run_in_one_thread
@pytest.mark.tier2
def test_positive_publish_rh_content( - self,
module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to publish a content view containing RH content @@ -1416,14 +1530,16 @@ def test_positive_publish_rh_content( :CaseLevel: Integration """ # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == module_rhel_content['name'] assert new_cv['versions'][0]['version'] == '1.0' @@ -1432,7 +1548,7 @@ def test_positive_publish_rh_content( @pytest.mark.pit_server @pytest.mark.tier3 def test_positive_publish_rh_and_custom_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """attempt to publish a content view containing a RH and custom repos and has filters @@ -1448,26 +1564,31 @@ def test_positive_publish_rh_and_custom_content( :CaseLevel: Integration """ # Create custom repo - new_repo = cli_factory.make_repository( + new_repo = module_target_sat.cli_factory.make_repository( { - 'product-id': cli_factory.make_product( + 'content-type': 'yum', + 'product-id': module_target_sat.cli_factory.make_product( {'organization-id': module_entitlement_manifest_org.id} - )['id'] + )['id'], } ) # Sync custom repo - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repos with CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': module_rhel_content['id']}) - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) - cvf = cli_factory.make_content_view_filter( + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) + cvf = module_target_sat.cli_factory.make_content_view_filter( {'content-view-id': new_cv['id'], 'inclusion': 'false', 'type': 'rpm'} ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'min-version': 5, @@ -1475,15 +1596,15 @@ def test_positive_publish_rh_and_custom_content( } ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {module_rhel_content['name'], new_repo['name']}.issubset( {repo['name'] for repo in new_cv['yum-repositories']} ) assert new_cv['versions'][0]['version'] == '1.0' @pytest.mark.tier2 - def 
test_positive_publish_custom_content(self, module_org, module_product): + def test_positive_publish_custom_content(self, module_org, module_product, module_target_sat): """attempt to publish a content view containing custom content :id: 84158023-3980-45c6-87d8-faacea3c942f @@ -1496,21 +1617,25 @@ def test_positive_publish_custom_content(self, module_org, module_product): :CaseLevel: Integration """ - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['yum-repositories'][0]['name'] == new_repo['name'] assert new_cv['versions'][0]['version'] == '1.0' @pytest.mark.tier2 - def test_positive_publish_custom_major_minor_cv_version(self): + def test_positive_publish_custom_major_minor_cv_version(self, module_target_sat): """CV can published with custom major and minor versions :id: 6697cd22-253a-4bdc-a108-7e0af22caaf4 @@ -1528,14 +1653,16 @@ def test_positive_publish_custom_major_minor_cv_version(self): :CaseLevel: System """ - org = cli_factory.make_org() + org = module_target_sat.cli_factory.make_org() major = random.randint(1, 1000) minor = random.randint(1, 1000) - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'name': gen_string('alpha'), 'organization-id': org['id']} ) - ContentView.publish({'id': content_view['id'], 'major': major, 'minor': minor}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish( + {'id': content_view['id'], 'major': major, 'minor': minor} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0]['version'] assert cvv.split('.')[0] == str(major) assert cvv.split('.')[1] == str(minor) @@ -1545,7 +1672,9 @@ def test_positive_publish_custom_major_minor_cv_version(self): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_publish_custom_content_module_stream(self, module_org, module_product): + def test_positive_publish_custom_content_module_stream( + self, module_org, module_product, module_target_sat + ): """attempt to publish a content view containing custom content module streams @@ -1560,7 +1689,7 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ :CaseLevel: Integration """ - software_repo = cli_factory.make_repository( + software_repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', @@ -1568,7 +1697,7 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ } ) - 
animal_repo = cli_factory.make_repository( + animal_repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', @@ -1577,26 +1706,40 @@ def test_positive_publish_custom_content_module_stream(self, module_org, module_ ) # Sync REPO's - Repository.synchronize({'id': animal_repo['id']}) - Repository.synchronize({'id': software_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': animal_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': software_repo['id']}) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': animal_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': animal_repo['id']} + ) # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv_version_1 = ContentView.info({'id': new_cv['id']})['versions'][0] - module_streams = ModuleStream.list({'content-view-version-id': (new_cv_version_1['id'])}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv_version_1 = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][ + 0 + ] + module_streams = module_target_sat.cli.ModuleStream.list( + {'content-view-version-id': (new_cv_version_1['id'])} + ) assert len(module_streams) > 6 # Publish another new version of CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': software_repo['id']}) - ContentView.publish({'id': new_cv['id']}) - new_cv_version_2 = ContentView.info({'id': new_cv['id']})['versions'][1] - module_streams = ModuleStream.list({'content-view-version-id': new_cv_version_2['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': software_repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv_version_2 = module_target_sat.cli.ContentView.info({'id': new_cv['id']})['versions'][ + 1 + ] + module_streams = module_target_sat.cli.ModuleStream.list( + {'content-view-version-id': new_cv_version_2['id']} + ) assert len(module_streams) > 13 @pytest.mark.tier2 - def test_positive_republish_after_content_removed(self, module_org, module_product): + def test_positive_republish_after_content_removed( + self, module_org, module_product, module_target_sat + ): """Attempt to re-publish content view after all associated content were removed from that CV @@ -1616,9 +1759,11 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ :CaseLevel: Integration """ # Create new Yum repository - yum_repo = cli_factory.make_repository({'product-id': module_product.id}) + yum_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Create new Docker repository - docker_repo = cli_factory.make_repository( + docker_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'quay/busybox', @@ -1628,9 +1773,9 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ ) # Sync all three repos for repo_id in [yum_repo['id'], docker_repo['id']]: - Repository.synchronize({'id': repo_id}) + module_target_sat.cli.Repository.synchronize({'id': repo_id}) # Create CV with different content types - new_cv = cli_factory.make_content_view( + new_cv = 
module_target_sat.cli_factory.make_content_view( { 'organization-id': module_org.id, 'repository-ids': [yum_repo['id'], docker_repo['id']], @@ -1643,27 +1788,29 @@ def test_positive_republish_after_content_removed(self, module_org, module_produ ]: assert len(new_cv[repo_type]) == 1 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Remove content from CV for repo_id in [yum_repo['id'], docker_repo['id']]: - ContentView.remove_repository({'id': new_cv['id'], 'repository-id': repo_id}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': new_cv['id'], 'repository-id': repo_id} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) for repo_type in [ 'yum-repositories', 'container-image-repositories', ]: assert len(new_cv[repo_type]) == 0 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 2 @pytest.mark.run_in_one_thread @pytest.mark.tier2 def test_positive_republish_after_rh_content_removed( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to re-publish content view after all RH associated content was removed from that CV @@ -1683,36 +1830,36 @@ def test_positive_republish_after_rh_content_removed( :CaseImportance: Medium """ - new_cv = cli_factory.make_content_view( + new_cv = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) # Associate repo to CV - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': new_cv['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 1 # Publish a new version of CV - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 1 # Remove content from CV - ContentView.remove_repository( + module_target_sat.cli.ContentView.remove_repository( {'id': new_cv['id'], 'repository-id': module_rhel_content['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['yum-repositories']) == 0 # Publish a new version of CV once more - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert len(new_cv['versions']) == 2 @pytest.mark.tier2 - def test_positive_publish_ccv(self, module_org, module_product): + def test_positive_publish_ccv(self, module_org, module_product, module_target_sat): """attempt to publish a composite content view 
containing custom content @@ -1726,37 +1873,45 @@ def test_positive_publish_ccv(self, module_org, module_product): :CaseLevel: Integration """ - repository = cli_factory.make_repository({'product-id': module_product.id}) + repository = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': repository['id']}) + module_target_sat.cli.Repository.synchronize({'id': repository['id']}) # Create CV - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) # Associate repo to CV - ContentView.add_repository({'id': content_view['id'], 'repository-id': repository['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repository['id']} + ) # Publish a new version of CV - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) # Let us now store the version1 id version1_id = content_view['versions'][0]['id'] # Create composite CV - composite_cv = cli_factory.make_content_view( + composite_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) # Associate version to composite CV - ContentView.add_version({'content-view-version-id': version1_id, 'id': composite_cv['id']}) + module_target_sat.cli.ContentView.add_version( + {'content-view-version-id': version1_id, 'id': composite_cv['id']} + ) # Assert whether version was associated to composite CV - composite_cv = ContentView.info({'id': composite_cv['id']}) + composite_cv = module_target_sat.cli.ContentView.info({'id': composite_cv['id']}) assert composite_cv['components'][0]['id'] == version1_id # Publish a new version of CV - ContentView.publish({'id': composite_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': composite_cv['id']}) # Assert whether Version1 was created and exists in Library Env. 
- composite_cv = ContentView.info({'id': composite_cv['id']}) + composite_cv = module_target_sat.cli.ContentView.info({'id': composite_cv['id']}) assert composite_cv['lifecycle-environments'][0]['name'] == constants.ENVIRONMENT assert composite_cv['versions'][0]['version'] == '1.0' @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_update_version_once(self, module_org, module_product): + def test_positive_update_version_once(self, module_org, module_product, module_target_sat): # Dev notes: # If Dev has version x, then when I promote version y into # Dev, version x goes away (ie when I promote version 1 to Dev, @@ -1782,54 +1937,60 @@ def test_positive_update_version_once(self, module_org, module_product): :CaseImportance: Critical """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version1 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Actual assert for this test happens HERE # Test whether the version1 now belongs to Library - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert constants.ENVIRONMENT in [env['label'] for env in version1['lifecycle-environments']] # Promotion of version1 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version1_id, 'to-lifecycle-environment-id': environment['id']} ) # The only way to validate whether env has the version is to # validate that version has the env. - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert environment['id'] in [env['id'] for env in version1['lifecycle-environments']] # Now Publish version2 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version2 the info is populated. 
- new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) new_cv['versions'].sort(key=lambda version: version['id']) # Let us now store the version2 id version2_id = new_cv['versions'][1]['id'] # Test whether the version2 now belongs to Library - version2 = ContentView.version_info({'id': version2_id}) + version2 = module_target_sat.cli.ContentView.version_info({'id': version2_id}) assert constants.ENVIRONMENT in [env['label'] for env in version2['lifecycle-environments']] # Promotion of version2 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version2_id, 'to-lifecycle-environment-id': environment['id']} ) # Actual assert for this test happens here. # Test whether the version2 now belongs to next env - version2 = ContentView.version_info({'id': version2_id}) + version2 = module_target_sat.cli.ContentView.version_info({'id': version2_id}) assert environment['id'] in [env['id'] for env in version2['lifecycle-environments']] @pytest.mark.tier2 - def test_positive_update_version_multiple(self, module_org, module_product): + def test_positive_update_version_multiple(self, module_org, module_product, module_target_sat): # Dev notes: # Similarly when I publish version y, version x goes away from # Library (ie when I publish version 2, version 1 disappears) @@ -1853,61 +2014,69 @@ def test_positive_update_version_multiple(self, module_org, module_product): :CaseLevel: Integration """ # Create REPO - new_repo = cli_factory.make_repository({'product-id': module_product.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': module_product.id} + ) # Sync REPO - Repository.synchronize({'id': new_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) # Create lce - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) # Create CV - new_cv = cli_factory.make_content_view({'organization-id': module_org.id}) + new_cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV - ContentView.add_repository({'id': new_cv['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': new_cv['id'], 'repository-id': new_repo['id']} + ) # Publish a version1 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # Only after we publish version1 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) # Let us now store the version1 id version1_id = new_cv['versions'][0]['id'] # Test whether the version1 now belongs to Library - version = ContentView.version_info({'id': version1_id}) + version = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert constants.ENVIRONMENT in [env['label'] for env in version['lifecycle-environments']] # Promotion of version1 to Dev env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version1_id, 'to-lifecycle-environment-id': environment['id']} ) # The only way to validate whether env has the version is to # validate that version has the env. 
# Test whether the version1 now belongs to next env - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert environment['id'] in [env['id'] for env in version1['lifecycle-environments']] # Now Publish version2 of CV - ContentView.publish({'id': new_cv['id']}) + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) # As per Dev Notes: # Similarly when I publish version y, version x goes away from Library. # Actual assert for this test happens here. # Test that version1 does not exist in Library after publishing v2 - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert len(version1['lifecycle-environments']) == 1 assert constants.ENVIRONMENT not in [ env['label'] for env in version1['lifecycle-environments'] ] # Only after we publish version2 the info is populated. - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) new_cv['versions'].sort(key=lambda version: version['id']) # Let us now store the version2 id version2_id = new_cv['versions'][1]['id'] # Promotion of version2 to next env - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': version2_id, 'to-lifecycle-environment-id': environment['id']} ) # Actual assert for this test happens here. # Test that version1 does not exist in any/next env after, # promoting version2 to next env - version1 = ContentView.version_info({'id': version1_id}) + version1 = module_target_sat.cli.ContentView.version_info({'id': version1_id}) assert len(version1['lifecycle-environments']) == 0 @pytest.mark.tier2 - def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): + def test_positive_auto_update_composite_to_latest_cv_version( + self, module_org, module_target_sat + ): """Ensure that composite content view component is auto updated to the latest content view version. 
@@ -1933,15 +2102,17 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): :CaseImportance: High """ - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 version_1_id = content_view['versions'][0]['id'] - composite_cv = cli_factory.make_content_view( + composite_cv = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) - ContentView.component_add( + module_target_sat.cli.ContentView.component_add( { 'composite-content-view-id': composite_cv['id'], 'component-content-view-id': content_view['id'], @@ -1949,21 +2120,25 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): } ) # Ensure that version 1 is in composite content view components - components = ContentView.component_list({'composite-content-view-id': composite_cv['id']}) + components = module_target_sat.cli.ContentView.component_list( + {'composite-content-view-id': composite_cv['id']} + ) assert len(components) == 1 component_id = components[0]['content-view-id'] assert components[0]['version-id'] == f'{version_1_id} (Latest)' assert components[0]['current-version'] == '1.0' # Publish the content view a second time - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 2 content_view['versions'].sort(key=lambda version: version['id']) # Ensure that composite content view component has been updated to # version 2 version_2_id = content_view['versions'][1]['id'] assert version_1_id != version_2_id - components = ContentView.component_list({'composite-content-view-id': composite_cv['id']}) + components = module_target_sat.cli.ContentView.component_list( + {'composite-content-view-id': composite_cv['id']} + ) assert len(components) == 1 # Ensure that this is the same component that is updated assert component_id == components[0]['content-view-id'] @@ -1971,7 +2146,7 @@ def test_positive_auto_update_composite_to_latest_cv_version(self, module_org): assert components[0]['current-version'] == '2.0' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id(self, module_org): + def test_positive_subscribe_chost_by_id(self, module_org, module_target_sat): """Attempt to subscribe content host to content view :id: db0bfd9d-3150-427e-9683-a68af33813e7 @@ -1982,15 +2157,21 @@ def test_positive_subscribe_chost_by_id(self, module_org): :CaseLevel: System """ - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + 
module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -1998,13 +2179,13 @@ def test_positive_subscribe_chost_by_id(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.run_in_one_thread @pytest.mark.tier3 def test_positive_subscribe_chost_by_id_using_rh_content( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to subscribe content host to content view that has Red Hat repository assigned to it @@ -2018,28 +2199,30 @@ def test_positive_subscribe_chost_by_id_using_rh_content( :CaseImportance: Medium """ - env = cli_factory.make_lifecycle_environment( + env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['yum-repositories'][0]['name'] == module_rhel_content['name'] - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2047,14 +2230,14 @@ def test_positive_subscribe_chost_by_id_using_rh_content( 'organization-id': module_entitlement_manifest_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.run_in_one_thread 
@pytest.mark.tier3 @pytest.mark.upgrade def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( - self, module_entitlement_manifest_org, module_rhel_content + self, module_entitlement_manifest_org, module_rhel_content, module_target_sat ): """Attempt to subscribe content host to filtered content view that has Red Hat repository assigned to it @@ -2070,24 +2253,24 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( :CaseImportance: Low """ - env = cli_factory.make_lifecycle_environment( + env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_entitlement_manifest_org.id} ) - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_entitlement_manifest_org.id} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': module_rhel_content['id'], } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['yum-repositories'][0]['name'] == module_rhel_content['name'] name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -2096,7 +2279,7 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( } ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter': name, 'content-view-id': content_view['id'], @@ -2104,15 +2287,17 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( } ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2120,11 +2305,13 @@ def test_positive_subscribe_chost_by_id_using_rh_content_and_filters( 'organization-id': module_entitlement_manifest_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): + def test_positive_subscribe_chost_by_id_using_custom_content( + self, module_org, module_target_sat + ): """Attempt to subscribe content host to content view that has custom repository assigned to it @@ -2137,12 +2324,18 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): :CaseImportance: High """ - new_product = cli_factory.make_product({'organization-id': module_org.id}) - new_repo = 
cli_factory.make_repository({'product-id': new_product['id']}) - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - Repository.synchronize({'id': new_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + new_product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + new_repo = module_target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': new_product['id']} + ) + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2150,15 +2343,17 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): } ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2166,11 +2361,11 @@ def test_positive_subscribe_chost_by_id_using_custom_content(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 - def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): + def test_positive_subscribe_chost_by_id_using_ccv(self, module_org, module_target_sat): """Attempt to subscribe content host to composite content view :id: 4be340c0-9e58-4b96-ab37-d7e3b12c724f @@ -2182,19 +2377,23 @@ def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): :CaseLevel: System """ - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( {'composite': True, 'organization-id': module_org.id} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': env['id']} + ) - content_view = ContentView.info({'id': 
content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '0' - cli_factory.make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': env['id'], @@ -2202,7 +2401,7 @@ def test_positive_subscribe_chost_by_id_using_ccv(self, module_org): 'organization-id': module_org.id, } ) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert content_view['content-host-count'] == '1' @pytest.mark.tier3 @@ -2274,12 +2473,12 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( 'Architecture': ['view_architectures'], } # Create a location and organization - loc = cli_factory.make_location() - default_loc = Location.info({'name': constants.DEFAULT_LOC}) - org = cli_factory.make_org() - Org.add_location({'id': org['id'], 'location-id': loc['id']}) + loc = target_sat.cli_factory.make_location() + default_loc = target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) + org = target_sat.cli_factory.make_org() + target_sat.cli.Org.add_location({'id': org['id'], 'location-id': loc['id']}) # Create a non admin user, for the moment without any permissions - user = cli_factory.make_user( + user = target_sat.cli_factory.make_user( { 'admin': False, 'default-organization-id': org['id'], @@ -2291,9 +2490,9 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( } ) # Create a new role - role = cli_factory.make_role() + role = target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -2313,40 +2512,42 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - cli_factory.make_filter({'role-id': role['id'], 'permissions': permission_names}) + target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names} + ) # Add the created and initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises(CLIReturnCodeError) as context: - Role.with_user(user_name, user_password).info({'id': role['id']}) + target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) assert 'Access denied' in str(context) # Create a lifecycle environment - env = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) # Create a product - product = cli_factory.make_product({'organization-id': org['id']}) + product = target_sat.cli_factory.make_product({'organization-id': org['id']}) # Create a yum repository and synchronize - repo = cli_factory.make_repository( - {'product-id': product['id'], 'url': settings.repos.yum_1.url} + repo = target_sat.cli_factory.make_repository( + {'content-type': 'yum', 
'product-id': product['id'], 'url': settings.repos.yum_1.url} ) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view, add the yum repository and publish - content_view = cli_factory.make_content_view({'organization-id': org['id']}) - ContentView.add_repository( + content_view = target_sat.cli_factory.make_content_view({'organization-id': org['id']}) + target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'organization-id': org['id'], 'repository-id': repo['id']} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) # assert that the content view has been published and has versions assert len(content_view['versions']) > 0 content_view_version = content_view['versions'][0] # Promote the content view version to the created environment - ContentView.version_promote( + target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': env['id']} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, user_password).info( + user_content_view = target_sat.cli.ContentView.with_user(user_name, user_password).info( {'id': content_view['id']} ) # assert that this is the same content view @@ -2361,7 +2562,7 @@ def test_positive_sub_host_with_restricted_user_perm_at_custom_loc( ) assert rhel7_contenthost.subscribed # check that the client host exist in the system - org_hosts = Host.list({'organization-id': org['id']}) + org_hosts = target_sat.cli.Host.list({'organization-id': org['id']}) assert len(org_hosts) == 1 assert org_hosts[0]['name'] == rhel7_contenthost.hostname @@ -2428,11 +2629,11 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( 'Architecture': ['view_architectures'], } # Create organization - loc = Location.info({'name': constants.DEFAULT_LOC}) - org = cli_factory.make_org() - Org.add_location({'id': org['id'], 'location-id': loc['id']}) + loc = target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) + org = target_sat.cli_factory.make_org() + target_sat.cli.Org.add_location({'id': org['id'], 'location-id': loc['id']}) # Create a non admin user, for the moment without any permissions - user = cli_factory.make_user( + user = target_sat.cli_factory.make_user( { 'admin': False, 'default-organization-id': org['id'], @@ -2444,9 +2645,9 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( } ) # Create a new role - role = cli_factory.make_role() + role = target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -2466,40 +2667,42 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - cli_factory.make_filter({'role-id': role['id'], 'permissions': permission_names}) + target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names} + ) # Add the created and 
initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises(CLIReturnCodeError) as context: - Role.with_user(user_name, user_password).info({'id': role['id']}) + target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) assert '403 Forbidden' in str(context) # Create a lifecycle environment - env = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) + env = target_sat.cli_factory.make_lifecycle_environment({'organization-id': org['id']}) # Create a product - product = cli_factory.make_product({'organization-id': org['id']}) + product = target_sat.cli_factory.make_product({'organization-id': org['id']}) # Create a yum repository and synchronize - repo = cli_factory.make_repository( - {'product-id': product['id'], 'url': settings.repos.yum_1.url} + repo = target_sat.cli_factory.make_repository( + {'content-type': 'yum', 'product-id': product['id'], 'url': settings.repos.yum_1.url} ) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view, add the yum repository and publish - content_view = cli_factory.make_content_view({'organization-id': org['id']}) - ContentView.add_repository( + content_view = target_sat.cli_factory.make_content_view({'organization-id': org['id']}) + target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'organization-id': org['id'], 'repository-id': repo['id']} ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) # assert that the content view has been published and has versions assert len(content_view['versions']) > 0 content_view_version = content_view['versions'][0] # Promote the content view version to the created environment - ContentView.version_promote( + target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': env['id']} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, user_password).info( + user_content_view = target_sat.cli.ContentView.with_user(user_name, user_password).info( {'id': content_view['id']} ) # assert that this is the same content view @@ -2514,12 +2717,12 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( ) assert rhel7_contenthost.subscribed # check that the client host exist in the system - org_hosts = Host.list({'organization-id': org['id']}) + org_hosts = target_sat.cli.Host.list({'organization-id': org['id']}) assert len(org_hosts) == 1 assert org_hosts[0]['name'] == rhel7_contenthost.hostname @pytest.mark.tier1 - def test_positive_clone_by_id(self, module_org): + def test_positive_clone_by_id(self, module_org, module_target_sat): """Clone existing content view by id :id: e3b63e6e-0964-45fb-a765-e1885c0ecbdd @@ -2529,13 +2732,17 @@ def test_positive_clone_by_id(self, module_org): :CaseImportance: Critical """ cloned_cv_name = gen_string('alpha') - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - new_cv = ContentView.copy({'id': content_view['id'], 
'new-name': cloned_cv_name})[0] - new_cv = ContentView.info({'id': new_cv['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['name'] == cloned_cv_name @pytest.mark.tier1 - def test_positive_clone_by_name(self, module_org): + def test_positive_clone_by_name(self, module_org, module_target_sat): """Clone existing content view by name :id: b4c94286-ebbe-4e4c-a1df-22cb7055984d @@ -2547,19 +2754,21 @@ def test_positive_clone_by_name(self, module_org): :CaseImportance: Critical """ cloned_cv_name = gen_string('alpha') - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - new_cv = ContentView.copy( + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + new_cv = module_target_sat.cli.ContentView.copy( { 'name': content_view['name'], 'organization-id': module_org.id, 'new-name': cloned_cv_name, } )[0] - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert new_cv['name'] == cloned_cv_name @pytest.mark.tier2 - def test_positive_clone_within_same_env(self, module_org): + def test_positive_clone_within_same_env(self, module_org, module_target_sat): """Attempt to create, publish and promote new content view based on existing view within the same environment as the original content view @@ -2573,22 +2782,32 @@ def test_positive_clone_within_same_env(self, module_org): :CaseImportance: High """ cloned_cv_name = gen_string('alpha') - lc_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + lc_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.copy({'id': content_view['id'], 'new-name': cloned_cv_name})[0] - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': lc_env['id'], 'name': lc_env['name']} in new_cv['lifecycle-environments'] 
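
For reference, the pattern these hunks apply throughout the module — resolving hammer entities and `cli_factory` helpers through the Satellite fixture instead of module-level imports — reduces to a minimal sketch like the one below. It is illustrative only (not an additional test in this diff) and simply restates the calling convention shown in the clone tests above, reusing the existing `module_org` and `module_target_sat` fixtures and the helper names that appear in the hunks:

import pytest
from fauxfactory import gen_string


@pytest.mark.tier1
def test_clone_calling_convention(module_org, module_target_sat):
    """Sketch of the refactored style: every CLI call hangs off the Satellite fixture."""
    cloned_cv_name = gen_string('alpha')
    # cli_factory helpers are reached via the fixture rather than robottelo.cli.factory
    content_view = module_target_sat.cli_factory.make_content_view(
        {'organization-id': module_org.id}
    )
    # hammer entities (ContentView, Repository, ...) are reached via module_target_sat.cli
    new_cv = module_target_sat.cli.ContentView.copy(
        {'id': content_view['id'], 'new-name': cloned_cv_name}
    )[0]
    new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']})
    assert new_cv['name'] == cloned_cv_name
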
@pytest.mark.tier2 - def test_positive_clone_with_diff_env(self, module_org): + def test_positive_clone_with_diff_env(self, module_org, module_target_sat): """Attempt to create, publish and promote new content view based on existing view but promoted to a different environment @@ -2602,21 +2821,31 @@ def test_positive_clone_with_diff_env(self, module_org): :CaseLevel: Integration """ cloned_cv_name = gen_string('alpha') - lc_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lc_env_cloned = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + lc_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lc_env_cloned = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']}) - new_cv = ContentView.copy({'id': content_view['id'], 'new-name': cloned_cv_name})[0] - ContentView.publish({'id': new_cv['id']}) - new_cv = ContentView.info({'id': new_cv['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env['id']} + ) + new_cv = module_target_sat.cli.ContentView.copy( + {'id': content_view['id'], 'new-name': cloned_cv_name} + )[0] + module_target_sat.cli.ContentView.publish({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) cvv = new_cv['versions'][0] - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': cvv['id'], 'to-lifecycle-environment-id': lc_env_cloned['id']} ) - new_cv = ContentView.info({'id': new_cv['id']}) + new_cv = module_target_sat.cli.ContentView.info({'id': new_cv['id']}) assert {'id': lc_env_cloned['id'], 'name': lc_env_cloned['name']} in new_cv[ 'lifecycle-environments' ] @@ -2657,7 +2886,9 @@ def test_positive_restart_dynflow_publish(self): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): + def test_positive_remove_renamed_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove version of renamed content view from Library environment :id: aa9bbfda-72e8-45ec-b26d-fdf2691980cf @@ -2678,37 +2909,43 @@ def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): :CaseImportance: Low """ new_name = gen_string('alpha') - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': 
module_org.id}) - ContentView.add_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': custom_yum_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) # ensure that the published content version is in Library environment - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] assert constants.ENVIRONMENT in _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) # rename the content view - ContentView.update({'id': content_view['id'], 'new-name': new_name}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.update({'id': content_view['id'], 'new-name': new_name}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert new_name == content_view['name'] # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2718,14 +2955,16 @@ def test_positive_remove_renamed_cv_version_from_default_env(self, module_org): # ensure that the published content version is not in Library # environment assert constants.ENVIRONMENT not in _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove promoted content view version from Library environment :id: 6643837a-560a-47de-aa4d-90778914dcfa @@ -2747,34 +2986,42 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - custom_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + custom_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + 
module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': custom_yum_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce_dev['id']} ) # ensure that the published content version is in Library and DEV # environments - content_view_version_info = ContentView.version_info( + content_view_version_info = module_target_sat.cli.ContentView.version_info( { 'organization-id': module_org.id, 'content-view-id': content_view['id'], @@ -2786,7 +3033,7 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): } assert {constants.ENVIRONMENT, lce_dev['name']} == content_view_version_lce_names # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2794,7 +3041,7 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): } ) # ensure content view version not in Library and only in DEV - content_view_version_info = ContentView.version_info( + content_view_version_info = module_target_sat.cli.ContentView.version_info( { 'organization-id': module_org.id, 'content-view-id': content_view['id'], @@ -2807,7 +3054,9 @@ def test_positive_remove_promoted_cv_version_from_default_env(self, module_org): assert {lce_dev['name']} == content_view_version_lce_names @pytest.mark.tier2 - def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_qe_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove QE promoted content view version from Library environment :id: e286697f-4113-40a3-b8e8-9ca50647e6d5 @@ -2828,12 +3077,16 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -2842,21 +3095,25 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) - ContentView.add_repository( + 
module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': docker_repository['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV and QE @@ -2865,9 +3122,11 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or constants.ENVIRONMENT, lce_dev['name'], lce_qe['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2877,14 +3136,16 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(self, module_or # ensure content view version is not in Library and only in DEV and QE # environments assert {lce_dev['name'], lce_qe['name']} == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_org): + def test_positive_remove_prod_promoted_cv_version_from_default_env( + self, module_org, module_target_sat + ): """Remove PROD promoted content view version from Library environment :id: ffe3d64e-c3d2-4889-9454-ccc6b10f4db7 @@ -2905,24 +3166,30 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ :CaseLevel: Integration """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': 
settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -2931,22 +3198,26 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE and @@ -2956,9 +3227,11 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ lce_dev['name'], lce_qe['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from Library lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -2971,13 +3244,15 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(self, module_ lce_dev['name'], lce_qe['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) @pytest.mark.tier2 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_cv_version_from_env(self, module_org): + def test_positive_remove_cv_version_from_env(self, module_org, module_target_sat): """Remove promoted content view version from environment :id: 577757ac-b184-4ece-9310-182dd5ceb718 @@ -3001,27 +3276,33 @@ def test_positive_remove_cv_version_from_env(self, module_org): :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = 
module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3030,22 +3311,26 @@ def test_positive_remove_cv_version_from_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, @@ -3056,9 +3341,11 @@ def test_positive_remove_cv_version_from_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from PROD lifecycle environment - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3072,9 +3359,11 @@ def 
test_positive_remove_cv_version_from_env(self, module_org): lce_dev['name'], lce_qe['name'], lce_stage['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # promote content view version to PROD environment again - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce_prod['id']} ) assert { @@ -3083,13 +3372,15 @@ def test_positive_remove_cv_version_from_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) @pytest.mark.tier3 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_remove_cv_version_from_multi_env(self, module_org): + def test_positive_remove_cv_version_from_multi_env(self, module_org, module_target_sat): """Remove promoted content view version from multiple environment :id: 997cfd7d-9029-47e2-a41e-84f4370b5ce5 @@ -3109,27 +3400,33 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3138,22 +3435,26 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = 
module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, @@ -3164,11 +3465,13 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): lce_qe['name'], lce_stage['name'], lce_prod['name'], - } == _get_content_view_version_lce_names_set(content_view['id'], content_view_version['id']) + } == _get_content_view_version_lce_names_set( + content_view['id'], content_view_version['id'], sat=module_target_sat + ) # remove content view version from QE, STAGE, PROD lifecycle # environments for lce in [lce_qe, lce_stage, lce_prod]: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3178,11 +3481,11 @@ def test_positive_remove_cv_version_from_multi_env(self, module_org): # ensure content view version is not in PROD and only in Library, DEV, # QE and STAGE environments assert {constants.ENVIRONMENT, lce_dev['name']} == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) @pytest.mark.tier3 - def test_positive_delete_cv_promoted_to_multi_env(self, module_org): + def test_positive_delete_cv_promoted_to_multi_env(self, module_org, module_target_sat): """Delete published content view with version promoted to multiple environments @@ -3204,27 +3507,33 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): :CaseImportance: High """ - lce_dev = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - lce_qe = cli_factory.make_lifecycle_environment( + lce_dev = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + lce_qe = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_dev['name']} ) - lce_stage = cli_factory.make_lifecycle_environment( + lce_stage = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_qe['name']} ) - lce_prod = cli_factory.make_lifecycle_environment( + lce_prod = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': lce_stage['name']} ) - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': 
custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3233,28 +3542,32 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - content_view_versions = ContentView.info({'id': content_view['id']})['versions'] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_versions = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ] assert len(content_view_versions) > 0 content_view_version = content_view_versions[-1] for lce in [lce_dev, lce_qe, lce_stage, lce_prod]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view_version['id'], 'to-lifecycle-environment-id': lce['id']} ) # ensure that the published content version is in Library, DEV, QE, # STAGE and PROD environments promoted_lce_names_set = _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) assert { constants.ENVIRONMENT, @@ -3265,7 +3578,7 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): } == promoted_lce_names_set # remove from all promoted lifecycle environments for lce_name in promoted_lce_names_set: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3273,12 +3586,12 @@ def test_positive_delete_cv_promoted_to_multi_env(self, module_org): } ) # ensure content view in content views list - content_views = ContentView.list({'organization-id': module_org.id}) + content_views = module_target_sat.cli.ContentView.list({'organization-id': module_org.id}) assert content_view['name'] in [cv['name'] for cv in content_views] # delete the content view - ContentView.delete({'id': content_view['id']}) + module_target_sat.cli.ContentView.delete({'id': content_view['id']}) # ensure the content view is not in content views list - content_views = ContentView.list({'organization-id': module_org.id}) + content_views = module_target_sat.cli.ContentView.list({'organization-id': module_org.id}) assert content_view['name'] not in [cv['name'] for cv in content_views] @pytest.mark.stubbed @@ -3367,7 +3680,7 @@ def test_positive_delete_cv_multi_env_promoted_with_host_registered(self): (not 
settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) def test_positive_remove_cv_version_from_multi_env_capsule_scenario( - self, module_org, capsule_configured + self, module_org, capsule_configured, module_target_sat ): """Remove promoted content view version from multiple environment, with satellite setup to use capsule @@ -3405,41 +3718,47 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( """ # Note: This test case requires complete external capsule # configuration. - dev_env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - qe_env = cli_factory.make_lifecycle_environment( + dev_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + qe_env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': dev_env['name']} ) - prod_env = cli_factory.make_lifecycle_environment( + prod_env = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'prior': qe_env['name']} ) - capsule = Capsule().info({'name': capsule_configured.hostname}) + capsule = module_target_sat.cli.Capsule().info({'name': capsule_configured.hostname}) # Add all environments to capsule environments = {constants.ENVIRONMENT, dev_env['name'], qe_env['name'], prod_env['name']} for env_name in environments: - Capsule.content_add_lifecycle_environment( + module_target_sat.cli.Capsule.content_add_lifecycle_environment( { 'id': capsule['id'], 'organization-id': module_org.id, 'environment': env_name, } ) - capsule_environments = Capsule.content_lifecycle_environments( + capsule_environments = module_target_sat.cli.Capsule.content_lifecycle_environments( {'id': capsule['id'], 'organization-id': module_org.id} ) capsule_environments_names = {env['name'] for env in capsule_environments} assert environments == capsule_environments_names # Setup a yum repo - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - docker_product = cli_factory.make_product({'organization-id': module_org.id}) - docker_repository = cli_factory.make_repository( + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + docker_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': constants.CONTAINER_UPSTREAM_NAME, @@ -3448,10 +3767,12 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( 'url': constants.CONTAINER_REGISTRY_HUB, } ) - Repository.synchronize({'id': docker_repository['id']}) - content_view = cli_factory.make_content_view({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': docker_repository['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_org.id} + ) for repo in [custom_yum_repo, docker_repository]: - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3459,11 
+3780,13 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( } ) # Publish the content view - ContentView.publish({'id': content_view['id']}) - content_view_version = ContentView.info({'id': content_view['id']})['versions'][-1] + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view_version = module_target_sat.cli.ContentView.info({'id': content_view['id']})[ + 'versions' + ][-1] # Promote the content view to DEV, QE, PROD for env in [dev_env, qe_env, prod_env]: - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( { 'id': content_view_version['id'], 'organization-id': module_org.id, @@ -3471,8 +3794,10 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( } ) # Synchronize the capsule content - Capsule.content_synchronize({'id': capsule['id'], 'organization-id': module_org.id}) - capsule_content_info = Capsule.content_info( + module_target_sat.cli.Capsule.content_synchronize( + {'id': capsule['id'], 'organization-id': module_org.id} + ) + capsule_content_info = module_target_sat.cli.Capsule.content_info( {'id': capsule['id'], 'organization-id': module_org.id} ) # Ensure that all environments exists in capsule content @@ -3502,7 +3827,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( pytest.skip('Power control host workflow is not defined') # Remove the content view version from Library and DEV environments for lce_name in [constants.ENVIRONMENT, dev_env['name']]: - ContentView.remove_from_environment( + module_target_sat.cli.ContentView.remove_from_environment( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -3513,7 +3838,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( # DEV and exist only in QE and PROD environments_with_cv = {qe_env['name'], prod_env['name']} assert environments_with_cv == _get_content_view_version_lce_names_set( - content_view['id'], content_view_version['id'] + content_view['id'], content_view_version['id'], sat=module_target_sat ) # Resume the capsule with ensure True to ping the virtual machine try: @@ -3522,7 +3847,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( pytest.skip('Power control host workflow is not defined') # Assert that in capsule content the content view version # does not exit in Library and DEV and exist only in QE and PROD - capsule_content_info = Capsule.content_info( + capsule_content_info = module_target_sat.cli.Capsule.content_info( {'id': capsule['id'], 'organization-id': module_org.id} ) capsule_content_info_lces = capsule_content_info['lifecycle-environments'] @@ -3541,7 +3866,7 @@ def test_positive_remove_cv_version_from_multi_env_capsule_scenario( assert content_view['name'] not in capsule_content_info_lce_cvs_names @pytest.mark.tier2 - def test_negative_user_with_no_create_view_cv_permissions(self, module_org): + def test_negative_user_with_no_create_view_cv_permissions(self, module_org, module_target_sat): """Unauthorized users are not able to create/view content views :id: 17617893-27c2-4cb2-a2ed-47378ef90e7a @@ -3554,24 +3879,26 @@ def test_negative_user_with_no_create_view_cv_permissions(self, module_org): :CaseImportance: Critical """ password = gen_alphanumeric() - no_rights_user = cli_factory.make_user({'password': password}) + no_rights_user = module_target_sat.cli_factory.make_user({'password': password}) no_rights_user['password'] = password - org_id = cli_factory.make_org(cached=True)['id'] + org_id = 
module_target_sat.cli_factory.make_org(cached=True)['id'] for name in generate_strings_list(exclude_types=['cjk']): # test that user can't create with pytest.raises(CLIReturnCodeError): - ContentView.with_user(no_rights_user['login'], no_rights_user['password']).create( - {'name': name, 'organization-id': org_id} - ) + module_target_sat.cli.ContentView.with_user( + no_rights_user['login'], no_rights_user['password'] + ).create({'name': name, 'organization-id': org_id}) # test that user can't read - con_view = cli_factory.make_content_view({'name': name, 'organization-id': org_id}) + con_view = module_target_sat.cli_factory.make_content_view( + {'name': name, 'organization-id': org_id} + ) with pytest.raises(CLIReturnCodeError): - ContentView.with_user(no_rights_user['login'], no_rights_user['password']).info( - {'id': con_view['id']} - ) + module_target_sat.cli.ContentView.with_user( + no_rights_user['login'], no_rights_user['password'] + ).info({'id': con_view['id']}) @pytest.mark.tier2 - def test_negative_user_with_read_only_cv_permission(self, module_org): + def test_negative_user_with_read_only_cv_permission(self, module_org, module_target_sat): """Read-only user is able to view content view :id: 588f57b5-9855-4c14-80d0-64b617c6b6dc @@ -3588,12 +3915,14 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) password = gen_string('alphanumeric') - user = cli_factory.make_user({'password': password}) - role = cli_factory.make_role() - cli_factory.make_filter( + user = module_target_sat.cli_factory.make_user({'password': password}) + role = module_target_sat.cli_factory.make_role() + module_target_sat.cli_factory.make_filter( { 'organization-ids': module_org.id, 'permissions': 'view_content_views', @@ -3601,26 +3930,28 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): 'override': 1, } ) - User.add_role({'id': user['id'], 'role-id': role['id']}) - ContentView.with_user(user['login'], password).info({'id': cv['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).info({'id': cv['id']}) # Verify read-only user can't either edit CV with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).update( + module_target_sat.cli.ContentView.with_user(user['login'], password).update( {'id': cv['id'], 'new-name': gen_string('alphanumeric')} ) # or create a new one with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).create( + module_target_sat.cli.ContentView.with_user(user['login'], password).create( {'name': gen_string('alphanumeric'), 'organization-id': module_org.id} ) # or publish with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).publish({'id': cv['id']}) - ContentView.publish({'id': cv['id']}) - cvv = ContentView.info({'id': cv['id']})['versions'][-1] + module_target_sat.cli.ContentView.with_user(user['login'], password).publish( + {'id': cv['id']} + ) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + cvv = 
module_target_sat.cli.ContentView.info({'id': cv['id']})['versions'][-1] # or promote with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user['login'], password).version_promote( + module_target_sat.cli.ContentView.with_user(user['login'], password).version_promote( { 'id': cvv['id'], 'organization-id': module_org.id, @@ -3629,22 +3960,22 @@ def test_negative_user_with_read_only_cv_permission(self, module_org): ) # or create a product with pytest.raises(CLIReturnCodeError): - Product.with_user(user['login'], password).create( + module_target_sat.cli.Product.with_user(user['login'], password).create( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) # cannot create activation key with pytest.raises(CLIReturnCodeError): - ActivationKey.with_user(user['login'], password).create( + module_target_sat.cli.ActivationKey.with_user(user['login'], password).create( {'name': gen_string('alpha'), 'organization-id': module_org.id} ) # cannot create host collection with pytest.raises(CLIReturnCodeError): - HostCollection.with_user(user['login'], password).create( + module_target_sat.cli.HostCollection.with_user(user['login'], password).create( {'organization-id': module_org.id} ) @pytest.mark.tier2 - def test_positive_user_with_all_cv_permissions(self, module_org): + def test_positive_user_with_all_cv_permissions(self, module_org, module_target_sat): """A user with all content view permissions is able to create, read, modify, promote, publish content views @@ -3661,57 +3992,63 @@ def test_positive_user_with_all_cv_permissions(self, module_org): :CaseImportance: Critical """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) - environment = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) + environment = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) password = gen_string('alphanumeric') - user = cli_factory.make_user({'password': password, 'organization-ids': module_org.id}) - role = cli_factory.make_role({'organization-ids': module_org.id}) + user = module_target_sat.cli_factory.make_user( + {'password': password, 'organization-ids': module_org.id} + ) + role = module_target_sat.cli_factory.make_role({'organization-ids': module_org.id}) # note: the filters inherit role organizations - cli_factory.make_filter( + module_target_sat.cli_factory.make_filter( {'permissions': constants.PERMISSIONS['Katello::ContentView'], 'role-id': role['id']} ) - cli_factory.make_filter( + module_target_sat.cli_factory.make_filter( {'permissions': constants.PERMISSIONS['Katello::KTEnvironment'], 'role-id': role['id']} ) - User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # Make sure user is not admin and has only expected roles assigned - user = User.info({'id': user['id']}) + user = module_target_sat.cli.User.info({'id': user['id']}) assert user['admin'] == 'no' assert set(user['roles']) == {role['name']} # Verify user can either edit CV - ContentView.with_user(user['login'], password).info({'id': cv['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).info({'id': cv['id']}) new_name = gen_string('alphanumeric') - ContentView.with_user(user['login'], password).update( + module_target_sat.cli.ContentView.with_user(user['login'], password).update( {'id': cv['id'], 'new-name': new_name} ) - cv = 
ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['name'] == new_name # or create a new one new_cv_name = gen_string('alphanumeric') - new_cv = ContentView.with_user(user['login'], password).create( + new_cv = module_target_sat.cli.ContentView.with_user(user['login'], password).create( {'name': new_cv_name, 'organization-id': module_org.id} ) assert new_cv['name'] == new_cv_name # or publish - ContentView.with_user(user['login'], password).publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) + module_target_sat.cli.ContentView.with_user(user['login'], password).publish( + {'id': cv['id']} + ) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 1 # or promote - ContentView.with_user(user['login'], password).version_promote( + module_target_sat.cli.ContentView.with_user(user['login'], password).version_promote( { 'id': cv['versions'][-1]['id'], 'organization-id': module_org.id, 'to-lifecycle-environment-id': environment['id'], } ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert environment['id'] in [env['id'] for env in cv['lifecycle-environments']] @pytest.mark.tier3 @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_inc_update_no_lce(self, module_org, module_product): + def test_positive_inc_update_no_lce(self, module_org, module_product, module_target_sat): """Publish incremental update without providing lifecycle environment for a content view version not promoted to any lifecycle environment @@ -3727,25 +4064,25 @@ def test_positive_inc_update_no_lce(self, module_org, module_product): :CaseLevel: Integration """ - repo = cli_factory.make_repository( + repo = module_target_sat.cli_factory.make_repository( { 'product-id': module_product.id, 'content-type': 'yum', 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': repo['id']}) - content_view = cli_factory.make_content_view( + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': repo['id']} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - cvf = cli_factory.make_content_view_filter( + cvf = module_target_sat.cli_factory.make_content_view_filter( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -3753,25 +4090,25 @@ def test_positive_inc_update_no_lce(self, module_org, module_product): 'type': 'rpm', }, ) - cli_factory.make_content_view_filter_rule( + module_target_sat.cli_factory.content_view_filter_rule( { 'content-view-filter-id': cvf['filter-id'], 'name': FAKE_2_CUSTOM_PACKAGE_NAME, 'version': 5.21, } ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cvv = content_view['versions'][0] - result = ContentView.version_incremental_update( + result = module_target_sat.cli.ContentView.version_incremental_update( {'content-view-version-id': cvv['id'], 'errata-ids': settings.repos.yum_1.errata[1]} ) # Inc update output format is pretty weird - list of 
dicts where each # key's value is actual line from stdout result = [line.strip() for line_dict in result for line in line_dict.values()] assert FAKE_2_CUSTOM_PACKAGE not in [line.strip() for line in result] - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert '1.1' in [cvv_['version'] for cvv_ in content_view['versions']] @@ -3788,11 +4125,11 @@ def make_file_repository_upload_contents( if options is None: options = {'product-id': module_product.id, 'content-type': 'file'} if not options.get('content-type'): - raise cli_factory.CLIFactoryError('Please provide a valid Content Type.') - new_repo = cli_factory.make_repository(options) + raise CLIFactoryError('Please provide a valid Content Type.') + new_repo = satellite.cli_factory.make_repository(options) remote_path = f'/tmp/{constants.RPM_TO_UPLOAD}' satellite.put(DataFile.RPM_TO_UPLOAD, remote_path) - Repository.upload_content( + satellite.cli.Repository.upload_content( { 'name': new_repo['name'], 'organization-id': module_org.id, @@ -3800,13 +4137,15 @@ def make_file_repository_upload_contents( 'product-id': new_repo['product']['id'], } ) - new_repo = Repository.info({'id': new_repo['id']}) + new_repo = satellite.cli.Repository.info({'id': new_repo['id']}) assert int(new_repo['content-counts']['files']) > 0 return new_repo @pytest.mark.skip_if_open('BZ:1610309') @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, target_sat): + def test_positive_arbitrary_file_repo_addition( + self, module_org, module_product, module_target_sat + ): """Check a File Repository with Arbitrary File can be added to a Content View @@ -3831,11 +4170,11 @@ def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, :BZ: 1610309, 1908465 """ repo = self.make_file_repository_upload_contents( - module_org, module_product, satellite=target_sat + module_org, module_product, satellite=module_target_sat ) - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Associate repo to CV with names. 
- ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'name': cv['name'], 'organization-id': module_org.id, @@ -3843,12 +4182,14 @@ def test_positive_arbitrary_file_repo_addition(self, module_org, module_product, 'repository': repo['name'], } ) - cv = ContentView.info({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.info({'id': cv['id']}) assert cv['file-repositories'][0]['name'] == repo['name'] @pytest.mark.skip_if_open('BZ:1908465') @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_removal(self, module_org, module_product, target_sat): + def test_positive_arbitrary_file_repo_removal( + self, module_org, module_product, module_target_sat + ): """Check a File Repository with Arbitrary File can be removed from a Content View @@ -3871,14 +4212,16 @@ def test_positive_arbitrary_file_repo_removal(self, module_org, module_product, :BZ: 1908465 """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) repo = self.make_file_repository_upload_contents( - module_org, module_product, satellite=target_sat + module_org, module_product, satellite=module_target_sat ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': cv['id'], 'repository-id': repo['id'], 'organization-id': module_org.id} ) - ContentView.remove_repository({'id': cv['id'], 'repository-id': repo['id']}) + module_target_sat.cli.ContentView.remove_repository( + {'id': cv['id'], 'repository-id': repo['id']} + ) assert cv['file-repositories'][0]['id'] != repo['id'] @pytest.mark.stubbed @@ -3909,7 +4252,9 @@ def test_positive_arbitrary_file_sync_over_capsule(self): """ @pytest.mark.tier3 - def test_positive_arbitrary_file_repo_promotion(self, module_org, module_product, target_sat): + def test_positive_arbitrary_file_repo_promotion( + self, module_org, module_product, module_target_sat + ): """Check arbitrary files availability for Content view version after content-view promotion. 
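# A minimal, self-contained sketch of the fixture-based pattern the hunks in this file
# converge on: the test requests module_target_sat and reaches hammer through its
# cli / cli_factory accessors instead of module-level robottelo.cli imports. The test
# name and body below are illustrative only; each call mirrors one shown in the diff.
def test_example_cv_with_file_repo(module_org, module_product, module_target_sat):
    repo = module_target_sat.cli_factory.make_repository(
        {'product-id': module_product.id, 'content-type': 'file'}
    )
    cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
    module_target_sat.cli.ContentView.add_repository(
        {'id': cv['id'], 'repository-id': repo['id'], 'organization-id': module_org.id}
    )
    cv = module_target_sat.cli.ContentView.info({'id': cv['id']})
    assert cv['file-repositories'][0]['name'] == repo['name']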
@@ -3935,20 +4280,24 @@ def test_positive_arbitrary_file_repo_promotion(self, module_org, module_product :CaseImportance: High """ - cv = cli_factory.make_content_view({'organization-id': module_org.id}) + cv = module_target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) repo = self.make_file_repository_upload_contents( - module_product, module_product, satellite=target_sat + module_product, module_product, satellite=module_target_sat ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': cv['id'], 'repository-id': repo['id'], 'organization-id': module_org.id} ) - env = cli_factory.make_lifecycle_environment({'organization-id': module_org.id}) - ContentView.publish({'id': cv['id']}) - content_view_info = ContentView.version_info({'content-view-id': cv['id'], 'version': 1}) - ContentView.version_promote( + env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': cv['id']}) + content_view_info = module_target_sat.cli.ContentView.version_info( + {'content-view-id': cv['id'], 'version': 1} + ) + module_target_sat.cli.ContentView.version_promote( {'id': content_view_info['id'], 'to-lifecycle-environment-id': env['id']} ) - expected_repo = ContentView.version_info( + expected_repo = module_target_sat.cli.ContentView.version_info( { 'content-view-id': cv['id'], 'lifecycle-environment': env['name'], @@ -3980,7 +4329,7 @@ def test_positive_katello_repo_rpms_max_int(self, target_sat): assert 'id|bigint' in result.stdout.splitlines()[3].replace(' ', '') @pytest.mark.tier3 - def test_positive_inc_update_should_not_fail(self, module_org): + def test_positive_inc_update_should_not_fail(self, module_org, module_target_sat): """Incremental update after removing a package should not give a 400 error code :BZ: 2041497 @@ -3991,41 +4340,43 @@ def test_positive_inc_update_should_not_fail(self, module_org): :expectedresults: Incremental update is successful """ - custom_yum_product = cli_factory.make_product({'organization-id': module_org.id}) - custom_yum_repo = cli_factory.make_repository( + custom_yum_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id} + ) + custom_yum_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'yum', 'product-id': custom_yum_product['id'], 'url': settings.repos.yum_1.url, } ) - Repository.synchronize({'id': custom_yum_repo['id']}) - repo = Repository.info({'id': custom_yum_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': custom_yum_repo['id']}) assert repo['content-counts']['packages'] == '32' # grab and remove the 'bear' package - package = Package.list({'repository-id': repo['id']})[0] - Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) - repo = Repository.info({'id': repo['id']}) + package = module_target_sat.cli.Package.list({'repository-id': repo['id']})[0] + module_target_sat.cli.Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '31' - content_view = cli_factory.make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': repo['id']} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 
'organization-id': module_org.id, 'repository-id': repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - Repository.synchronize({'id': custom_yum_repo['id']}) - repo = Repository.info({'id': custom_yum_repo['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.Repository.synchronize({'id': custom_yum_repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': custom_yum_repo['id']}) assert repo['content-counts']['packages'] == '32' - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv = content_view['versions'][0] - result = ContentView.version_incremental_update( + result = module_target_sat.cli.ContentView.version_incremental_update( {'content-view-version-id': cvv['id'], 'errata-ids': settings.repos.yum_1.errata[0]} ) assert result[2] - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert '1.1' in [cvv_['version'] for cvv_ in content_view['versions']] diff --git a/tests/foreman/cli/test_contentviewfilter.py b/tests/foreman/cli/test_contentviewfilter.py index c19091df054..dac2b0d0a6a 100644 --- a/tests/foreman/cli/test_contentviewfilter.py +++ b/tests/foreman/cli/test_contentviewfilter.py @@ -21,12 +21,9 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView from robottelo.cli.defaults import Defaults -from robottelo.cli.factory import make_content_view, make_repository -from robottelo.cli.repository import Repository from robottelo.constants import CONTAINER_REGISTRY_HUB +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_values_list, parametrized, @@ -35,15 +32,17 @@ @pytest.fixture(scope='module') -def sync_repo(module_org, module_product): - repo = make_repository({'organization-id': module_org.id, 'product-id': module_product.id}) - Repository.synchronize({'id': repo['id']}) +def sync_repo(module_org, module_product, module_target_sat): + repo = module_target_sat.cli_factory.make_repository( + {'organization-id': module_org.id, 'product-id': module_product.id} + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) return repo @pytest.fixture -def content_view(module_org, sync_repo): - return make_content_view( +def content_view(module_org, sync_repo, module_target_sat): + return module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'repository-ids': [sync_repo['id']]} ) @@ -55,7 +54,7 @@ class TestContentViewFilter: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.parametrize('filter_content_type', ['rpm', 'package_group', 'erratum', 'modulemd']) def test_positive_create_with_name_by_cv_id( - self, name, filter_content_type, module_org, content_view + self, name, filter_content_type, module_org, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by id. 
Use different value types as a name and random filter @@ -70,7 +69,7 @@ def test_positive_create_with_name_by_cv_id( :CaseImportance: Critical """ - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -78,14 +77,16 @@ def test_positive_create_with_name_by_cv_id( 'type': filter_content_type, }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) assert cvf['name'] == name assert cvf['type'] == filter_content_type @pytest.mark.tier1 @pytest.mark.parametrize('filter_content_type', ['rpm', 'package_group', 'erratum', 'modulemd']) def test_positive_create_with_content_type_by_cv_id( - self, filter_content_type, module_org, content_view + self, filter_content_type, module_org, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by id. Use different content types as a parameter @@ -100,7 +101,7 @@ def test_positive_create_with_content_type_by_cv_id( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -108,12 +109,16 @@ def test_positive_create_with_content_type_by_cv_id( 'type': filter_content_type, }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['type'] == filter_content_type @pytest.mark.tier1 @pytest.mark.parametrize('inclusion', ['true', 'false']) - def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, content_view): + def test_positive_create_with_inclusion_by_cv_id( + self, inclusion, module_org, content_view, module_target_sat + ): """Create new content view filter and assign it to existing content view by id. Use different inclusions as a parameter @@ -127,7 +132,7 @@ def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, co :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': inclusion, @@ -136,11 +141,15 @@ def test_positive_create_with_inclusion_by_cv_id(self, inclusion, module_org, co 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == inclusion @pytest.mark.tier1 - def test_positive_create_with_description_by_cv_id(self, module_org, content_view): + def test_positive_create_with_description_by_cv_id( + self, module_org, content_view, module_target_sat + ): """Create new content view filter with description and assign it to existing content view. 
@@ -153,7 +162,7 @@ def test_positive_create_with_description_by_cv_id(self, module_org, content_vie """ description = gen_string('utf8') cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'description': description, @@ -162,13 +171,15 @@ def test_positive_create_with_description_by_cv_id(self, module_org, content_vie 'type': 'package_group', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['description'] == description @pytest.mark.run_in_one_thread @pytest.mark.tier1 def test_positive_create_with_default_taxonomies( - self, module_org, module_location, content_view + self, module_org, module_location, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view by name. Use default organization and location to find necessary @@ -187,7 +198,7 @@ def test_positive_create_with_default_taxonomies( Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id}) Defaults.add({'param-name': 'location_id', 'param-value': module_location.id}) try: - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'name': name, @@ -195,14 +206,16 @@ def test_positive_create_with_default_taxonomies( 'inclusion': 'true', }, ) - cvf = ContentView.filter.info({'content-view': content_view['name'], 'name': name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view': content_view['name'], 'name': name} + ) assert cvf['name'] == name finally: Defaults.delete({'param-name': 'organization_id'}) Defaults.delete({'param-name': 'location_id'}) @pytest.mark.tier1 - def test_positive_list_by_name_and_org(self, module_org, content_view): + def test_positive_list_by_name_and_org(self, module_org, content_view, module_target_sat): """Create new content view filter and try to list it by its name and organization it belongs @@ -217,7 +230,7 @@ def test_positive_list_by_name_and_org(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -225,14 +238,14 @@ def test_positive_list_by_name_and_org(self, module_org, content_view): 'type': 'package_group', }, ) - cv_filters = ContentView.filter.list( + cv_filters = module_target_sat.cli.ContentView.filter.list( {'content-view': content_view['name'], 'organization': module_org.name} ) assert len(cv_filters) >= 1 assert cvf_name in [cvf['name'] for cvf in cv_filters] @pytest.mark.tier1 - def test_positive_create_by_cv_name(self, module_org, content_view): + def test_positive_create_by_cv_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. 
Use organization id for reference @@ -245,7 +258,7 @@ def test_positive_create_by_cv_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'true', @@ -254,10 +267,12 @@ def test_positive_create_by_cv_name(self, module_org, content_view): 'type': 'package_group', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_by_org_name(self, module_org, content_view): + def test_positive_create_by_org_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. Use organization name for reference @@ -268,7 +283,7 @@ def test_positive_create_by_org_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'false', @@ -277,10 +292,12 @@ def test_positive_create_by_org_name(self, module_org, content_view): 'type': 'erratum', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_by_org_label(self, module_org, content_view): + def test_positive_create_by_org_label(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by name. Use organization label for reference @@ -291,7 +308,7 @@ def test_positive_create_by_org_label(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view': content_view['name'], 'inclusion': 'true', @@ -300,10 +317,14 @@ def test_positive_create_by_org_label(self, module_org, content_view): 'type': 'erratum', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_view): + def test_positive_create_with_repo_by_id( + self, module_org, sync_repo, content_view, module_target_sat + ): """Create new content view filter and assign it to existing content view that has repository assigned to it. Use that repository id for proper filter assignment. 
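# The negative-path filter tests further below keep their pytest.raises structure; only
# the exception now comes from robottelo.exceptions (see the import hunk at the top of
# this file). A small sketch, assuming the content_view fixture defined above; pytest
# and gen_string come from this file's existing imports, and the test name is illustrative:
from robottelo.exceptions import CLIReturnCodeError

def test_example_filter_requires_type(content_view, module_target_sat):
    with pytest.raises(CLIReturnCodeError):
        module_target_sat.cli.ContentView.filter.create(
            {'content-view-id': content_view['id'], 'name': gen_string('utf8')}
        )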
@@ -316,7 +337,7 @@ def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_vi :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -326,14 +347,16 @@ def test_positive_create_with_repo_by_id(self, module_org, sync_repo, content_vi 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) # Check that only one, specified above, repository is displayed assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier1 def test_positive_create_with_repo_by_name( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and assign it to existing content view that has repository assigned to it. Use that repository name for @@ -349,7 +372,7 @@ def test_positive_create_with_repo_by_name( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'false', @@ -360,13 +383,15 @@ def test_positive_create_with_repo_by_name( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) # Check that only one, specified above, repository is displayed assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier1 - def test_positive_create_with_original_pkgs(self, sync_repo, content_view): + def test_positive_create_with_original_pkgs(self, sync_repo, content_view, module_target_sat): """Create new content view filter and assign it to existing content view that has repository assigned to it. Enable 'original packages' option for that filter @@ -379,7 +404,7 @@ def test_positive_create_with_original_pkgs(self, sync_repo, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -389,12 +414,14 @@ def test_positive_create_with_original_pkgs(self, sync_repo, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['repositories'][0]['name'] == sync_repo['name'] @pytest.mark.tier2 def test_positive_create_with_repos_yum_and_docker( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new docker repository and add to content view that has yum repo already assigned to it. 
Create new content view filter and assign @@ -406,7 +433,7 @@ def test_positive_create_with_repos_yum_and_docker( :expectedresults: Content view filter created successfully and has both repositories affected (yum and docker) """ - docker_repository = make_repository( + docker_repository = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'busybox', @@ -416,12 +443,12 @@ def test_positive_create_with_repos_yum_and_docker( }, ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( {'id': content_view['id'], 'repository-id': docker_repository['id']} ) repos = [sync_repo['id'], docker_repository['id']] cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -431,14 +458,18 @@ def test_positive_create_with_repos_yum_and_docker( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 2 for repo in cvf['repositories']: assert repo['id'] in repos @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name, module_org, content_view): + def test_negative_create_with_invalid_name( + self, name, module_org, content_view, module_target_sat + ): """Try to create content view filter using invalid names only :id: f3497a23-6e34-4fee-9964-f95762fc737c @@ -450,7 +481,7 @@ def test_negative_create_with_invalid_name(self, name, module_org, content_view) :CaseImportance: Low """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -460,7 +491,7 @@ def test_negative_create_with_invalid_name(self, name, module_org, content_view) ) @pytest.mark.tier1 - def test_negative_create_with_same_name(self, module_org, content_view): + def test_negative_create_with_same_name(self, module_org, content_view, module_target_sat): """Try to create content view filter using same name twice :id: 7e7444f4-e2b5-406d-a210-49b4008c88d9 @@ -470,7 +501,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): :CaseImportance: Low """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -479,7 +510,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -489,7 +520,7 @@ def test_negative_create_with_same_name(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_create_without_type(self, module_org, content_view): + def test_negative_create_without_type(self, module_org, content_view, module_target_sat): """Try to create content view filter without providing required parameter 'type' @@ -500,7 +531,7 @@ def test_negative_create_without_type(self, module_org, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': 
gen_string('utf8'), @@ -509,7 +540,7 @@ def test_negative_create_without_type(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_create_without_cv(self): + def test_negative_create_without_cv(self, module_target_sat): """Try to create content view filter without providing content view information which should be used as basis for filter @@ -520,10 +551,14 @@ def test_negative_create_without_cv(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create({'name': gen_string('utf8'), 'type': 'rpm'}) + module_target_sat.cli.ContentView.filter.create( + {'name': gen_string('utf8'), 'type': 'rpm'} + ) @pytest.mark.tier1 - def test_negative_create_with_invalid_repo_id(self, module_org, content_view): + def test_negative_create_with_invalid_repo_id( + self, module_org, content_view, module_target_sat + ): """Try to create content view filter using incorrect repository :id: 21fdbeca-ad0a-4e29-93dc-f850b5639f4f @@ -533,7 +568,7 @@ def test_negative_create_with_invalid_repo_id(self, module_org, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': gen_string('utf8'), @@ -545,7 +580,7 @@ def test_negative_create_with_invalid_repo_id(self, module_org, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) - def test_positive_update_name(self, new_name, module_org, content_view): + def test_positive_update_name(self, new_name, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. Try to update that filter using different value types as a name @@ -562,7 +597,7 @@ def test_positive_update_name(self, new_name, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - cvf = ContentView.filter.create( + cvf = module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -570,19 +605,21 @@ def test_positive_update_name(self, new_name, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'id': cvf['filter-id'], 'new-name': new_name, } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': new_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': new_name} + ) assert cvf['name'] == new_name @pytest.mark.tier2 def test_positive_update_repo_with_same_type( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and apply it to existing content view that has repository assigned to it. 
Try to update that filter and @@ -598,7 +635,7 @@ def test_positive_update_repo_with_same_type( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -606,16 +643,20 @@ def test_positive_update_repo_with_same_type( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] - new_repo = make_repository( + new_repo = module_target_sat.cli_factory.make_repository( {'organization-id': module_org.id, 'product-id': module_product.id}, ) - ContentView.add_repository({'id': content_view['id'], 'repository-id': new_repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': new_repo['id']} + ) - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -623,7 +664,9 @@ def test_positive_update_repo_with_same_type( } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] != sync_repo['name'] assert cvf['repositories'][0]['name'] == new_repo['name'] @@ -631,7 +674,7 @@ def test_positive_update_repo_with_same_type( @pytest.mark.tier2 @pytest.mark.upgrade def test_positive_update_repo_with_different_type( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Create new content view filter and apply it to existing content view that has repository assigned to it. 
Try to update that filter and @@ -646,7 +689,7 @@ def test_positive_update_repo_with_different_type( :CaseLevel: Integration """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -654,10 +697,12 @@ def test_positive_update_repo_with_different_type( 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] == sync_repo['name'] - docker_repo = make_repository( + docker_repo = module_target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'busybox', @@ -666,21 +711,25 @@ def test_positive_update_repo_with_different_type( 'url': CONTAINER_REGISTRY_HUB, }, ) - ContentView.add_repository({'id': content_view['id'], 'repository-id': docker_repo['id']}) - ContentView.filter.update( + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': docker_repo['id']} + ) + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, 'repository-ids': docker_repo['id'], } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert len(cvf['repositories']) == 1 assert cvf['repositories'][0]['name'] != sync_repo['name'] assert cvf['repositories'][0]['name'] == docker_repo['name'] @pytest.mark.tier2 - def test_positive_update_inclusion(self, module_org, content_view): + def test_positive_update_inclusion(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. 
Try to update that filter and assign opposite inclusion value for it @@ -693,7 +742,7 @@ def test_positive_update_inclusion(self, module_org, content_view): :CaseLevel: Integration """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -702,21 +751,25 @@ def test_positive_update_inclusion(self, module_org, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'true' - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, 'inclusion': 'false', } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'false' @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_with_name(self, new_name, content_view): + def test_negative_update_with_name(self, new_name, content_view, module_target_sat): """Try to update content view filter using invalid names only :id: 6c40e452-f786-4e28-9f03-b1935b55b33a @@ -730,11 +783,11 @@ def test_negative_update_with_name(self, new_name, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( {'content-view-id': content_view['id'], 'name': cvf_name, 'type': 'rpm'} ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -742,10 +795,12 @@ def test_negative_update_with_name(self, new_name, content_view): } ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': new_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': new_name} + ) @pytest.mark.tier1 - def test_negative_update_with_same_name(self, module_org, content_view): + def test_negative_update_with_same_name(self, module_org, content_view, module_target_sat): """Try to update content view filter using name of already existing entity @@ -756,7 +811,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -765,7 +820,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): }, ) new_name = gen_string('alpha', 100) - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': new_name, @@ -774,7 +829,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': new_name, @@ -783,7 +838,7 @@ def test_negative_update_with_same_name(self, module_org, content_view): ) @pytest.mark.tier1 - def test_negative_update_inclusion(self, 
module_org, content_view): + def test_negative_update_inclusion(self, module_org, content_view, module_target_sat): """Try to update content view filter and assign incorrect inclusion value for it @@ -794,7 +849,7 @@ def test_negative_update_inclusion(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'inclusion': 'true', @@ -804,18 +859,22 @@ def test_negative_update_inclusion(self, module_org, content_view): }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'inclusion': 'wrong_value', 'name': cvf_name, } ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) assert cvf['inclusion'] == 'true' @pytest.mark.tier1 - def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view): + def test_negative_update_with_non_existent_repo_id( + self, sync_repo, content_view, module_target_sat + ): """Try to update content view filter using non-existing repository ID :id: 457af8c2-fb32-4164-9e19-98676f4ea063 @@ -825,7 +884,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -834,7 +893,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view }, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -844,7 +903,7 @@ def test_negative_update_with_non_existent_repo_id(self, sync_repo, content_view @pytest.mark.tier1 def test_negative_update_with_invalid_repo_id( - self, module_org, module_product, sync_repo, content_view + self, module_org, module_product, sync_repo, content_view, module_target_sat ): """Try to update filter and assign repository which does not belong to filter content view @@ -856,7 +915,7 @@ def test_negative_update_with_invalid_repo_id( :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -864,11 +923,11 @@ def test_negative_update_with_invalid_repo_id( 'type': 'rpm', }, ) - new_repo = make_repository( + new_repo = module_target_sat.cli_factory.make_repository( {'organization-id': module_org.id, 'product-id': module_product.id}, ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.update( + module_target_sat.cli.ContentView.filter.update( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -878,7 +937,7 @@ def test_negative_update_with_invalid_repo_id( @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_delete_by_name(self, name, module_org, content_view): + def test_positive_delete_by_name(self, name, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. 
Try to delete that filter using different value types as a name @@ -891,7 +950,7 @@ def test_positive_delete_by_name(self, name, module_org, content_view): :CaseImportance: Critical """ - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': name, @@ -899,14 +958,20 @@ def test_positive_delete_by_name(self, name, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) - ContentView.filter.delete({'content-view-id': content_view['id'], 'name': name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) + module_target_sat.cli.ContentView.filter.delete( + {'content-view-id': content_view['id'], 'name': name} + ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': name} + ) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_delete_by_id(self, module_org, content_view): + def test_positive_delete_by_id(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. Try to delete that filter using its id as a parameter @@ -917,7 +982,7 @@ def test_positive_delete_by_id(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -925,13 +990,17 @@ def test_positive_delete_by_id(self, module_org, content_view): 'type': 'rpm', }, ) - cvf = ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) - ContentView.filter.delete({'id': cvf['filter-id']}) + cvf = module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) + module_target_sat.cli.ContentView.filter.delete({'id': cvf['filter-id']}) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_positive_delete_by_org_name(self, module_org, content_view): + def test_positive_delete_by_org_name(self, module_org, content_view, module_target_sat): """Create new content view filter and assign it to existing content view by id. 
Try to delete that filter using organization and content view names where that filter was applied @@ -943,7 +1012,7 @@ def test_positive_delete_by_org_name(self, module_org, content_view): :CaseImportance: Critical """ cvf_name = gen_string('utf8') - ContentView.filter.create( + module_target_sat.cli.ContentView.filter.create( { 'content-view-id': content_view['id'], 'name': cvf_name, @@ -951,8 +1020,10 @@ def test_positive_delete_by_org_name(self, module_org, content_view): 'type': 'rpm', }, ) - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) - ContentView.filter.delete( + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) + module_target_sat.cli.ContentView.filter.delete( { 'content-view': content_view['name'], 'name': cvf_name, @@ -960,10 +1031,12 @@ def test_positive_delete_by_org_name(self, module_org, content_view): } ) with pytest.raises(CLIReturnCodeError): - ContentView.filter.info({'content-view-id': content_view['id'], 'name': cvf_name}) + module_target_sat.cli.ContentView.filter.info( + {'content-view-id': content_view['id'], 'name': cvf_name} + ) @pytest.mark.tier1 - def test_negative_delete_by_name(self, content_view): + def test_negative_delete_by_name(self, content_view, module_target_sat): """Try to delete non-existent filter using generated name :id: 84509061-6652-4594-b68a-4566c04bc289 @@ -973,7 +1046,7 @@ def test_negative_delete_by_name(self, content_view): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - ContentView.filter.delete( + module_target_sat.cli.ContentView.filter.delete( {'content-view-id': content_view['id'], 'name': gen_string('utf8')} ) diff --git a/tests/foreman/cli/test_discoveryrule.py b/tests/foreman/cli/test_discoveryrule.py index efdc081d79f..b3ec46375e7 100644 --- a/tests/foreman/cli/test_discoveryrule.py +++ b/tests/foreman/cli/test_discoveryrule.py @@ -25,8 +25,7 @@ import pytest from requests import HTTPError -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError, make_discoveryrule +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.datafactory import ( filtered_datapoint, @@ -62,7 +61,7 @@ class TestDiscoveryRule: """Implements Foreman discovery Rules tests in CLI.""" @pytest.fixture(scope='function') - def discoveryrule_factory(self, class_org, class_location, class_hostgroup): + def discoveryrule_factory(self, class_org, class_location, class_hostgroup, class_target_sat): def _create_discoveryrule(org, loc, hostgroup, options=None): """Makes a new discovery rule and asserts its success""" options = options or {} @@ -89,7 +88,7 @@ def _create_discoveryrule(org, loc, hostgroup, options=None): # create a simple object from the dictionary that the CLI factory provides # This allows for consistent attributized access of all fixture entities in the tests - return Box(make_discoveryrule(options)) + return Box(class_target_sat.cli_factory.discoveryrule(options)) return partial( _create_discoveryrule, org=class_org, loc=class_location, hostgroup=class_hostgroup diff --git a/tests/foreman/cli/test_docker.py b/tests/foreman/cli/test_docker.py index 9e7ca7b5ed4..e745f44af8c 100644 --- a/tests/foreman/cli/test_docker.py +++ b/tests/foreman/cli/test_docker.py @@ -17,20 +17,6 @@ from fauxfactory import gen_string, gen_url import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base 
import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.docker import Docker -from robottelo.cli.factory import ( - make_activation_key, - make_content_view, - make_lifecycle_environment, - make_product_wait, - make_repository, -) -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, @@ -38,6 +24,7 @@ CONTAINER_UPSTREAM_NAME, REPO_TYPE, ) +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_docker_upstream_names, parametrized, @@ -46,7 +33,7 @@ ) -def _repo(product_id, name=None, upstream_name=None, url=None): +def _repo(sat, product_id, name=None, upstream_name=None, url=None): """Creates a Docker-based repository. :param product_id: ID of the ``Product``. @@ -58,7 +45,7 @@ def _repo(product_id, name=None, upstream_name=None, url=None): CONTAINER_REGISTRY_HUB constant. :return: A ``Repository`` object. """ - return make_repository( + return sat.cli_factory.make_repository( { 'content-type': REPO_TYPE['docker'], 'docker-upstream-name': upstream_name or CONTAINER_UPSTREAM_NAME, @@ -69,39 +56,41 @@ def _repo(product_id, name=None, upstream_name=None, url=None): ) -def _content_view(repo_id, org_id): +def _content_view(sat, repo_id, org_id): """Create a content view and link it to the given repository.""" - content_view = make_content_view({'composite': False, 'organization-id': org_id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo_id}) - return ContentView.info({'id': content_view['id']}) + content_view = sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': org_id} + ) + sat.cli.ContentView.add_repository({'id': content_view['id'], 'repository-id': repo_id}) + return sat.cli.ContentView.info({'id': content_view['id']}) @pytest.fixture -def repo(module_product): - return _repo(module_product.id) +def repo(module_product, target_sat): + return _repo(target_sat, module_product.id) @pytest.fixture -def content_view(module_org, repo): - return _content_view(repo['id'], module_org.id) +def content_view(module_org, target_sat, repo): + return _content_view(target_sat, repo['id'], module_org.id) @pytest.fixture -def content_view_publish(content_view): - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - return ContentView.version_info({'id': content_view['versions'][0]['id']}) +def content_view_publish(content_view, target_sat): + target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = target_sat.cli.ContentView.info({'id': content_view['id']}) + return target_sat.cli.ContentView.version_info({'id': content_view['versions'][0]['id']}) @pytest.fixture -def content_view_promote(content_view_publish, module_lce): - ContentView.version_promote( +def content_view_promote(content_view_publish, module_lce, target_sat): + target_sat.cli.ContentView.version_promote( { 'id': content_view_publish['id'], 'to-lifecycle-environment-id': module_lce.id, } ) - return ContentView.version_info({'id': content_view_publish['id']}) + return target_sat.cli.ContentView.version_info({'id': content_view_publish['id']}) class TestDockerManifest: @@ -113,7 +102,7 @@ class TestDockerManifest: """ @pytest.mark.tier2 - def test_positive_read_docker_tags(self, repo): + def 
test_positive_read_docker_tags(self, repo, module_target_sat): """docker manifest displays tags information for a docker manifest :id: 59b605b5-ac2d-46e3-a85e-a259e78a07a8 @@ -125,18 +114,18 @@ def test_positive_read_docker_tags(self, repo): :BZ: 1658274 """ - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Grab all available manifests related to repository - manifests_list = Docker.manifest.list({'repository-id': repo['id']}) + manifests_list = module_target_sat.cli.Docker.manifest.list({'repository-id': repo['id']}) # Some manifests do not have tags associated with it, ignore those # because we want to check the tag information manifests = [m_iter for m_iter in manifests_list if not m_iter['tags'] == ''] assert manifests - tags_list = Docker.tag.list({'repository-id': repo['id']}) + tags_list = module_target_sat.cli.Docker.tag.list({'repository-id': repo['id']}) # Extract tag names for the repository out of docker tag list repo_tag_names = [tag['tag'] for tag in tags_list] for manifest in manifests: - manifest_info = Docker.manifest.info({'id': manifest['id']}) + manifest_info = module_target_sat.cli.Docker.manifest.info({'id': manifest['id']}) # Check that manifest's tag is listed in tags for the repository for t_iter in manifest_info['tags']: assert t_iter['name'] in repo_tag_names @@ -152,7 +141,7 @@ class TestDockerRepository: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_docker_repository_names())) - def test_positive_create_with_name(self, module_org, module_product, name): + def test_positive_create_with_name(self, module_product, name, module_target_sat): """Create one Docker-type repository :id: e82a36c8-3265-4c10-bafe-c7e07db3be78 @@ -164,13 +153,15 @@ def test_positive_create_with_name(self, module_org, module_product, name): :CaseImportance: Critical """ - repo = _repo(module_product.id, name) + repo = _repo(module_target_sat, module_product.id, name) assert repo['name'] == name assert repo['upstream-repository-name'] == CONTAINER_UPSTREAM_NAME assert repo['content-type'] == REPO_TYPE['docker'] @pytest.mark.tier2 - def test_positive_create_repos_using_same_product(self, module_org, module_product): + def test_positive_create_repos_using_same_product( + self, module_org, module_product, module_target_sat + ): """Create multiple Docker-type repositories :id: 6dd25cf4-f8b6-4958-976a-c116daf27b44 @@ -182,13 +173,15 @@ def test_positive_create_repos_using_same_product(self, module_org, module_produ """ repo_names = set() for _ in range(randint(2, 5)): - repo = _repo(module_product.id) + repo = _repo(module_target_sat, module_product.id) repo_names.add(repo['name']) - product = Product.info({'id': module_product.id, 'organization-id': module_org.id}) + product = module_target_sat.cli.Product.info( + {'id': module_product.id, 'organization-id': module_org.id} + ) assert repo_names.issubset({repo_['repo-name'] for repo_ in product['content']}) @pytest.mark.tier2 - def test_positive_create_repos_using_multiple_products(self, module_org): + def test_positive_create_repos_using_multiple_products(self, module_org, module_target_sat): """Create multiple Docker-type repositories on multiple products. 
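# The _repo helper defined earlier in this file now takes the Satellite object as its
# first argument, so an ad-hoc docker repository in a test follows this shape (a sketch;
# the test name and repository name are placeholders, the CLI calls mirror the diff):
def test_example_docker_repo_sync(module_product, module_target_sat):
    repo = _repo(module_target_sat, module_product.id, name='busybox-mirror')
    module_target_sat.cli.Repository.synchronize({'id': repo['id']})
    repo = module_target_sat.cli.Repository.info({'id': repo['id']})
    assert int(repo['content-counts']['container-image-manifests']) > 0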
@@ -201,16 +194,20 @@ def test_positive_create_repos_using_multiple_products(self, module_org): :CaseLevel: Integration """ for _ in range(randint(2, 5)): - product = make_product_wait({'organization-id': module_org.id}) + product = module_target_sat.cli_factory.make_product_wait( + {'organization-id': module_org.id} + ) repo_names = set() for _ in range(randint(2, 3)): - repo = _repo(product['id']) + repo = _repo(module_target_sat, product['id']) repo_names.add(repo['name']) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = module_target_sat.cli.Product.info( + {'id': product['id'], 'organization-id': module_org.id} + ) assert repo_names == {repo_['repo-name'] for repo_ in product['content']} @pytest.mark.tier1 - def test_positive_sync(self, repo): + def test_positive_sync(self, repo, module_target_sat): """Create and sync a Docker-type repository :id: bff1d40e-181b-48b2-8141-8c86e0db62a2 @@ -221,13 +218,13 @@ def test_positive_sync(self, repo): :CaseImportance: Critical """ assert int(repo['content-counts']['container-image-manifests']) == 0 - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['container-image-manifests']) > 0 @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_docker_repository_names())) - def test_positive_update_name(self, repo, new_name): + def test_positive_update_name(self, repo, new_name, module_target_sat): """Create a Docker-type repository and update its name. :id: 8b3a8496-e9bd-44f1-916f-6763a76b9b1b @@ -239,13 +236,15 @@ def test_positive_update_name(self, repo, new_name): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'new-name': new_name, 'url': repo['url']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'new-name': new_name, 'url': repo['url']} + ) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['name'] == new_name @pytest.mark.tier1 @pytest.mark.parametrize('new_upstream_name', **parametrized(valid_docker_upstream_names())) - def test_positive_update_upstream_name(self, repo, new_upstream_name): + def test_positive_update_upstream_name(self, repo, new_upstream_name, module_target_sat): """Create a Docker-type repository and update its upstream name. :id: 1a6985ed-43ec-4ea6-ba27-e3870457ac56 @@ -257,19 +256,19 @@ def test_positive_update_upstream_name(self, repo, new_upstream_name): :CaseImportance: Critical """ - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': new_upstream_name, 'id': repo['id'], 'url': repo['url'], } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['upstream-repository-name'] == new_upstream_name @pytest.mark.tier1 @pytest.mark.parametrize('new_upstream_name', **parametrized(invalid_docker_upstream_names())) - def test_negative_update_upstream_name(self, repo, new_upstream_name): + def test_negative_update_upstream_name(self, repo, new_upstream_name, module_target_sat): """Attempt to update upstream name for a Docker-type repository. 
:id: 798651af-28b2-4907-b3a7-7c560bf66c7c @@ -283,7 +282,7 @@ def test_negative_update_upstream_name(self, repo, new_upstream_name): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError, match='Validation failed: Docker upstream name'): - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': new_upstream_name, 'id': repo['id'], @@ -293,7 +292,7 @@ def test_negative_update_upstream_name(self, repo, new_upstream_name): @pytest.mark.skip_if_not_set('docker') @pytest.mark.tier1 - def test_positive_create_with_long_upstream_name(self, module_product): + def test_positive_create_with_long_upstream_name(self, module_product, module_target_sat): """Create a docker repository with upstream name longer than 30 characters @@ -308,6 +307,7 @@ def test_positive_create_with_long_upstream_name(self, module_product): :CaseImportance: Critical """ repo = _repo( + module_target_sat, module_product.id, upstream_name=CONTAINER_RH_REGISTRY_UPSTREAM_NAME, url=settings.docker.external_registry_1, @@ -316,7 +316,7 @@ def test_positive_create_with_long_upstream_name(self, module_product): @pytest.mark.skip_if_not_set('docker') @pytest.mark.tier1 - def test_positive_update_with_long_upstream_name(self, repo): + def test_positive_update_with_long_upstream_name(self, repo, module_target_sat): """Create a docker repository and update its upstream name with longer than 30 characters value @@ -328,18 +328,18 @@ def test_positive_update_with_long_upstream_name(self, repo): :CaseImportance: Critical """ - Repository.update( + module_target_sat.cli.Repository.update( { 'docker-upstream-name': CONTAINER_RH_REGISTRY_UPSTREAM_NAME, 'id': repo['id'], 'url': settings.docker.external_registry_1, } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['upstream-repository-name'] == CONTAINER_RH_REGISTRY_UPSTREAM_NAME @pytest.mark.tier2 - def test_positive_update_url(self, repo): + def test_positive_update_url(self, repo, module_target_sat): """Create a Docker-type repository and update its URL. :id: 73caacd4-7f17-42a7-8d93-3dee8b9341fa @@ -348,12 +348,12 @@ def test_positive_update_url(self, repo): repository and that its URL can be updated. """ new_url = gen_url() - Repository.update({'id': repo['id'], 'url': new_url}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'url': new_url}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['url'] == new_url @pytest.mark.tier1 - def test_positive_delete_by_id(self, repo): + def test_positive_delete_by_id(self, repo, module_target_sat): """Create and delete a Docker-type repository :id: ab1e8228-92a8-45dc-a863-7181711f2745 @@ -363,12 +363,12 @@ def test_positive_delete_by_id(self, repo): :CaseImportance: Critical """ - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier2 - def test_positive_delete_random_repo_by_id(self, module_org): + def test_positive_delete_random_repo_by_id(self, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. @@ -378,22 +378,23 @@ def test_positive_delete_random_repo_by_id(self, module_org): without altering the other products. 
""" products = [ - make_product_wait({'organization-id': module_org.id}) for _ in range(randint(2, 5)) + module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + for _ in range(randint(2, 5)) ] repos = [] for product in products: for _ in range(randint(2, 3)): - repos.append(_repo(product['id'])) + repos.append(_repo(module_target_sat, product['id'])) # Select random repository and delete it repo = choice(repos) repos.remove(repo) - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) # Verify other repositories were not touched product_ids = [product['id'] for product in products] for repo in repos: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['product']['id'] in product_ids @@ -408,7 +409,7 @@ class TestDockerContentView: """ @pytest.mark.tier2 - def test_positive_add_docker_repo_by_id(self, module_org, repo): + def test_positive_add_docker_repo_by_id(self, module_org, repo, module_target_sat): """Add one Docker-type repository to a non-composite content view :id: 87d6c7bb-92f8-4a32-8ad2-2a1af896500b @@ -416,13 +417,17 @@ def test_positive_add_docker_repo_by_id(self, module_org, repo): :expectedresults: A repository is created with a Docker repository and the product is added to a non-composite content view """ - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert repo['id'] in [repo_['id'] for repo_ in content_view['container-image-repositories']] @pytest.mark.tier2 - def test_positive_add_docker_repos_by_id(self, module_org, module_product): + def test_positive_add_docker_repos_by_id(self, module_org, module_product, module_target_sat): """Add multiple Docker-type repositories to a non-composite CV. :id: 2eb19e28-a633-4c21-9469-75a686c83b34 @@ -431,18 +436,22 @@ def test_positive_add_docker_repos_by_id(self, module_org, module_product): repositories and the product is added to a non-composite content view. 
""" - repos = [_repo(module_product.id) for _ in range(randint(2, 5))] - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + repos = [_repo(module_target_sat, module_product.id) for _ in range(randint(2, 5))] + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for repo in repos: - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert {repo['id'] for repo in repos} == { repo['id'] for repo in content_view['container-image-repositories'] } @pytest.mark.tier2 - def test_positive_add_synced_docker_repo_by_id(self, module_org, repo): + def test_positive_add_synced_docker_repo_by_id(self, module_org, repo, module_target_sat): """Create and sync a Docker-type repository :id: 6f51d268-ed23-48ab-9dea-cd3571daa647 @@ -450,17 +459,23 @@ def test_positive_add_synced_docker_repo_by_id(self, module_org, repo): :expectedresults: A repository is created with a Docker repository and it is synchronized. """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['container-image-manifests']) > 0 - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert repo['id'] in [repo_['id'] for repo_ in content_view['container-image-repositories']] @pytest.mark.tier2 - def test_positive_add_docker_repo_by_id_to_ccv(self, module_org, content_view): + def test_positive_add_docker_repo_by_id_to_ccv( + self, module_org, content_view, module_target_sat + ): """Add one Docker-type repository to a composite content view :id: 8e2ef5ba-3cdf-4ef9-a22a-f1701e20a5d5 @@ -471,23 +486,27 @@ def test_positive_add_docker_repo_by_id_to_ccv(self, module_org, content_view): :BZ: 1359665 """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'id': comp_content_view['id'], 'component-ids': content_view['versions'][0]['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) 
assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] @pytest.mark.tier2 - def test_positive_add_docker_repos_by_id_to_ccv(self, module_org, module_product): + def test_positive_add_docker_repos_by_id_to_ccv( + self, module_org, module_product, module_target_sat + ): """Add multiple Docker-type repositories to a composite content view. :id: b79cbc97-3dba-4059-907d-19316684d569 @@ -500,27 +519,33 @@ def test_positive_add_docker_repos_by_id_to_ccv(self, module_org, module_product """ cv_versions = [] for _ in range(randint(2, 5)): - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - repo = _repo(module_product.id) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + repo = _repo(module_target_sat, module_product.id) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 cv_versions.append(content_view['versions'][0]) - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': [cv_version['id'] for cv_version in cv_versions], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) comp_ids = [component['id'] for component in comp_content_view['components']] for cv_version in cv_versions: assert cv_version['id'] in comp_ids @pytest.mark.tier2 - def test_positive_publish_with_docker_repo(self, content_view): + def test_positive_publish_with_docker_repo(self, content_view, module_target_sat): """Add Docker-type repository to content view and publish it once. :id: 28480de3-ffb5-4b8e-8174-fffffeef6af4 @@ -530,12 +555,14 @@ def test_positive_publish_with_docker_repo(self, content_view): published only once. """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 @pytest.mark.tier2 - def test_positive_publish_with_docker_repo_composite(self, content_view, module_org): + def test_positive_publish_with_docker_repo_composite( + self, content_view, module_org, module_target_sat + ): """Add Docker-type repository to composite CV and publish it once. 
:id: 2d75419b-73ed-4f29-ae0d-9af8d9624c87 @@ -549,28 +576,30 @@ def test_positive_publish_with_docker_repo_composite(self, content_view, module_ """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert len(comp_content_view['versions']) == 1 @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo(self, content_view): + def test_positive_publish_multiple_with_docker_repo(self, content_view, module_target_sat): """Add Docker-type repository to content view and publish it multiple times. @@ -584,12 +613,14 @@ def test_positive_publish_multiple_with_docker_repo(self, content_view): publish_amount = randint(2, 5) for _ in range(publish_amount): - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == publish_amount @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo_composite(self, module_org, content_view): + def test_positive_publish_multiple_with_docker_repo_composite( + self, module_org, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it multiple times. 
@@ -604,30 +635,34 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org, """ assert len(content_view['versions']) == 0 - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view['versions'][0]['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view['versions'][0]['id'] in [ component['id'] for component in comp_content_view['components'] ] publish_amount = randint(2, 5) for _ in range(publish_amount): - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert len(comp_content_view['versions']) == publish_amount @pytest.mark.tier2 - def test_positive_promote_with_docker_repo(self, module_org, module_lce, content_view): + def test_positive_promote_with_docker_repo( + self, module_org, module_lce, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then promote it to the next available lifecycle-environment. @@ -636,20 +671,28 @@ def test_positive_promote_with_docker_repo(self, module_org, module_lce, content :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. """ - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(content_view['versions']) == 1 - cvv = ContentView.version_info({'id': content_view['versions'][0]['id']}) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 1 - ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': module_lce.id}) - cvv = ContentView.version_info({'id': content_view['versions'][0]['id']}) + module_target_sat.cli.ContentView.version_promote( + {'id': cvv['id'], 'to-lifecycle-environment-id': module_lce.id} + ) + cvv = module_target_sat.cli.ContentView.version_info( + {'id': content_view['versions'][0]['id']} + ) assert len(cvv['lifecycle-environments']) == 2 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_promote_multiple_with_docker_repo(self, module_org, content_view): + def test_positive_promote_multiple_with_docker_repo( + self, module_org, content_view, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then promote it to multiple available lifecycle-environments. 
@@ -658,26 +701,32 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org, content_view):
         :expectedresults: Docker-type repository is promoted to content view
             found in the specific lifecycle-environments.
         """
-        ContentView.publish({'id': content_view['id']})
-        content_view = ContentView.info({'id': content_view['id']})
+        module_target_sat.cli.ContentView.publish({'id': content_view['id']})
+        content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']})
         assert len(content_view['versions']) == 1
-        cvv = ContentView.version_info({'id': content_view['versions'][0]['id']})
+        cvv = module_target_sat.cli.ContentView.version_info(
+            {'id': content_view['versions'][0]['id']}
+        )
         assert len(cvv['lifecycle-environments']) == 1
         lces = [
-            make_lifecycle_environment({'organization-id': module_org.id})
+            module_target_sat.cli_factory.make_lifecycle_environment(
+                {'organization-id': module_org.id}
+            )
             for _ in range(1, randint(3, 6))
         ]
 
         for expected_lces, lce in enumerate(lces, start=2):
-            ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']})
-            cvv = ContentView.version_info({'id': cvv['id']})
+            module_target_sat.cli.ContentView.version_promote(
+                {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}
+            )
+            cvv = module_target_sat.cli.ContentView.version_info({'id': cvv['id']})
             assert len(cvv['lifecycle-environments']) == expected_lces
 
     @pytest.mark.tier2
     def test_positive_promote_with_docker_repo_composite(
-        self, module_org, module_lce, content_view
+        self, module_org, module_lce, content_view, module_target_sat
     ):
         """Add Docker-type repository to composite content view and publish it.
         Then promote it to the next available lifecycle-environment.
@@ -689,39 +738,47 @@ def test_positive_promote_with_docker_repo_composite(
 
         :BZ: 1359665
         """
-        ContentView.publish({'id': content_view['id']})
-        content_view = ContentView.info({'id': content_view['id']})
+        module_target_sat.cli.ContentView.publish({'id': content_view['id']})
+        content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']})
         assert len(content_view['versions']) == 1
 
-        comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id})
-        ContentView.update(
+        comp_content_view = module_target_sat.cli_factory.make_content_view(
+            {'composite': True, 'organization-id': module_org.id}
+        )
+        module_target_sat.cli.ContentView.update(
             {
                 'component-ids': content_view['versions'][0]['id'],
                 'id': comp_content_view['id'],
             }
         )
-        comp_content_view = ContentView.info({'id': comp_content_view['id']})
+        comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']})
         assert content_view['versions'][0]['id'] in [
             component['id'] for component in comp_content_view['components']
         ]
 
-        ContentView.publish({'id': comp_content_view['id']})
-        comp_content_view = ContentView.info({'id': comp_content_view['id']})
-        cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']})
+        module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']})
+        comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']})
+        cvv = module_target_sat.cli.ContentView.version_info(
+            {'id': comp_content_view['versions'][0]['id']}
+        )
         assert len(cvv['lifecycle-environments']) == 1
 
-        ContentView.version_promote(
+        module_target_sat.cli.ContentView.version_promote(
             {
                 'id': comp_content_view['versions'][0]['id'],
                 'to-lifecycle-environment-id': module_lce.id,
             }
         )
-        cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']})
+        cvv = module_target_sat.cli.ContentView.version_info(
+            {'id': comp_content_view['versions'][0]['id']}
+        )
         assert len(cvv['lifecycle-environments']) == 2
 
     @pytest.mark.tier2
     @pytest.mark.upgrade
-    def test_positive_promote_multiple_with_docker_repo_composite(self, content_view, module_org):
+    def test_positive_promote_multiple_with_docker_repo_composite(
+        self, content_view, module_org, module_target_sat
+    ):
         """Add Docker-type repository to composite content view and publish it.
         Then promote it to the multiple available lifecycle-environments.
 
@@ -732,45 +789,51 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, content_view, module_org):
 
         :BZ: 1359665
         """
-        ContentView.publish({'id': content_view['id']})
-        content_view = ContentView.info({'id': content_view['id']})
+        module_target_sat.cli.ContentView.publish({'id': content_view['id']})
+        content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']})
         assert len(content_view['versions']) == 1
 
-        comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id})
-        ContentView.update(
+        comp_content_view = module_target_sat.cli_factory.make_content_view(
+            {'composite': True, 'organization-id': module_org.id}
+        )
+        module_target_sat.cli.ContentView.update(
             {
                 'component-ids': content_view['versions'][0]['id'],
                 'id': comp_content_view['id'],
             }
         )
-        comp_content_view = ContentView.info({'id': comp_content_view['id']})
+        comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']})
         assert content_view['versions'][0]['id'] in [
             component['id'] for component in comp_content_view['components']
         ]
 
-        ContentView.publish({'id': comp_content_view['id']})
-        comp_content_view = ContentView.info({'id': comp_content_view['id']})
-        cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']})
+        module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']})
+        comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']})
+        cvv = module_target_sat.cli.ContentView.version_info(
+            {'id': comp_content_view['versions'][0]['id']}
+        )
         assert len(cvv['lifecycle-environments']) == 1
         lces = [
-            make_lifecycle_environment({'organization-id': module_org.id})
+            module_target_sat.cli_factory.make_lifecycle_environment(
+                {'organization-id': module_org.id}
+            )
            for _ in range(1, randint(3, 6))
         ]
 
         for expected_lces, lce in enumerate(lces, start=2):
-            ContentView.version_promote(
+            module_target_sat.cli.ContentView.version_promote(
                 {
                     'id': cvv['id'],
                     'to-lifecycle-environment-id': lce['id'],
                 }
             )
-            cvv = ContentView.version_info({'id': cvv['id']})
+            cvv = module_target_sat.cli.ContentView.version_info({'id': cvv['id']})
             assert len(cvv['lifecycle-environments']) == expected_lces
 
     @pytest.mark.tier2
     @pytest.mark.upgrade
-    def test_positive_name_pattern_change(self, module_org):
+    def test_positive_name_pattern_change(self, module_org, module_target_sat):
         """Promote content view with Docker repository to lifecycle
         environment. Change registry name pattern for that environment.
         Verify that repository name on product changed according to new pattern.
@@ -780,7 +843,9 @@ def test_positive_name_pattern_change(self, module_org):
 
         :expectedresults: Container repository name is changed
             according to new pattern.
""" - lce = make_lifecycle_environment({'organization-id': module_org.id}) + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) pattern_prefix = gen_string('alpha', 5) docker_upstream_name = 'hello-world' new_pattern = ( @@ -788,37 +853,49 @@ def test_positive_name_pattern_change(self, module_org): ) repo = _repo( - make_product_wait({'organization-id': module_org.id})['id'], + module_target_sat, + module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id})[ + 'id' + ], name=gen_string('alpha', 5), upstream_name=docker_upstream_name, ) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['registry-name-pattern'] == new_pattern - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( {'name': repo['name'], 'environment-id': lce['id'], 'organization-id': module_org.id} )[0] expected_name = f'{pattern_prefix}-{content_view["label"]}/{docker_upstream_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_positive_product_name_change_after_promotion(self, module_org): + def test_positive_product_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change product name. Verify that repository name on product changed according to new pattern. 
@@ -833,45 +910,63 @@ def test_positive_product_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = '<%= content_view.label %>/<%= product.name %>' - lce = make_lifecycle_environment({'organization-id': module_org.id}) - prod = make_product_wait({'organization-id': module_org.id, 'name': old_prod_name}) - repo = _repo(prod['id'], name=gen_string('alpha', 5), upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - LifecycleEnvironment.update( + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + prod = module_target_sat.cli_factory.make_product_wait( + {'organization-id': module_org.id, 'name': old_prod_name} + ) + repo = _repo( + module_target_sat, + prod['id'], + name=gen_string('alpha', 5), + upstream_name=docker_upstream_name, + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['registry-name-pattern'] == new_pattern - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - Product.update({'name': new_prod_name, 'id': prod['id']}) + module_target_sat.cli.Product.update({'name': new_prod_name, 'id': prod['id']}) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( {'name': repo['name'], 'environment-id': lce['id'], 'organization-id': module_org.id} )[0] expected_name = f'{content_view["label"]}/{old_prod_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( { 'id': content_view['versions'][-1]['id'], 'to-lifecycle-environment-id': lce['id'], } ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': repo['name'], 'environment-id': lce['id'], @@ -879,10 +974,13 @@ def test_positive_product_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{new_prod_name}'.lower() - assert Repository.info({'id': 
repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_positive_repo_name_change_after_promotion(self, module_org): + def test_positive_repo_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change repository name. Verify that Docker repository name on product changed according to new pattern. @@ -897,27 +995,37 @@ def test_positive_repo_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = '<%= content_view.label %>/<%= repository.name %>' - lce = make_lifecycle_environment({'organization-id': module_org.id}) - prod = make_product_wait({'organization-id': module_org.id}) - repo = _repo(prod['id'], name=old_repo_name, upstream_name=docker_upstream_name) - Repository.synchronize({'id': repo['id']}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - LifecycleEnvironment.update( + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + prod = module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + repo = _repo( + module_target_sat, prod['id'], name=old_repo_name, upstream_name=docker_upstream_name + ) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], 'organization-id': module_org.id, } ) - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) - Repository.update({'name': new_repo_name, 'id': repo['id'], 'product-id': prod['id']}) + module_target_sat.cli.Repository.update( + {'name': new_repo_name, 'id': repo['id'], 'product-id': prod['id']} + ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': new_repo_name, 'environment-id': lce['id'], @@ -925,18 +1033,21 @@ def test_positive_repo_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{old_repo_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.version_promote( { 'id': content_view['versions'][-1]['id'], 'to-lifecycle-environment-id': 
lce['id'], } ) - repo = Repository.list( + repo = module_target_sat.cli.Repository.list( { 'name': new_repo_name, 'environment-id': lce['id'], @@ -944,10 +1055,13 @@ def test_positive_repo_name_change_after_promotion(self, module_org): } )[0] expected_name = f'{content_view["label"]}/{new_repo_name}'.lower() - assert Repository.info({'id': repo['id']})['container-repository-name'] == expected_name + assert ( + module_target_sat.cli.Repository.info({'id': repo['id']})['container-repository-name'] + == expected_name + ) @pytest.mark.tier2 - def test_negative_set_non_unique_name_pattern_and_promote(self, module_org): + def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_target_sat): """Set registry name pattern to one that does not guarantee uniqueness. Try to promote content view with multiple Docker repositories to lifecycle environment. Verify that content has not been promoted. @@ -959,26 +1073,32 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org): docker_upstream_names = ['hello-world', 'alpine'] new_pattern = '<%= organization.label %>' - lce = make_lifecycle_environment( + lce = module_target_sat.cli_factory.make_lifecycle_environment( {'organization-id': module_org.id, 'registry-name-pattern': new_pattern} ) - prod = make_product_wait({'organization-id': module_org.id}) - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + prod = module_target_sat.cli_factory.make_product_wait({'organization-id': module_org.id}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for docker_name in docker_upstream_names: - repo = _repo(prod['id'], upstream_name=docker_name) - Repository.synchronize({'id': repo['id']}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + repo = _repo(module_target_sat, prod['id'], upstream_name=docker_name) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) with pytest.raises(CLIReturnCodeError): - ContentView.version_promote( + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) @pytest.mark.tier2 - def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, module_product): + def test_negative_promote_and_set_non_unique_name_pattern( + self, module_org, module_product, module_target_sat + ): """Promote content view with multiple Docker repositories to lifecycle environment. Set registry name pattern to one that does not guarantee uniqueness. 
Verify that pattern has not been @@ -991,20 +1111,26 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, modu docker_upstream_names = ['hello-world', 'alpine'] new_pattern = '<%= organization.label %>' - content_view = make_content_view({'composite': False, 'organization-id': module_org.id}) + content_view = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) for docker_name in docker_upstream_names: - repo = _repo(module_product.id, upstream_name=docker_name) - Repository.synchronize({'id': repo['id']}) - ContentView.add_repository({'id': content_view['id'], 'repository-id': repo['id']}) - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) - lce = make_lifecycle_environment({'organization-id': module_org.id}) - ContentView.version_promote( + repo = _repo(module_target_sat, module_product.id, upstream_name=docker_name) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.ContentView.add_repository( + {'id': content_view['id'], 'repository-id': repo['id']} + ) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) + lce = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.version_promote( {'id': content_view['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) with pytest.raises(CLIReturnCodeError): - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'registry-name-pattern': new_pattern, 'id': lce['id'], @@ -1024,7 +1150,9 @@ class TestDockerActivationKey: """ @pytest.mark.tier2 - def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_promote): + def test_positive_add_docker_repo_cv( + self, module_org, module_lce, content_view_promote, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then create an activation key and associate it with the Docker content view. @@ -1034,7 +1162,7 @@ def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_ :expectedresults: Docker-based content view can be added to activation key """ - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': content_view_promote['content-view-id'], 'lifecycle-environment-id': module_lce.id, @@ -1044,7 +1172,9 @@ def test_positive_add_docker_repo_cv(self, module_org, module_lce, content_view_ assert activation_key['content-view'] == content_view_promote['content-view-name'] @pytest.mark.tier2 - def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_view_promote): + def test_positive_remove_docker_repo_cv( + self, module_org, module_lce, content_view_promote, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Create an activation key and associate it with the Docker content view. Then remove this content view from the activation @@ -1055,7 +1185,7 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi :expectedresults: Docker-based content view can be added and then removed from the activation key. 
""" - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': content_view_promote['content-view-id'], 'lifecycle-environment-id': module_lce.id, @@ -1065,14 +1195,16 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi assert activation_key['content-view'] == content_view_promote['content-view-name'] # Create another content view replace with - another_cv = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.publish({'id': another_cv['id']}) - another_cv = ContentView.info({'id': another_cv['id']}) - ContentView.version_promote( + another_cv = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': another_cv['id']}) + another_cv = module_target_sat.cli.ContentView.info({'id': another_cv['id']}) + module_target_sat.cli.ContentView.version_promote( {'id': another_cv['versions'][0]['id'], 'to-lifecycle-environment-id': module_lce.id} ) - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'id': activation_key['id'], 'organization-id': module_org.id, @@ -1080,11 +1212,13 @@ def test_positive_remove_docker_repo_cv(self, module_org, module_lce, content_vi 'lifecycle-environment-id': module_lce.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['content-view'] != content_view_promote['content-view-name'] @pytest.mark.tier2 - def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view_publish): + def test_positive_add_docker_repo_ccv( + self, module_org, module_lce, content_view_publish, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the @@ -1097,25 +1231,29 @@ def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view :BZ: 1359665 """ - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view_publish['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view_publish['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - comp_cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + comp_cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) + module_target_sat.cli.ContentView.version_promote( {'id': comp_cvv['id'], 'to-lifecycle-environment-id': module_lce.id} ) - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': comp_content_view['id'], 'lifecycle-environment-id': module_lce.id, @@ -1125,7 +1263,9 @@ def test_positive_add_docker_repo_ccv(self, module_org, module_lce, content_view assert activation_key['content-view'] == comp_content_view['name'] @pytest.mark.tier2 - def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_view_publish): + def test_positive_remove_docker_repo_ccv( + self, module_org, module_lce, content_view_publish, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the @@ -1139,25 +1279,29 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v :BZ: 1359665 """ - comp_content_view = make_content_view({'composite': True, 'organization-id': module_org.id}) - ContentView.update( + comp_content_view = module_target_sat.cli_factory.make_content_view( + {'composite': True, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.update( { 'component-ids': content_view_publish['id'], 'id': comp_content_view['id'], } ) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) assert content_view_publish['id'] in [ component['id'] for component in comp_content_view['components'] ] - ContentView.publish({'id': comp_content_view['id']}) - comp_content_view = ContentView.info({'id': comp_content_view['id']}) - comp_cvv = ContentView.version_info({'id': comp_content_view['versions'][0]['id']}) - ContentView.version_promote( + module_target_sat.cli.ContentView.publish({'id': comp_content_view['id']}) + comp_content_view = module_target_sat.cli.ContentView.info({'id': comp_content_view['id']}) + comp_cvv = module_target_sat.cli.ContentView.version_info( + {'id': comp_content_view['versions'][0]['id']} + ) + module_target_sat.cli.ContentView.version_promote( {'id': comp_cvv['id'], 'to-lifecycle-environment-id': module_lce.id} ) - activation_key = make_activation_key( + activation_key = module_target_sat.cli_factory.make_activation_key( { 'content-view-id': comp_content_view['id'], 'lifecycle-environment-id': module_lce.id, @@ -1167,14 +1311,16 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v assert activation_key['content-view'] == comp_content_view['name'] # Create another content view replace with - another_cv = make_content_view({'composite': False, 'organization-id': module_org.id}) - ContentView.publish({'id': another_cv['id']}) - another_cv = ContentView.info({'id': another_cv['id']}) - ContentView.version_promote( + another_cv = module_target_sat.cli_factory.make_content_view( + {'composite': False, 'organization-id': module_org.id} + ) + module_target_sat.cli.ContentView.publish({'id': another_cv['id']}) + another_cv = module_target_sat.cli.ContentView.info({'id': another_cv['id']}) + module_target_sat.cli.ContentView.version_promote( {'id': another_cv['versions'][0]['id'], 'to-lifecycle-environment-id': module_lce.id} ) - ActivationKey.update( + module_target_sat.cli.ActivationKey.update( { 'id': activation_key['id'], 'organization-id': module_org.id, @@ -1182,5 +1328,5 @@ def test_positive_remove_docker_repo_ccv(self, module_org, module_lce, content_v 'lifecycle-environment-id': module_lce.id, } ) - activation_key = ActivationKey.info({'id': activation_key['id']}) + activation_key = module_target_sat.cli.ActivationKey.info({'id': activation_key['id']}) assert activation_key['content-view'] != comp_content_view['name'] diff --git a/tests/foreman/cli/test_domain.py b/tests/foreman/cli/test_domain.py index b0d6aeca207..ad48827e8f1 100644 --- a/tests/foreman/cli/test_domain.py +++ b/tests/foreman/cli/test_domain.py @@ -19,9 +19,7 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.domain import Domain -from robottelo.cli.factory import CLIFactoryError, make_domain, make_location, make_org +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError 
from robottelo.utils.datafactory import ( filtered_datapoint, invalid_id_list, @@ -113,7 +111,7 @@ def valid_delete_params(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_create_update_delete_domain(): +def test_positive_create_update_delete_domain(module_target_sat): """Create domain, update and delete domain and set parameters :id: 018740bf-1551-4162-b88e-4d4905af097b @@ -123,9 +121,9 @@ def test_positive_create_update_delete_domain(): :CaseImportance: Critical """ options = valid_create_params()[0] - location = make_location() - org = make_org() - domain = make_domain( + location = module_target_sat.cli_factory.make_location() + org = module_target_sat.cli_factory.make_org() + domain = module_target_sat.cli_factory.make_domain( { 'name': options['name'], 'description': options['description'], @@ -141,8 +139,8 @@ def test_positive_create_update_delete_domain(): # set parameter parameter_options = valid_set_params()[0] parameter_options['domain-id'] = domain['id'] - Domain.set_parameter(parameter_options) - domain = Domain.info({'id': domain['id']}) + module_target_sat.cli.Domain.set_parameter(parameter_options) + domain = module_target_sat.cli.Domain.info({'id': domain['id']}) parameter = { # Satellite applies lower to parameter's name parameter_options['name'].lower(): parameter_options['value'] @@ -151,27 +149,29 @@ def test_positive_create_update_delete_domain(): # update domain options = valid_update_params()[0] - Domain.update(dict(options, id=domain['id'])) + module_target_sat.cli.Domain.update(dict(options, id=domain['id'])) # check - domain updated - domain = Domain.info({'id': domain['id']}) + domain = module_target_sat.cli.Domain.info({'id': domain['id']}) for key, val in options.items(): assert domain[key] == val # delete parameter - Domain.delete_parameter({'name': parameter_options['name'], 'domain-id': domain['id']}) + module_target_sat.cli.Domain.delete_parameter( + {'name': parameter_options['name'], 'domain-id': domain['id']} + ) # check - parameter not set - domain = Domain.info({'name': domain['name']}) + domain = module_target_sat.cli.Domain.info({'name': domain['name']}) assert len(domain['parameters']) == 0 # delete domain - Domain.delete({'id': domain['id']}) + module_target_sat.cli.Domain.delete({'id': domain['id']}) with pytest.raises(CLIReturnCodeError): - Domain.info({'id': domain['id']}) + module_target_sat.cli.Domain.info({'id': domain['id']}) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_create_params())) -def test_negative_create(options): +def test_negative_create(options, module_target_sat): """Create domain with invalid values :id: 6d3aec19-75dc-41ca-89af-fef0ca37082d @@ -183,11 +183,11 @@ def test_negative_create(options): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_domain(options) + module_target_sat.cli_factory.make_domain(options) @pytest.mark.tier2 -def test_negative_create_with_invalid_dns_id(): +def test_negative_create_with_invalid_dns_id(module_target_sat): """Attempt to register a domain with invalid id :id: 4aa52167-368a-41ad-87b7-41d468ad41a8 @@ -201,7 +201,7 @@ def test_negative_create_with_invalid_dns_id(): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError) as context: - make_domain({'name': gen_string('alpha'), 'dns-id': -1}) + module_target_sat.cli_factory.make_domain({'name': gen_string('alpha'), 'dns-id': -1}) valid_messages = ['Invalid smart-proxy id', 'Invalid capsule id'] exception_string = str(context.value) messages = [message for message in 
valid_messages if message in exception_string] @@ -210,7 +210,7 @@ def test_negative_create_with_invalid_dns_id(): @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_update_params())) -def test_negative_update(module_domain, options): +def test_negative_update(module_domain, options, module_target_sat): """Update domain with invalid values :id: 9fc708dc-20f9-4d7c-af53-863826462981 @@ -222,16 +222,16 @@ def test_negative_update(module_domain, options): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Domain.update(dict(options, id=module_domain.id)) + module_target_sat.cli.Domain.update(dict(options, id=module_domain.id)) # check - domain not updated - result = Domain.info({'id': module_domain.id}) + result = module_target_sat.cli.Domain.info({'id': module_domain.id}) for key in options.keys(): assert result[key] == getattr(module_domain, key) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_set_params())) -def test_negative_set_parameter(module_domain, options): +def test_negative_set_parameter(module_domain, options, module_target_sat): """Domain set-parameter with invalid values :id: 991fb849-83be-48f4-a12b-81eabb2bd8d3 @@ -245,15 +245,15 @@ def test_negative_set_parameter(module_domain, options): options['domain-id'] = module_domain.id # set parameter with pytest.raises(CLIReturnCodeError): - Domain.set_parameter(options) + module_target_sat.cli.Domain.set_parameter(options) # check - parameter not set - domain = Domain.info({'id': module_domain.id}) + domain = module_target_sat.cli.Domain.info({'id': module_domain.id}) assert len(domain['parameters']) == 0 @pytest.mark.tier2 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) -def test_negative_delete_by_id(entity_id): +def test_negative_delete_by_id(entity_id, module_target_sat): """Create Domain then delete it by wrong ID :id: 0e4ef107-f006-4433-abc3-f872613e0b91 @@ -265,4 +265,4 @@ def test_negative_delete_by_id(entity_id): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Domain.delete({'id': entity_id}) + module_target_sat.cli.Domain.delete({'id': entity_id}) diff --git a/tests/foreman/cli/test_environment.py b/tests/foreman/cli/test_environment.py index ae6e0d7e947..10481c743c6 100644 --- a/tests/foreman/cli/test_environment.py +++ b/tests/foreman/cli/test_environment.py @@ -21,8 +21,8 @@ from fauxfactory import gen_alphanumeric, gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_id_list, invalid_values_list, diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index f74c434b1ee..222473df032 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -23,23 +23,6 @@ from nailgun import entities import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.erratum import Erratum -from robottelo.cli.factory import ( - make_content_view_filter, - make_content_view_filter_rule, - make_host_collection, - make_repository, - setup_org_for_a_custom_repo, - setup_org_for_a_rh_repo, -) -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.package import Package -from robottelo.cli.repository import Repository -from 
robottelo.cli.repository_set import RepositorySet from robottelo.config import settings from robottelo.constants import ( DEFAULT_ARCHITECTURE, @@ -58,6 +41,7 @@ REPOS, REPOSET, ) +from robottelo.exceptions import CLIReturnCodeError from robottelo.hosts import ContentHost PER_PAGE = 10 @@ -127,7 +111,7 @@ def orgs(): @pytest.fixture(scope='module') -def products_with_repos(orgs): +def products_with_repos(orgs, module_target_sat): """Create and return a list of products. For each product, create and sync a single repo.""" products = [] # Create one product for each org, and a second product for the last org. @@ -137,7 +121,7 @@ def products_with_repos(orgs): # with the one we already have. product.organization = org products.append(product) - repo = make_repository( + repo = module_target_sat.cli_factory.make_repository( { 'download-policy': 'immediate', 'organization-id': product.organization.id, @@ -145,15 +129,17 @@ def products_with_repos(orgs): 'url': params['url'], } ) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) return products @pytest.fixture(scope='module') -def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce): +def rh_repo( + module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce, module_target_sat +): """Add a subscription for the Satellite Tools repo to activation key.""" - setup_org_for_a_rh_repo( + module_target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -167,9 +153,11 @@ def rh_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv @pytest.fixture(scope='module') -def custom_repo(module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce): +def custom_repo( + module_entitlement_manifest_org, module_lce, module_cv, module_ak_cv_lce, module_target_sat +): """Create custom repo and add a subscription to activation key.""" - setup_org_for_a_custom_repo( + module_target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': REPO_WITH_ERRATA['url'], 'organization-id': module_entitlement_manifest_org.id, @@ -244,18 +232,24 @@ def errata_hosts(register_hosts): @pytest.fixture(scope='module') -def host_collection(module_entitlement_manifest_org, module_ak_cv_lce, register_hosts): +def host_collection( + module_entitlement_manifest_org, module_ak_cv_lce, register_hosts, module_target_sat +): """Create and setup host collection.""" - host_collection = make_host_collection({'organization-id': module_entitlement_manifest_org.id}) - host_ids = [Host.info({'name': host.hostname})['id'] for host in register_hosts] - HostCollection.add_host( + host_collection = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_entitlement_manifest_org.id} + ) + host_ids = [ + module_target_sat.cli.Host.info({'name': host.hostname})['id'] for host in register_hosts + ] + module_target_sat.cli.HostCollection.add_host( { 'id': host_collection['id'], 'organization-id': module_entitlement_manifest_org.id, 'host-ids': host_ids, } ) - ActivationKey.add_host_collection( + module_target_sat.cli.ActivationKey.add_host_collection( { 'id': module_ak_cv_lce.id, 'host-collection-id': host_collection['id'], @@ -276,7 +270,7 @@ def is_rpm_installed(host, rpm=None): return not host.execute(f'rpm -q {rpm}').status -def get_sorted_errata_info_by_id(errata_ids, sort_by='issued', sort_reversed=False): +def get_sorted_errata_info_by_id(sat, errata_ids, sort_by='issued', 
sort_reversed=False): """Query hammer for erratum ids info :param errata_ids: a list of errata id @@ -292,14 +286,17 @@ def get_sorted_errata_info_by_id(errata_ids, sort_by='issued', sort_reversed=Fal if len(errata_ids) > PER_PAGE: raise Exception('Errata ids length exceeded') errata_info = [ - Erratum.info(options={'id': errata_id}, output_format='json') for errata_id in errata_ids + sat.cli.Erratum.info(options={'id': errata_id}, output_format='json') + for errata_id in errata_ids ] return sorted(errata_info, key=itemgetter(sort_by), reverse=sort_reversed) -def get_errata_ids(*params): +def get_errata_ids(sat, *params): """Return list of sets of errata ids corresponding to the provided params.""" - errata_ids = [{errata['errata-id'] for errata in Erratum.list(param)} for param in params] + errata_ids = [ + {errata['errata-id'] for errata in sat.cli.Erratum.list(param)} for param in params + ] return errata_ids[0] if len(errata_ids) == 1 else errata_ids @@ -314,7 +311,7 @@ def check_errata(errata_ids, by_org=False): assert repo_with_errata['errata_id'] in ids -def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): +def filter_sort_errata(sat, org, sort_by_date='issued', filter_by_org=None): """Compare the list of errata returned by `hammer erratum {list|info}` to the expected values, subject to the date sort and organization filter options. @@ -337,7 +334,7 @@ def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): sort_reversed = True if sort_order == 'DESC' else False - errata_list = Erratum.list(list_param) + errata_list = sat.cli.Erratum.list(list_param) assert len(errata_list) > 0 # Build a sorted errata info list, which also contains the sort field. @@ -364,7 +361,9 @@ def cv_publish_promote(sat, cv, org, lce, force=False): :param lce: lifecycle environment :type lce: entities.LifecycleEnvironment """ - sat.cli.ContentView.publish( + sat.cli.ContentView.publish({'id': cv.id}) + cvv = sat.cli.ContentView.info({'id': cv.id})['versions'][-1] + sat.cli.ContentView.version_promote( { 'id': cv.id, 'organization': org, @@ -453,7 +452,7 @@ def test_positive_install_by_host_collection_and_org( organization_key = 'organization-title' organization_value = module_entitlement_manifest_org.title - JobInvocation.create( + target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': host_collection_query, @@ -468,7 +467,7 @@ def test_positive_install_by_host_collection_and_org( @pytest.mark.tier3 def test_negative_install_by_hc_id_without_errata_info( - module_entitlement_manifest_org, host_collection, errata_hosts + module_entitlement_manifest_org, host_collection, errata_hosts, target_sat ): """Attempt to install an erratum on a host collection by host collection id but no errata info specified. 
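For reference, the `cv_publish_promote` rework a few hunks above switches to an explicit publish-then-promote flow through `sat.cli.ContentView`. A minimal standalone sketch of that flow follows; it is illustrative only, and the `version_promote` option names are assumptions based on hammer's content-view subcommands rather than lines taken from this diff.

def cv_publish_promote_sketch(sat, cv, org, lce):
    # Publish the content view, then read its newest version back from `content-view info`.
    sat.cli.ContentView.publish({'id': cv.id})
    cvv = sat.cli.ContentView.info({'id': cv.id})['versions'][-1]
    # Promote that version into the target lifecycle environment (option names assumed).
    sat.cli.ContentView.version_promote(
        {
            'id': cvv['id'],
            'organization-id': org.id,
            'to-lifecycle-environment-id': lce.id,
        }
    )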
@@ -487,7 +486,7 @@ def test_negative_install_by_hc_id_without_errata_info( :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match="Error: Option '--errata' is required"): - HostCollection.erratum_install( + target_sat.cli.HostCollection.erratum_install( { 'id': host_collection['id'], 'organization-id': module_entitlement_manifest_org.id, @@ -497,7 +496,7 @@ def test_negative_install_by_hc_id_without_errata_info( @pytest.mark.tier3 def test_negative_install_by_hc_name_without_errata_info( - module_entitlement_manifest_org, host_collection, errata_hosts + module_entitlement_manifest_org, host_collection, errata_hosts, target_sat ): """Attempt to install an erratum on a host collection by host collection name but no errata info specified. @@ -516,7 +515,7 @@ def test_negative_install_by_hc_name_without_errata_info( :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match="Error: Option '--errata' is required"): - HostCollection.erratum_install( + target_sat.cli.HostCollection.erratum_install( { 'name': host_collection['name'], 'organization-id': module_entitlement_manifest_org.id, @@ -525,7 +524,9 @@ def test_negative_install_by_hc_name_without_errata_info( @pytest.mark.tier3 -def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_collection): +def test_negative_install_without_hc_info( + module_entitlement_manifest_org, host_collection, module_target_sat +): """Attempt to install an erratum on a host collection without specifying host collection info. This test only works with two or more host collections (BZ#1928281). We have the one from the fixture, just need to create one more at the start of the test. @@ -545,9 +546,11 @@ def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_ :CaseLevel: System """ - make_host_collection({'organization-id': module_entitlement_manifest_org.id}) + module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_entitlement_manifest_org.id} + ) with pytest.raises(CLIReturnCodeError): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( { 'organization-id': module_entitlement_manifest_org.id, 'errata': [REPO_WITH_ERRATA['errata'][0]['id']], @@ -557,7 +560,7 @@ def test_negative_install_without_hc_info(module_entitlement_manifest_org, host_ @pytest.mark.tier3 def test_negative_install_by_hc_id_without_org_info( - module_entitlement_manifest_org, host_collection + module_entitlement_manifest_org, host_collection, module_target_sat ): """Attempt to install an erratum on a host collection by host collection id but without specifying any org info. @@ -575,14 +578,14 @@ def test_negative_install_by_hc_id_without_org_info( :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match='Error: Could not find organization'): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( {'id': host_collection['id'], 'errata': [REPO_WITH_ERRATA['errata'][0]['id']]} ) @pytest.mark.tier3 def test_negative_install_by_hc_name_without_org_info( - module_entitlement_manifest_org, host_collection + module_entitlement_manifest_org, host_collection, module_target_sat ): """Attempt to install an erratum on a host collection by host collection name but without specifying any org info. 
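The negative host-collection cases in the surrounding hunks all reduce to one pattern: drive the hammer wrapper through the Satellite fixture and expect the relocated `CLIReturnCodeError`. A condensed, hypothetical sketch of that shared shape (the helper name and `options` contents are illustrative, not from this diff):

import pytest

from robottelo.exceptions import CLIReturnCodeError


def assert_erratum_install_rejected(sat, options, match=None):
    # Each negative case omits a required option (--errata, --id/--name, or the org options)
    # and expects hammer to exit non-zero, which the CLI wrapper surfaces as CLIReturnCodeError.
    with pytest.raises(CLIReturnCodeError, match=match):
        sat.cli.HostCollection.erratum_install(options)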
@@ -600,14 +603,14 @@ def test_negative_install_by_hc_name_without_org_info( :CaseLevel: System """ with pytest.raises(CLIReturnCodeError, match='Error: Could not find organization'): - HostCollection.erratum_install( + module_target_sat.cli.HostCollection.erratum_install( {'name': host_collection['name'], 'errata': [REPO_WITH_ERRATA['errata'][0]['id']]} ) @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_hosts): +def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_hosts, target_sat): """View a list of affected content hosts for an erratum. :id: 3b592253-52c0-4165-9a48-ba55287e9ee9 @@ -622,7 +625,7 @@ def test_positive_list_affected_chosts(module_entitlement_manifest_org, errata_h :CaseAutomation: Automated """ - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {REPO_WITH_ERRATA["errata"][0]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -671,7 +674,7 @@ def test_install_errata_to_one_host( # Add ssh keys for host in errata_hosts: host.add_rex_key(satellite=target_sat) - Host.errata_recalculate({'host-id': host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': host.nailgun_host.id}) timestamp = (datetime.utcnow() - timedelta(minutes=1)).strftime(TIMESTAMP_FMT) target_sat.wait_for_tasks( search_query=( @@ -784,7 +787,7 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( # Apply a filter and rule to the CV to hide the RPM, thus making erratum not installable # Make RPM exclude filter - cv_filter = make_content_view_filter( + cv_filter = target_sat.cli_factory.make_content_view_filter( { 'content-view-id': module_cv.id, 'name': 'erratum_restrict_test', @@ -806,7 +809,7 @@ def cleanup(): ) # Make rule to hide the RPM that creates the need for the installable erratum - make_content_view_filter_rule( + target_sat.cli_factory.content_view_filter_rule( { 'content-view-id': module_cv.id, 'content-view-filter-id': cv_filter['filter-id'], @@ -886,7 +889,7 @@ def test_host_errata_search_commands( for host in errata_hosts: timestamp = (datetime.utcnow() - timedelta(minutes=1)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': host.nailgun_host.id}) # Wait for upload profile event (in case Satellite system slow) target_sat.wait_for_tasks( search_query=( @@ -898,7 +901,7 @@ def test_host_errata_search_commands( ) # Step 1: Search for hosts that require bugfix advisories - result = Host.list( + result = target_sat.cli.Host.list( { 'search': 'errata_status = errata_needed', 'organization-id': module_entitlement_manifest_org.id, @@ -910,7 +913,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 2: Search for hosts that require security advisories - result = Host.list( + result = target_sat.cli.Host.list( { 'search': 'errata_status = security_needed', 'organization-id': module_entitlement_manifest_org.id, @@ -922,7 +925,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname in result # Step 3: Search for hosts that require the specified bugfix advisory - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {errata[1]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -934,7 +937,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 4: Search for hosts that require the 
specified security advisory - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_errata = {errata[0]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -946,7 +949,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname in result # Step 5: Search for hosts that require the specified bugfix package - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[1]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -958,7 +961,7 @@ def test_host_errata_search_commands( assert errata_hosts[1].hostname not in result # Step 6: Search for hosts that require the specified security package - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[0]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -971,7 +974,7 @@ def test_host_errata_search_commands( # Step 7: Apply filter and rule to CV to hide RPM, thus making erratum not installable # Make RPM exclude filter - cv_filter = make_content_view_filter( + cv_filter = target_sat.cli_factory.make_content_view_filter( { 'content-view-id': module_cv.id, 'name': 'erratum_search_test', @@ -993,7 +996,7 @@ def cleanup(): ) # Make rule to exclude the specified bugfix package - make_content_view_filter_rule( + target_sat.cli_factory.content_view_filter_rule( { 'content-view-id': module_cv.id, 'content-view-filter-id': cv_filter['filter-id'], @@ -1006,7 +1009,7 @@ def cleanup(): # Step 8: Run tests again. Applicable should still be true, installable should now be false. # Search for hosts that require the bugfix package. - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'applicable_rpms = {errata[1]["new_package"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -1018,7 +1021,7 @@ def cleanup(): assert errata_hosts[1].hostname not in result # Search for hosts that require the specified bugfix advisory. - result = Host.list( + result = target_sat.cli.Host.list( { 'search': f'installable_errata = {errata[1]["id"]}', 'organization-id': module_entitlement_manifest_org.id, @@ -1124,7 +1127,7 @@ def test_positive_list_filter_by_product_and_org( @pytest.mark.tier3 -def test_negative_list_filter_by_product_name(products_with_repos): +def test_negative_list_filter_by_product_name(products_with_repos, module_target_sat): """Attempt to Filter errata by product name :id: c7a5988b-668f-4c48-bc1e-97cb968a2563 @@ -1142,7 +1145,9 @@ def test_negative_list_filter_by_product_name(products_with_repos): :CaseLevel: System """ with pytest.raises(CLIReturnCodeError): - Erratum.list({'product': products_with_repos[0].name, 'per-page': PER_PAGE_LARGE}) + module_target_sat.cli.Erratum.list( + {'product': products_with_repos[0].name, 'per-page': PER_PAGE_LARGE} + ) @pytest.mark.tier3 @@ -1182,7 +1187,7 @@ def test_positive_list_filter_by_org(products_with_repos, filter_by_org): @pytest.mark.run_in_one_thread @pytest.mark.tier3 -def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): +def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo, target_sat): """Filter errata by CVE :id: 7791137c-95a7-4518-a56b-766a5680c5fb @@ -1194,7 +1199,7 @@ def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): :expectedresults: Errata is filtered by CVE. 
""" - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], 'organization-id': module_entitlement_manifest_org.id, @@ -1203,14 +1208,14 @@ def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): 'basearch': 'x86_64', } ) - Repository.synchronize( + target_sat.cli.Repository.synchronize( { 'name': REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, 'product': PRDS['rhel'], } ) - repository_info = Repository.info( + repository_info = target_sat.cli.Repository.info( { 'name': REPOS['rhva6']['name'], 'organization-id': module_entitlement_manifest_org.id, @@ -1219,17 +1224,18 @@ def test_positive_list_filter_by_cve(module_entitlement_manifest_org, rh_repo): ) assert REAL_4_ERRATA_ID in { - errata['errata-id'] for errata in Erratum.list({'repository-id': repository_info['id']}) + errata['errata-id'] + for errata in target_sat.cli.Erratum.list({'repository-id': repository_info['id']}) } for errata_cve in REAL_4_ERRATA_CVES: assert REAL_4_ERRATA_ID in { - errata['errata-id'] for errata in Erratum.list({'cve': errata_cve}) + errata['errata-id'] for errata in target_sat.cli.Erratum.list({'cve': errata_cve}) } @pytest.mark.tier3 -def test_positive_check_errata_dates(module_entitlement_manifest_org): +def test_positive_check_errata_dates(module_entitlement_manifest_org, module_target_sat): """Check for errata dates in `hammer erratum list` :id: b19286ae-bdb4-4319-87d0-5d3ff06c5f38 @@ -1243,19 +1249,19 @@ def test_positive_check_errata_dates(module_entitlement_manifest_org): :BZ: 1695163 """ product = entities.Product(organization=module_entitlement_manifest_org).create() - repo = make_repository( + repo = module_target_sat.cli_factory.make_repository( {'content-type': 'yum', 'product-id': product.id, 'url': REPO_WITH_ERRATA['url']} ) # Synchronize custom repository - Repository.synchronize({'id': repo['id']}) - result = Erratum.list(options={'per-page': '5', 'fields': 'Issued'}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + result = module_target_sat.cli.Erratum.list(options={'per-page': '5', 'fields': 'Issued'}) assert 'issued' in result[0] # Verify any errata ISSUED date from stdout validate_issued_date = datetime.strptime(result[0]['issued'], '%Y-%m-%d').date() assert isinstance(validate_issued_date, date) - result = Erratum.list(options={'per-page': '5', 'fields': 'Updated'}) + result = module_target_sat.cli.Erratum.list(options={'per-page': '5', 'fields': 'Updated'}) assert 'updated' in result[0] # Verify any errata UPDATED date from stdout @@ -1357,11 +1363,13 @@ def test_apply_errata_using_default_content_view(errata_host, target_sat): :CaseImportance: High """ # check that package errata is applicable - erratum = Host.errata_list({'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + erratum = target_sat.cli.Host.errata_list( + {'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'} + ) assert len(erratum) == 1 assert erratum[0]['installable'] == 'true' # Update errata from Library, i.e. 
Default CV - result = JobInvocation.create( + result = target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': f'name = {errata_host.hostname}', @@ -1371,7 +1379,7 @@ def test_apply_errata_using_default_content_view(errata_host, target_sat): )[1]['id'] assert 'success' in result timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) target_sat.wait_for_tasks( search_query=( 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' @@ -1382,7 +1390,9 @@ def test_apply_errata_using_default_content_view(errata_host, target_sat): ) # Assert that the erratum is no longer applicable - erratum = Host.errata_list({'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'}) + erratum = target_sat.cli.Host.errata_list( + {'host': errata_host.hostname, 'search': f'id = {REAL_0_ERRATA_ID}'} + ) assert len(erratum) == 0 @@ -1408,7 +1418,7 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe :CaseImportance: High """ # check that package is applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1416,7 +1426,7 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe } ) timestamp = (datetime.utcnow()).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) target_sat.wait_for_tasks( search_query=( 'label = Actions::Katello::Applicability::Hosts::BulkGenerate' @@ -1428,7 +1438,7 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe assert len(applicable_packages) == 1 assert REAL_RHEL7_0_2_PACKAGE_NAME in applicable_packages[0]['filename'] # Update package from Library, i.e. Default CV - result = JobInvocation.create( + result = target_sat.cli.JobInvocation.create( { 'feature': 'katello_errata_install', 'search-query': f'name = {errata_host.hostname}', @@ -1439,7 +1449,7 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe assert 'success' in result # note time for later wait_for_tasks include 2 mins margin of safety. timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) # Wait for upload profile event (in case Satellite system slow) target_sat.wait_for_tasks( @@ -1452,8 +1462,8 @@ def test_update_applicable_package_using_default_content_view(errata_host, targe ) # Assert that the package is no longer applicable - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) - applicable_packages = Package.list( + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1488,7 +1498,7 @@ def test_downgrade_applicable_package_using_default_content_view(errata_host, ta # Update package from Library, i.e. 
Default CV errata_host.run(f'yum -y update {REAL_RHEL7_0_2_PACKAGE_NAME}') # Assert that the package is not applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1501,7 +1511,7 @@ def test_downgrade_applicable_package_using_default_content_view(errata_host, ta errata_host.run(f'curl -O {settings.repos.epel_repo.url}/{PSUTIL_RPM}') timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime(TIMESTAMP_FMT) errata_host.run(f'yum -y downgrade {PSUTIL_RPM}') - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) # Wait for upload profile event (in case Satellite system slow) target_sat.wait_for_tasks( search_query=( @@ -1512,7 +1522,7 @@ def test_downgrade_applicable_package_using_default_content_view(errata_host, ta max_tries=10, ) # check that package is applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1544,8 +1554,8 @@ def test_install_applicable_package_to_registerd_host(chost, target_sat): :CaseImportance: Medium """ # Assert that the package is not applicable - Host.errata_recalculate({'host-id': chost.nailgun_host.id}) - applicable_packages = Package.list( + target_sat.cli.Host.errata_recalculate({'host-id': chost.nailgun_host.id}) + applicable_packages = target_sat.cli.Package.list( { 'host-id': chost.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1567,10 +1577,10 @@ def test_install_applicable_package_to_registerd_host(chost, target_sat): search_rate=15, max_tries=10, ) - Host.errata_recalculate({'host-id': chost.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': chost.nailgun_host.id}) # check that package is applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': chost.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1608,7 +1618,7 @@ def test_downgrading_package_shows_errata_from_library( # Update package from Library, i.e. Default CV errata_host.run(f'yum -y update {REAL_RHEL7_0_2_PACKAGE_NAME}') # Assert that the package is not applicable - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': errata_host.nailgun_host.id, 'packages-restrict-applicable': 'true', @@ -1622,7 +1632,7 @@ def test_downgrading_package_shows_errata_from_library( errata_host.run(f'curl -O {settings.repos.epel_repo.url}/{PSUTIL_RPM}') errata_host.run(f'yum -y downgrade {PSUTIL_RPM}') # Wait for upload profile event (in case Satellite system slow) - Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) + target_sat.cli.Host.errata_recalculate({'host-id': errata_host.nailgun_host.id}) # Wait for upload profile event (in case Satellite system slow) target_sat.wait_for_tasks( search_query=( @@ -1644,7 +1654,7 @@ def test_downgrading_package_shows_errata_from_library( @pytest.mark.skip_if_open('BZ:1785146') @pytest.mark.tier2 -def test_errata_list_by_contentview_filter(module_entitlement_manifest_org): +def test_errata_list_by_contentview_filter(module_entitlement_manifest_org, module_target_sat): """Hammer command to list errata should take filter ID into consideration. 
:id: e9355a92-8354-4853-a806-d388ed32d73e @@ -1665,17 +1675,17 @@ def test_errata_list_by_contentview_filter(module_entitlement_manifest_org): :BZ: 1785146 """ product = entities.Product(organization=module_entitlement_manifest_org).create() - repo = make_repository( + repo = module_target_sat.cli_factory.make_repository( {'content-type': 'yum', 'product-id': product.id, 'url': REPO_WITH_ERRATA['url']} ) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) lce = entities.LifecycleEnvironment(organization=module_entitlement_manifest_org).create() cv = entities.ContentView( organization=module_entitlement_manifest_org, repository=[repo['id']] ).create() cv_publish_promote(cv, module_entitlement_manifest_org, lce) errata_count = len( - Erratum.list( + module_target_sat.cli.Erratum.list( { 'organization-id': module_entitlement_manifest_org.id, 'content-view-id': cv.id, @@ -1690,7 +1700,7 @@ def test_errata_list_by_contentview_filter(module_entitlement_manifest_org): cv.publish() cv_version_info = cv.read().version[1].read() errata_count_cvf = len( - Erratum.list( + module_target_sat.cli.Erratum.list( { 'organization-id': module_entitlement_manifest_org.id, 'content-view-id': cv.id, diff --git a/tests/foreman/cli/test_fact.py b/tests/foreman/cli/test_fact.py index 96fee50c126..1b1dd658250 100644 --- a/tests/foreman/cli/test_fact.py +++ b/tests/foreman/cli/test_fact.py @@ -19,8 +19,6 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.fact import Fact - pytestmark = [pytest.mark.tier1] @@ -29,7 +27,7 @@ @pytest.mark.parametrize( 'fact', ['uptime', 'os::family', 'uptime_seconds', 'memorysize', 'ipaddress'] ) -def test_positive_list_by_name(fact): +def test_positive_list_by_name(fact, module_target_sat): """Test Fact List :id: 83794d97-d21b-4482-9522-9b41053e595f @@ -40,12 +38,12 @@ def test_positive_list_by_name(fact): :BZ: 2161294 """ - facts = Fact().list(options={'search': f'fact={fact}'}) + facts = module_target_sat.cli.Fact().list(options={'search': f'fact={fact}'}) assert facts[0]['fact'] == fact @pytest.mark.parametrize('fact', ['uptime_days', 'memoryfree']) -def test_negative_list_ignored_by_name(fact): +def test_negative_list_ignored_by_name(fact, module_target_sat): """Test Fact List :id: b6375f39-b8c3-4807-b04b-b0e43644441f @@ -54,14 +52,16 @@ def test_negative_list_ignored_by_name(fact): :parametrized: yes """ - assert Fact().list(options={'search': f'fact={fact}'}) == [] + assert module_target_sat.cli.Fact().list(options={'search': f'fact={fact}'}) == [] -def test_negative_list_by_name(): +def test_negative_list_by_name(module_target_sat): """Test Fact List failure :id: bd56d27e-59c0-4f35-bd53-2999af7c6946 :expectedresults: Fact List is not displayed """ - assert Fact().list(options={'search': f'fact={gen_string("alpha")}'}) == [] + assert ( + module_target_sat.cli.Fact().list(options={'search': f'fact={gen_string("alpha")}'}) == [] + ) diff --git a/tests/foreman/cli/test_filter.py b/tests/foreman/cli/test_filter.py index da5eccb86c2..7c382aa7250 100644 --- a/tests/foreman/cli/test_filter.py +++ b/tests/foreman/cli/test_filter.py @@ -18,30 +18,29 @@ """ import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_filter, make_location, make_org, make_role -from robottelo.cli.filter import Filter -from robottelo.cli.role import Role +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='module') -def module_perms(): +def 
module_perms(module_target_sat): """Search for provisioning template permissions. Set ``cls.ct_perms``.""" perms = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=User"}) + for permission in module_target_sat.cli.Filter.available_permissions( + {"search": "resource_type=User"} + ) ] return perms @pytest.fixture(scope='function') -def function_role(): +def function_role(target_sat): """Create a role that a filter would be assigned""" - return make_role() + return target_sat.cli_factory.make_role() @pytest.mark.tier1 -def test_positive_create_with_permission(module_perms, function_role): +def test_positive_create_with_permission(module_perms, function_role, target_sat): """Create a filter and assign it some permissions. :id: 6da6c5d3-2727-4eb7-aa15-9f7b6f91d3b2 @@ -51,12 +50,14 @@ def test_positive_create_with_permission(module_perms, function_role): :CaseImportance: Critical """ # Assign filter to created role - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) assert set(filter_['permissions'].split(", ")) == set(module_perms) @pytest.mark.tier1 -def test_positive_create_with_org(module_perms, function_role): +def test_positive_create_with_org(module_perms, function_role, target_sat): """Create a filter and assign it some permissions. :id: f6308192-0e1f-427b-a296-b285f6684691 @@ -67,9 +68,9 @@ def test_positive_create_with_org(module_perms, function_role): :CaseImportance: Critical """ - org = make_org() + org = target_sat.cli_factory.make_org() # Assign filter to created role - filter_ = make_filter( + filter_ = target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -82,7 +83,7 @@ def test_positive_create_with_org(module_perms, function_role): @pytest.mark.tier1 -def test_positive_create_with_loc(module_perms, function_role): +def test_positive_create_with_loc(module_perms, function_role, module_target_sat): """Create a filter and assign it some permissions. :id: d7d1969a-cb30-4e97-a9a3-3a4aaf608795 @@ -93,9 +94,9 @@ def test_positive_create_with_loc(module_perms, function_role): :CaseImportance: Critical """ - loc = make_location() + loc = module_target_sat.cli_factory.make_location() # Assign filter to created role - filter_ = make_filter( + filter_ = module_target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -108,7 +109,7 @@ def test_positive_create_with_loc(module_perms, function_role): @pytest.mark.tier1 -def test_positive_delete(module_perms, function_role): +def test_positive_delete(module_perms, function_role, module_target_sat): """Create a filter and delete it afterwards. 
:id: 97d1093c-0d49-454b-86f6-f5be87b32775 @@ -117,15 +118,17 @@ def test_positive_delete(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) - Filter.delete({'id': filter_['id']}) + filter_ = module_target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) + module_target_sat.cli.Filter.delete({'id': filter_['id']}) with pytest.raises(CLIReturnCodeError): - Filter.info({'id': filter_['id']}) + module_target_sat.cli.Filter.info({'id': filter_['id']}) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_role(module_perms, function_role): +def test_positive_delete_role(module_perms, function_role, target_sat): """Create a filter and delete the role it points at. :id: e2adb6a4-e408-4912-a32d-2bf2c43187d9 @@ -134,19 +137,21 @@ def test_positive_delete_role(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) # A filter depends on a role. Deleting a role implicitly deletes the # filter pointing at it. - Role.delete({'id': function_role['id']}) + target_sat.cli.Role.delete({'id': function_role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': function_role['id']}) + target_sat.cli.Role.info({'id': function_role['id']}) with pytest.raises(CLIReturnCodeError): - Filter.info({'id': filter_['id']}) + target_sat.cli.Filter.info({'id': filter_['id']}) @pytest.mark.tier1 -def test_positive_update_permissions(module_perms, function_role): +def test_positive_update_permissions(module_perms, function_role, target_sat): """Create a filter and update its permissions. :id: 3d6a52d8-2f8f-4f97-a155-9b52888af16e @@ -155,18 +160,22 @@ def test_positive_update_permissions(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) new_perms = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=User"}) + for permission in target_sat.cli.Filter.available_permissions( + {"search": "resource_type=User"} + ) ] - Filter.update({'id': filter_['id'], 'permissions': new_perms}) - filter_ = Filter.info({'id': filter_['id']}) + target_sat.cli.Filter.update({'id': filter_['id'], 'permissions': new_perms}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) assert set(filter_['permissions'].split(", ")) == set(new_perms) @pytest.mark.tier1 -def test_positive_update_role(module_perms, function_role): +def test_positive_update_role(module_perms, function_role, target_sat): """Create a filter and assign it to another role. 
:id: 2950b3a1-2bce-447f-9df2-869b1d10eaf5 @@ -175,16 +184,18 @@ def test_positive_update_role(module_perms, function_role): :CaseImportance: Critical """ - filter_ = make_filter({'role-id': function_role['id'], 'permissions': module_perms}) + filter_ = target_sat.cli_factory.make_filter( + {'role-id': function_role['id'], 'permissions': module_perms} + ) # Update with another role - new_role = make_role() - Filter.update({'id': filter_['id'], 'role-id': new_role['id']}) - filter_ = Filter.info({'id': filter_['id']}) + new_role = target_sat.cli_factory.make_role() + target_sat.cli.Filter.update({'id': filter_['id'], 'role-id': new_role['id']}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) assert filter_['role'] == new_role['name'] @pytest.mark.tier1 -def test_positive_update_org_loc(module_perms, function_role): +def test_positive_update_org_loc(module_perms, function_role, target_sat): """Create a filter and assign it to another organization and location. :id: 9bb59109-9701-4ef3-95c6-81f387d372da @@ -195,9 +206,9 @@ def test_positive_update_org_loc(module_perms, function_role): :CaseImportance: Critical """ - org = make_org() - loc = make_location() - filter_ = make_filter( + org = target_sat.cli_factory.make_org() + loc = target_sat.cli_factory.make_location() + filter_ = target_sat.cli_factory.make_filter( { 'role-id': function_role['id'], 'permissions': module_perms, @@ -207,9 +218,9 @@ def test_positive_update_org_loc(module_perms, function_role): } ) # Update org and loc - new_org = make_org() - new_loc = make_location() - Filter.update( + new_org = target_sat.cli_factory.make_org() + new_loc = target_sat.cli_factory.make_location() + target_sat.cli.Filter.update( { 'id': filter_['id'], 'permissions': module_perms, @@ -218,7 +229,7 @@ def test_positive_update_org_loc(module_perms, function_role): 'override': 1, } ) - filter_ = Filter.info({'id': filter_['id']}) + filter_ = target_sat.cli.Filter.info({'id': filter_['id']}) # We expect here only one organization and location assert filter_['organizations'][0] == new_org['name'] assert filter_['locations'][0] == new_loc['name'] diff --git a/tests/foreman/cli/test_globalparam.py b/tests/foreman/cli/test_globalparam.py index 4fb6b1949cb..2a5d49cf966 100644 --- a/tests/foreman/cli/test_globalparam.py +++ b/tests/foreman/cli/test_globalparam.py @@ -21,13 +21,11 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.globalparam import GlobalParameter - pytestmark = [pytest.mark.tier1] @pytest.mark.upgrade -def test_positive_list_delete_by_name(): +def test_positive_list_delete_by_name(module_target_sat): """Test Global Param List :id: 8dd6c4e8-4ec9-4bee-8a04-f5788960973a @@ -40,13 +38,13 @@ def test_positive_list_delete_by_name(): value = f'val-{alphastring()} {alphastring()}' # Create - GlobalParameter().set({'name': name, 'value': value}) + module_target_sat.cli.GlobalParameter().set({'name': name, 'value': value}) # List by name - result = GlobalParameter().list({'search': name}) + result = module_target_sat.cli.GlobalParameter().list({'search': name}) assert len(result) == 1 assert result[0]['value'] == value # Delete - GlobalParameter().delete({'name': name}) - assert len(GlobalParameter().list({'search': name})) == 0 + module_target_sat.cli.GlobalParameter().delete({'name': name}) + assert len(module_target_sat.cli.GlobalParameter().list({'search': name})) == 0 diff --git a/tests/foreman/cli/test_host.py b/tests/foreman/cli/test_host.py index 5453de02b53..5b0e17650e8 100644 --- 
a/tests/foreman/cli/test_host.py +++ b/tests/foreman/cli/test_host.py @@ -24,19 +24,6 @@ from wait_for import TimedOutError, wait_for import yaml -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - CLIFactoryError, - add_role_permissions, - make_fake_host, - make_host, - setup_org_for_a_rh_repo, -) -from robottelo.cli.host import Host, HostInterface, HostTraces -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.package import Package -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import ( DEFAULT_SUBSCRIPTION_NAME, @@ -51,6 +38,7 @@ REPOSET, SM_OVERALL_STATUS, ) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHostError from robottelo.logging import logger from robottelo.utils.datafactory import ( @@ -72,7 +60,7 @@ def function_host(target_sat): host_template = target_sat.api.Host() host_template.create_missing() # using CLI to create host - host = make_host( + host = target_sat.cli_factory.make_host( { 'architecture-id': host_template.architecture.id, 'domain-id': host_template.domain.id, @@ -87,7 +75,7 @@ def function_host(target_sat): } ) yield host - Host.delete({'id': host['id']}) + target_sat.cli.Host.delete({'id': host['id']}) @pytest.fixture(scope="function") @@ -261,7 +249,7 @@ def parse_cli_entity_list_help_message(help_message): return parsed_dict -def test_positive_search_all_field_sets(): +def test_positive_search_all_field_sets(module_target_sat): """All fields in predefined field sets from hammer host list --help message are shown when specified as --fields in hammer host list command Note: host was created, so we there will always be at least 1 host @@ -283,12 +271,14 @@ def test_positive_search_all_field_sets(): :customerscenario: true """ - new_host = make_fake_host() - host_help_yaml = Host.list(options={'help': ''}, output_format='yaml') + new_host = module_target_sat.cli_factory.make_fake_host() + host_help_yaml = module_target_sat.cli.Host.list(options={'help': ''}, output_format='yaml') host_help = yaml.load(host_help_yaml, yaml.SafeLoader) parsed_dict = parse_cli_entity_list_help_message(host_help[':message']) help_field_sets = parsed_dict['Predefined field sets'] - output_field_sets = Host.list(options={'fields': ','.join(help_field_sets)}) + output_field_sets = module_target_sat.cli.Host.list( + options={'fields': ','.join(help_field_sets)} + ) # get list index of the created host in the output_field_sets [host_idx] = [idx for idx, host in enumerate(output_field_sets) if new_host['id'] == host['id']] @@ -324,7 +314,7 @@ def test_positive_create_and_delete(target_sat, module_lce_library, module_publi 'type=interface,mac={},identifier=eth0,name={},domain_id={},' 'ip={},primary=true,provision=true' ).format(host.mac, gen_string('alpha'), host.domain.id, gen_ipaddr()) - new_host = make_host( + new_host = target_sat.cli_factory.make_host( { 'architecture-id': host.architecture.id, 'content-view-id': module_published_cv.id, @@ -347,14 +337,14 @@ def test_positive_create_and_delete(target_sat, module_lce_library, module_publi assert ( new_host['content-information']['lifecycle-environment']['name'] == module_lce_library.name ) - host_interface = HostInterface.info( + host_interface = target_sat.cli.HostInterface.info( {'host-id': new_host['id'], 'id': new_host['network-interfaces'][0]['id']} ) assert host_interface['domain'] == 
host.domain.read().name - Host.delete({'id': new_host['id']}) + target_sat.cli.Host.delete({'id': new_host['id']}) with pytest.raises(CLIReturnCodeError): - Host.info({'id': new_host['id']}) + target_sat.cli.Host.info({'id': new_host['id']}) @@ -373,14 +363,14 @@ domain = target_sat.api.Domain(location=[default_location], organization=[default_org]).create() mac = gen_mac(multicast=False) - host = make_fake_host({'domain-id': domain.id}) - number_of_interfaces = len(HostInterface.list({'host-id': host['id']})) + host = target_sat.cli_factory.make_fake_host({'domain-id': domain.id}) + number_of_interfaces = len(target_sat.cli.HostInterface.list({'host-id': host['id']})) - HostInterface.create( + target_sat.cli.HostInterface.create( {'host-id': host['id'], 'domain-id': domain.id, 'mac': mac, 'type': 'interface'} ) - host = Host.info({'id': host['id']}) - host_interface = HostInterface.info( + host = target_sat.cli.Host.info({'id': host['id']}) + host_interface = target_sat.cli.HostInterface.info( { 'host-id': host['id'], 'id': [ni for ni in host['network-interfaces'] if ni['mac-address'] == mac][0]['id'], @@ -388,13 +378,15 @@ ) assert host_interface['domain'] == domain.name assert host_interface['mac-address'] == mac - assert len(HostInterface.list({'host-id': host['id']})) == number_of_interfaces + 1 + assert ( + len(target_sat.cli.HostInterface.list({'host-id': host['id']})) == number_of_interfaces + 1 + ) new_domain = target_sat.api.Domain( location=[default_location], organization=[default_org] ).create() new_mac = gen_mac(multicast=False) - HostInterface.update( + target_sat.cli.HostInterface.update( { 'host-id': host['id'], 'id': host_interface['id'], @@ -402,7 +394,7 @@ 'mac': new_mac, } ) - host_interface = HostInterface.info( + host_interface = target_sat.cli.HostInterface.info( { 'host-id': host['id'], 'id': [ni for ni in host['network-interfaces'] if ni['mac-address'] == mac][0]['id'], @@ -411,15 +403,17 @@ ) assert host_interface['domain'] == new_domain.name assert host_interface['mac-address'] == new_mac - HostInterface.delete({'host-id': host['id'], 'id': host_interface['id']}) - assert len(HostInterface.list({'host-id': host['id']})) == number_of_interfaces + target_sat.cli.HostInterface.delete({'host-id': host['id'], 'id': host_interface['id']}) + assert len(target_sat.cli.HostInterface.list({'host-id': host['id']})) == number_of_interfaces with pytest.raises(CLIReturnCodeError): - HostInterface.info({'host-id': host['id'], 'id': host_interface['id']}) + target_sat.cli.HostInterface.info({'host-id': host['id'], 'id': host_interface['id']}) @pytest.mark.cli_host_create @pytest.mark.tier2 -def test_negative_create_with_content_source(module_lce_library, module_org, module_published_cv): +def test_negative_create_with_content_source( + module_lce_library, module_org, module_published_cv, module_target_sat +): """Attempt to create a host with invalid content source specified :id: d92d6aff-4ad3-467c-88a8-5a5e56614f58 @@ -431,7 +425,7 @@ :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 
'content-source-id': gen_integer(10000, 99999), 'content-view-id': module_published_cv.id, @@ -444,7 +438,7 @@ def test_negative_create_with_content_source(module_lce_library, module_org, mod @pytest.mark.cli_host_create @pytest.mark.tier2 def test_negative_update_content_source( - module_default_proxy, module_lce_library, module_org, module_published_cv + module_default_proxy, module_lce_library, module_org, module_published_cv, module_target_sat ): """Attempt to update host's content source with invalid value @@ -459,7 +453,7 @@ def test_negative_update_content_source( :CaseImportance: Medium """ - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( { 'content-source-id': module_default_proxy['id'], 'content-view-id': module_published_cv.id, @@ -468,14 +462,18 @@ def test_negative_update_content_source( } ) with pytest.raises(CLIReturnCodeError): - Host.update({'id': host['id'], 'content-source-id': gen_integer(10000, 99999)}) - host = Host.info({'id': host['id']}) + module_target_sat.cli.Host.update( + {'id': host['id'], 'content-source-id': gen_integer(10000, 99999)} + ) + host = module_target_sat.cli.Host.info({'id': host['id']}) assert host['content-information']['content-source']['name'] == module_default_proxy['name'] @pytest.mark.cli_host_create @pytest.mark.tier1 -def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted_cv): +def test_positive_create_with_lce_and_cv( + module_lce, module_org, module_promoted_cv, module_target_sat +): """Check if host can be created with new lifecycle and new content view @@ -488,7 +486,7 @@ def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted :CaseImportance: Critical """ - new_host = make_fake_host( + new_host = module_target_sat.cli_factory.make_fake_host( { 'content-view-id': module_promoted_cv.id, 'lifecycle-environment-id': module_lce.id, @@ -501,7 +499,9 @@ def test_positive_create_with_lce_and_cv(module_lce, module_org, module_promoted @pytest.mark.cli_host_create @pytest.mark.tier2 -def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org): +def test_positive_create_with_openscap_proxy_id( + module_default_proxy, module_org, module_target_sat +): """Check if host can be created with OpenSCAP Proxy id :id: 3774ba08-3b18-4e64-b07f-53f6aa0504f3 @@ -510,7 +510,7 @@ def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org :CaseImportance: Medium """ - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( {'organization-id': module_org.id, 'openscap-proxy-id': module_default_proxy['id']} ) assert host['openscap-proxy'] == module_default_proxy['id'] @@ -518,7 +518,9 @@ def test_positive_create_with_openscap_proxy_id(module_default_proxy, module_org @pytest.mark.cli_host_create @pytest.mark.tier1 -def test_negative_create_with_name(module_lce_library, module_org, module_published_cv): +def test_negative_create_with_name( + module_lce_library, module_org, module_published_cv, module_target_sat +): """Check if host can be created with random long names :id: f92b6070-b2d1-4e3e-975c-39f1b1096697 @@ -529,7 +531,7 @@ def test_negative_create_with_name(module_lce_library, module_org, module_publis """ name = gen_choice(invalid_values_list()) with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'name': name, 'organization-id': module_org.id, @@ -541,7 +543,7 @@ def test_negative_create_with_name(module_lce_library, module_org, module_publis 
@pytest.mark.cli_host_create @pytest.mark.tier1 -def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): +def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv, module_target_sat): """Check if host can be created using unpublished cv :id: 9997383d-3c27-4f14-94f9-4b8b51180eb6 @@ -551,7 +553,7 @@ def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_fake_host( + module_target_sat.cli_factory.make_fake_host( { 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, @@ -563,7 +565,7 @@ def test_negative_create_with_unpublished_cv(module_lce, module_org, module_cv): @pytest.mark.cli_host_create @pytest.mark.tier3 @pytest.mark.upgrade -def test_positive_katello_and_openscap_loaded(): +def test_positive_katello_and_openscap_loaded(target_sat): """Verify that command line arguments from both Katello and foreman_openscap plugins are loaded and available at the same time @@ -582,7 +584,7 @@ def test_positive_katello_and_openscap_loaded(): :BZ: 1671148 """ - help_output = Host.execute('host update --help') + help_output = target_sat.cli.Host.execute('host update --help') for arg in ['lifecycle-environment[-id]', 'openscap-proxy-id']: assert any( f'--{arg}' in line for line in help_output.split('\n') @@ -607,11 +609,11 @@ def test_positive_list_and_unregister( """ rhel7_contenthost.register(module_org, None, module_ak_with_cv.name, target_sat) assert rhel7_contenthost.subscribed - hosts = Host.list({'organization-id': module_org.id}) + hosts = target_sat.cli.Host.list({'organization-id': module_org.id}) assert rhel7_contenthost.hostname in [host['name'] for host in hosts] result = rhel7_contenthost.unregister() assert result.status == 0 - hosts = Host.list({'organization-id': module_org.id}) + hosts = target_sat.cli.Host.list({'organization-id': module_org.id}) assert rhel7_contenthost.hostname in [host['name'] for host in hosts] @@ -640,7 +642,9 @@ def test_positive_list_by_last_checkin( lce=f'{module_lce.label}/{module_promoted_cv.label}', ) assert rhel7_contenthost.subscribed - hosts = Host.list({'search': 'last_checkin = "Today" or last_checkin = "Yesterday"'}) + hosts = target_sat.cli.Host.list( + {'search': 'last_checkin = "Today" or last_checkin = "Yesterday"'} + ) assert len(hosts) >= 1 assert rhel7_contenthost.hostname in [host['name'] for host in hosts] @@ -666,15 +670,15 @@ def test_positive_list_infrastructure_hosts( lce=f'{module_lce.label}/{module_promoted_cv.label}', ) assert rhel7_contenthost.subscribed - Host.update({'name': target_sat.hostname, 'new-organization-id': module_org.id}) + target_sat.cli.Host.update({'name': target_sat.hostname, 'new-organization-id': module_org.id}) # list satellite hosts - hosts = Host.list({'search': 'infrastructure_facet.foreman=true'}) + hosts = target_sat.cli.Host.list({'search': 'infrastructure_facet.foreman=true'}) assert len(hosts) == 2 if is_open('BZ:1994685') else len(hosts) == 1 hostnames = [host['name'] for host in hosts] assert rhel7_contenthost.hostname not in hostnames assert target_sat.hostname in hostnames # list capsule hosts - hosts = Host.list({'search': 'infrastructure_facet.smart_proxy_id=1'}) + hosts = target_sat.cli.Host.list({'search': 'infrastructure_facet.smart_proxy_id=1'}) hostnames = [host['name'] for host in hosts] assert len(hosts) == 2 if is_open('BZ:1994685') else len(hosts) == 1 assert rhel7_contenthost.hostname not in hostnames @@ -703,7 +707,9 @@ def 
test_positive_create_inherit_lce_cv( lifecycle_environment=module_lce_library, organization=[module_org], ).create() - host = make_fake_host({'hostgroup-id': hostgroup.id, 'organization-id': module_org.id}) + host = target_sat.cli_factory.make_fake_host( + {'hostgroup-id': hostgroup.id, 'organization-id': module_org.id} + ) assert ( int(host['content-information']['lifecycle-environment']['id']) == hostgroup.lifecycle_environment.id @@ -758,7 +764,7 @@ def test_positive_create_inherit_nested_hostgroup(target_sat): ).create() nested_hostgroups.append(nested_hg) - host = make_host( + host = target_sat.cli_factory.make_host( { 'hostgroup-title': f'{parent_hostgroups[0].name}/{nested_hostgroups[0].name}', 'location-id': options.location.id, @@ -812,7 +818,7 @@ def test_positive_list_with_nested_hostgroup(target_sat): organization=[options.organization], parent=parent_hg, ).create() - make_host( + target_sat.cli_factory.make_host( { 'hostgroup-id': nested_hg.id, 'location-id': options.location.id, @@ -820,9 +826,9 @@ def test_positive_list_with_nested_hostgroup(target_sat): 'name': host_name, } ) - hosts = Host.list({'organization-id': options.organization.id}) + hosts = target_sat.cli.Host.list({'organization-id': options.organization.id}) assert f'{parent_hg_name}/{nested_hg_name}' == hosts[0]['host-group'] - host = Host.info({'id': hosts[0]['id']}) + host = target_sat.cli.Host.info({'id': hosts[0]['id']}) logger.info(f'Host info: {host}') assert host['operating-system']['medium'] == options.medium.name assert host['operating-system']['partition-table'] == options.ptable.name # inherited @@ -910,7 +916,7 @@ def test_positive_update_parameters_by_name( organization=[organization], operatingsystem=[new_os], ).create() - Host.update( + target_sat.cli.Host.update( { 'architecture': module_architecture.name, 'domain': new_domain.name, @@ -922,7 +928,7 @@ def test_positive_update_parameters_by_name( 'new-location-id': new_loc.id, } ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert '{}.{}'.format(new_name, host['network']['domain']) == host['name'] assert host['location'] == new_loc.name assert host['network']['mac'] == new_mac @@ -934,7 +940,7 @@ def test_positive_update_parameters_by_name( @pytest.mark.tier1 @pytest.mark.cli_host_update -def test_negative_update_name(function_host): +def test_negative_update_name(function_host, target_sat): """A host can not be updated with invalid or empty name :id: e8068d2a-6a51-4627-908b-60a516c67032 @@ -945,14 +951,14 @@ def test_negative_update_name(function_host): """ new_name = gen_choice(invalid_values_list()) with pytest.raises(CLIReturnCodeError): - Host.update({'id': function_host['id'], 'new-name': new_name}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.update({'id': function_host['id'], 'new-name': new_name}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert '{}.{}'.format(new_name, host['network']['domain']).lower() != host['name'] @pytest.mark.tier1 @pytest.mark.cli_host_update -def test_negative_update_mac(function_host): +def test_negative_update_mac(function_host, target_sat): """A host can not be updated with invalid or empty MAC address :id: 2f03032d-789d-419f-9ff2-a6f3561444da @@ -963,14 +969,14 @@ def test_negative_update_mac(function_host): """ new_mac = gen_choice(invalid_values_list()) with pytest.raises(CLIReturnCodeError): - Host.update({'id': function_host['id'], 'mac': new_mac}) - host = Host.info({'id': 
function_host['id']}) + target_sat.cli.Host.update({'id': function_host['id'], 'mac': new_mac}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['network']['mac'] != new_mac @pytest.mark.tier2 @pytest.mark.cli_host_update -def test_negative_update_arch(function_host, module_architecture): +def test_negative_update_arch(function_host, module_architecture, target_sat): """A host can not be updated with a architecture, which does not belong to host's operating system @@ -981,8 +987,10 @@ def test_negative_update_arch(function_host, module_architecture): :CaseLevel: Integration """ with pytest.raises(CLIReturnCodeError): - Host.update({'architecture': module_architecture.name, 'id': function_host['id']}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.update( + {'architecture': module_architecture.name, 'id': function_host['id']} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['operating-system']['architecture'] != module_architecture.name @@ -1007,14 +1015,14 @@ def test_negative_update_os(target_sat, function_host, module_architecture): ptable=[p_table.id], ).create() with pytest.raises(CLIReturnCodeError): - Host.update( + target_sat.cli.Host.update( { 'architecture': module_architecture.name, 'id': function_host['id'], 'operatingsystem': new_os.title, } ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['operating-system']['operating-system'] != new_os.title @@ -1042,25 +1050,27 @@ def test_hammer_host_info_output(target_sat, module_user): user = target_sat.api.User().search( query={'search': f'login={settings.server.admin_username}'} )[0] - Host.update({'owner': settings.server.admin_username, 'owner-type': 'User', 'id': '1'}) - result_info = Host.info(options={'id': '1', 'fields': 'Additional info'}) + target_sat.cli.Host.update( + {'owner': settings.server.admin_username, 'owner-type': 'User', 'id': '1'} + ) + result_info = target_sat.cli.Host.info(options={'id': '1', 'fields': 'Additional info'}) assert int(result_info['additional-info']['owner-id']) == user.id - host = Host.info({'id': '1'}) - User.update( + host = target_sat.cli.Host.info({'id': '1'}) + target_sat.cli.User.update( { 'id': module_user.id, 'organizations': [host['organization']], 'locations': [host['location']], } ) - Host.update({'owner-id': module_user.id, 'id': '1'}) - result_info = Host.info(options={'id': '1', 'fields': 'Additional info'}) + target_sat.cli.Host.update({'owner-id': module_user.id, 'id': '1'}) + result_info = target_sat.cli.Host.info(options={'id': '1', 'fields': 'Additional info'}) assert int(result_info['additional-info']['owner-id']) == module_user.id @pytest.mark.cli_host_parameter @pytest.mark.tier1 -def test_positive_parameter_crud(function_host): +def test_positive_parameter_crud(function_host, target_sat): """Add, update and remove host parameter with valid name. 
:id: 76034424-cf18-4ced-916b-ee9798c311bc @@ -1072,26 +1082,28 @@ def test_positive_parameter_crud(function_host): """ name = next(iter(valid_data_list())) value = valid_data_list()[name] - Host.set_parameter({'host-id': function_host['id'], 'name': name, 'value': value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': name, 'value': value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert name in host['parameters'].keys() assert value == host['parameters'][name] new_value = valid_data_list()[name] - Host.set_parameter({'host-id': host['id'], 'name': name, 'value': new_value}) - host = Host.info({'id': host['id']}) + target_sat.cli.Host.set_parameter({'host-id': host['id'], 'name': name, 'value': new_value}) + host = target_sat.cli.Host.info({'id': host['id']}) assert name in host['parameters'].keys() assert new_value == host['parameters'][name] - Host.delete_parameter({'host-id': host['id'], 'name': name}) - host = Host.info({'id': host['id']}) + target_sat.cli.Host.delete_parameter({'host-id': host['id'], 'name': name}) + host = target_sat.cli.Host.info({'id': host['id']}) assert name not in host['parameters'].keys() # -------------------------- HOST PARAMETER SCENARIOS ------------------------- @pytest.mark.cli_host_parameter @pytest.mark.tier1 -def test_negative_add_parameter(function_host): +def test_negative_add_parameter(function_host, target_sat): """Try to add host parameter with different invalid names. :id: 473f8c3f-b66e-4526-88af-e139cc3dabcb @@ -1103,14 +1115,14 @@ def test_negative_add_parameter(function_host): """ name = gen_choice(invalid_values_list()).lower() with pytest.raises(CLIReturnCodeError): - Host.set_parameter( + target_sat.cli.Host.set_parameter( { 'host-id': function_host['id'], 'name': name, 'value': gen_string('alphanumeric'), } ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert name not in host['parameters'].keys() @@ -1138,19 +1150,21 @@ def test_negative_view_parameter_by_non_admin_user(target_sat, function_host, fu """ param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, 'Organization': {'permissions': ['view_organizations']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) - host = Host.with_user( + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + host = target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).info({'id': host['id']}) assert not host.get('parameters') @@ -1181,11 +1195,13 @@ def test_positive_view_parameter_by_non_admin_user(target_sat, function_host, fu """ param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = 
Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, @@ -1193,8 +1209,8 @@ def test_positive_view_parameter_by_non_admin_user(target_sat, function_host, fu 'Parameter': {'permissions': ['view_params']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) - host = Host.with_user( + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + host = target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).info({'id': host['id']}) assert param_name in host['parameters'] @@ -1226,11 +1242,13 @@ def test_negative_edit_parameter_by_non_admin_user(target_sat, function_host, fu """ param_name = gen_string('alpha').lower() param_value = gen_string('alphanumeric') - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - host = Host.info({'id': function_host['id']}) + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 'name': param_name, 'value': param_value} + ) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value role = target_sat.api.Role(name=gen_string('alphanumeric')).create() - add_role_permissions( + target_sat.cli_factory.add_role_permissions( role.id, resource_permissions={ 'Host': {'permissions': ['view_hosts']}, @@ -1238,21 +1256,21 @@ def test_negative_edit_parameter_by_non_admin_user(target_sat, function_host, fu 'Parameter': {'permissions': ['view_params']}, }, ) - User.add_role({'id': function_user['user'].id, 'role-id': role.id}) + target_sat.cli.User.add_role({'id': function_user['user'].id, 'role-id': role.id}) param_new_value = gen_string('alphanumeric') with pytest.raises(CLIReturnCodeError): - Host.with_user( + target_sat.cli.Host.with_user( username=function_user['user'].login, password=function_user['password'] ).set_parameter( {'host-id': function_host['id'], 'name': param_name, 'value': param_new_value} ) - host = Host.info({'id': function_host['id']}) + host = target_sat.cli.Host.info({'id': function_host['id']}) assert host['parameters'][param_name] == param_value @pytest.mark.cli_host_parameter @pytest.mark.tier2 -def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host): +def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host, target_sat): """Check that host parameter value with multi-line and spaces is correctly restored from yaml format @@ -1276,15 +1294,17 @@ def test_positive_set_multi_line_and_with_spaces_parameter_value(function_host): 'account include password-auth' ) # count parameters of a host - response = Host.info( + response = target_sat.cli.Host.info( {'id': function_host['id']}, output_format='yaml', return_raw_response=True ) assert response.status == 0 yaml_content = yaml.load(response.stdout, yaml.SafeLoader) host_initial_params = yaml_content.get('Parameters') # set parameter - Host.set_parameter({'host-id': function_host['id'], 'name': param_name, 'value': param_value}) - response = Host.info( + target_sat.cli.Host.set_parameter( + {'host-id': function_host['id'], 
'name': param_name, 'value': param_value} + ) + response = target_sat.cli.Host.info( {'id': function_host['id']}, output_format='yaml', return_raw_response=True ) assert response.status == 0 @@ -1572,7 +1592,9 @@ def yum_security_plugin(katello_host_tools_host): @pytest.mark.e2e @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_report_package_installed_removed(katello_host_tools_host, setup_custom_repo): +def test_positive_report_package_installed_removed( + katello_host_tools_host, setup_custom_repo, target_sat +): """Ensure installed/removed package is reported to satellite :id: fa5dc238-74c3-4c8a-aa6f-e0a91ba543e3 @@ -1598,18 +1620,18 @@ def test_positive_report_package_installed_removed(katello_host_tools_host, setu :CaseLevel: System """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 1 assert installed_packages[0]['nvra'] == setup_custom_repo["package"] result = client.run(f'yum remove -y {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 0 @@ -1617,7 +1639,7 @@ def test_positive_report_package_installed_removed(katello_host_tools_host, setu @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_package_applicability(katello_host_tools_host, setup_custom_repo): +def test_positive_package_applicability(katello_host_tools_host, setup_custom_repo, target_sat): """Ensure packages applicability is functioning properly :id: d283b65b-19c1-4eba-87ea-f929b0ee4116 @@ -1644,12 +1666,12 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re :CaseLevel: System """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 applicable_packages, _ = wait_for( - lambda: Package.list( + lambda: target_sat.cli.Package.list( { 'host-id': host_info['id'], 'packages-restrict-applicable': 'true', @@ -1667,7 +1689,7 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re client.run(f'yum install -y {setup_custom_repo["new_package"]}') result = client.run(f'rpm -q {setup_custom_repo["new_package"]}') assert result.status == 0 - applicable_packages = Package.list( + applicable_packages = target_sat.cli.Package.list( { 'host-id': host_info['id'], 'packages-restrict-applicable': 'true', @@ -1683,7 +1705,7 @@ def test_positive_package_applicability(katello_host_tools_host, setup_custom_re @pytest.mark.pit_server @pytest.mark.tier3 def test_positive_erratum_applicability( - katello_host_tools_host, setup_custom_repo, yum_security_plugin + katello_host_tools_host, setup_custom_repo, yum_security_plugin, target_sat ): """Ensure erratum applicability is functioning properly @@ -1708,12 +1730,12 @@ 
def test_positive_erratum_applicability( :CaseLevel: System """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["package"]}') result = client.run(f'rpm -q {setup_custom_repo["package"]}') client.subscription_manager_list_repos() applicable_errata, _ = wait_for( - lambda: Host.errata_list({'host-id': host_info['id']}), + lambda: target_sat.cli.Host.errata_list({'host-id': host_info['id']}), handle_exception=True, fail_condition=[], timeout=120, @@ -1735,7 +1757,7 @@ def test_positive_erratum_applicability( lambda: setup_custom_repo["security_errata"] not in [ errata['erratum-id'] - for errata in Host.errata_list({'host-id': host_info['id']}) + for errata in target_sat.cli.Host.errata_list({'host-id': host_info['id']}) if errata['installable'] == 'true' ], handle_exception=True, @@ -1751,7 +1773,7 @@ def test_positive_erratum_applicability( @pytest.mark.cli_katello_host_tools @pytest.mark.tier3 -def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_repo): +def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_repo, target_sat): """Apply security erratum to a host :id: 4d1095c8-d354-42ac-af44-adf6dbb46deb @@ -1768,12 +1790,12 @@ def test_positive_apply_security_erratum(katello_host_tools_host, setup_custom_r :parametrized: yes """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.run(f'yum install -y {setup_custom_repo["new_package"]}') client.run(f'yum downgrade -y {setup_custom_repo["package_name"]}') # Check that host has applicable errata host_erratum, _ = wait_for( - lambda: Host.errata_list({'host-id': host_info['id']})[0], + lambda: target_sat.cli.Host.errata_list({'host-id': host_info['id']})[0], handle_exception=True, timeout=120, delay=5, @@ -1804,10 +1826,10 @@ def test_positive_install_package_via_rex( :parametrized: yes """ client = katello_host_tools_host - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) client.configure_rex(satellite=target_sat, org=module_org, register=False) # Apply errata to the host collection using job invocation - JobInvocation.create( + target_sat.cli.JobInvocation.create( { 'feature': 'katello_package_install', 'search-query': f'name ~ {client.hostname}', @@ -1817,7 +1839,7 @@ def test_positive_install_package_via_rex( ) result = client.run(f'rpm -q {setup_custom_repo["package"]}') assert result.status == 0 - installed_packages = Host.package_list( + installed_packages = target_sat.cli.Host.package_list( {'host-id': host_info['id'], 'search': f'name={setup_custom_repo["package_name"]}'} ) assert len(installed_packages) == 1 @@ -1849,7 +1871,12 @@ def ak_with_subscription( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_positive_register( - module_org, module_promoted_cv, module_lce, module_ak_with_cv, host_subscription_client + module_org, + module_promoted_cv, + module_lce, + module_ak_with_cv, + host_subscription_client, + target_sat, ): """Attempt to register a host @@ -1861,14 +1888,14 @@ def test_positive_register( :CaseLevel: System """ - hosts = Host.list( + hosts = target_sat.cli.Host.list( { 'organization-id': module_org.id, 'search': host_subscription_client.hostname, } ) assert len(hosts) == 0 - Host.subscription_register( + 
target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -1876,18 +1903,18 @@ def test_positive_register( 'name': host_subscription_client.hostname, } ) - hosts = Host.list( + hosts = target_sat.cli.Host.list( { 'organization-id': module_org.id, 'search': host_subscription_client.hostname, } ) assert len(hosts) > 0 - host = Host.info({'id': hosts[0]['id']}) + host = target_sat.cli.Host.info({'id': hosts[0]['id']}) assert host['name'] == host_subscription_client.hostname # note: when not registered the following command lead to exception, # see unregister - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': module_ak_with_cv.id, @@ -1908,6 +1935,7 @@ def test_positive_attach( module_rhst_repo, default_subscription, host_subscription_client, + target_sat, ): """Attempt to attach a subscription to host @@ -1926,7 +1954,7 @@ def test_positive_attach( """ # create an activation key without subscriptions # register the client host - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -1934,13 +1962,13 @@ def test_positive_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, activation_key=module_ak_with_cv.name ) assert host_subscription_client.subscribed # attach the subscription to host - Host.subscription_attach( + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': default_subscription.id, @@ -1964,6 +1992,7 @@ def test_positive_attach_with_lce( module_rhst_repo, default_subscription, host_subscription_client, + target_sat, ): """Attempt to attach a subscription to host, registered by lce @@ -1986,8 +2015,8 @@ def test_positive_attach_with_lce( auto_attach=False, ) assert host_subscription_client.subscribed - host = Host.info({'name': host_subscription_client.hostname}) - Host.subscription_attach( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': default_subscription.id, @@ -2005,7 +2034,7 @@ def test_positive_attach_with_lce( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_negative_without_attach( - module_org, module_promoted_cv, module_lce, host_subscription_client + module_org, module_promoted_cv, module_lce, host_subscription_client, target_sat ): """Register content host from satellite, register client to uuid of that content host, as there was no attach on the client, @@ -2019,7 +2048,7 @@ def test_negative_without_attach( :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2027,7 +2056,7 @@ def test_negative_without_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, lce=None, # required, to jump into right branch in register_contenthost method @@ -2063,7 +2092,7 @@ def test_negative_without_attach_with_lce( 
environment=function_lce, organization=function_org, ).create() - setup_org_for_a_rh_repo( + target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -2110,6 +2139,7 @@ def test_positive_remove( ak_with_subscription, default_subscription, host_subscription_client, + target_sat, ): """Attempt to remove a subscription from content host @@ -2121,7 +2151,7 @@ def test_positive_remove( :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2129,8 +2159,8 @@ def test_positive_remove( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) - host_subscriptions = ActivationKey.subscriptions( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2142,13 +2172,13 @@ def test_positive_remove( host_subscription_client.register_contenthost( module_org.name, activation_key=ak_with_subscription.name ) - Host.subscription_attach( + target_sat.cli.Host.subscription_attach( { 'host-id': host['id'], 'subscription-id': default_subscription.id, } ) - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2157,13 +2187,13 @@ def test_positive_remove( output_format='json', ) assert default_subscription.name in [sub['name'] for sub in host_subscriptions] - Host.subscription_remove( + target_sat.cli.Host.subscription_remove( { 'host-id': host['id'], 'subscription-id': default_subscription.id, } ) - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2183,6 +2213,7 @@ def test_positive_auto_attach( module_rhst_repo, ak_with_subscription, host_subscription_client, + target_sat, ): """Attempt to auto attach a subscription to content host @@ -2195,7 +2226,7 @@ def test_positive_auto_attach( :CaseLevel: System """ - Host.subscription_register( + target_sat.cli.Host.subscription_register( { 'organization-id': module_org.id, 'content-view-id': module_promoted_cv.id, @@ -2203,11 +2234,11 @@ def test_positive_auto_attach( 'name': host_subscription_client.hostname, } ) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) host_subscription_client.register_contenthost( module_org.name, activation_key=ak_with_subscription.name ) - Host.subscription_auto_attach({'host-id': host['id']}) + target_sat.cli.Host.subscription_auto_attach({'host-id': host['id']}) host_subscription_client.enable_repo(module_rhst_repo) # ensure that katello-host-tools can be installed try: @@ -2219,7 +2250,7 @@ def test_positive_auto_attach( @pytest.mark.cli_host_subscription @pytest.mark.tier3 def test_positive_unregister_host_subscription( - module_org, module_rhst_repo, ak_with_subscription, host_subscription_client + module_org, module_rhst_repo, ak_with_subscription, host_subscription_client, target_sat ): """Attempt to unregister host subscription @@ -2240,8 +2271,8 @@ def test_positive_unregister_host_subscription( host_subscription_client.run('subscription-manager attach --auto') 
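
A minimal sketch of the register-then-attach flow these subscription hunks keep repeating, assuming the fixtures named in this patch (module_org, module_promoted_cv, module_lce, default_subscription, host_subscription_client, target_sat); the helper name and the lifecycle-environment key are assumptions, since those context lines are elided from the hunks:

def _register_and_attach_sketch(
    target_sat,
    module_org,
    module_promoted_cv,
    module_lce,
    default_subscription,
    host_subscription_client,
):
    # Create the host record on the Satellite side before the client registers.
    target_sat.cli.Host.subscription_register(
        {
            'organization-id': module_org.id,
            'content-view-id': module_promoted_cv.id,
            'lifecycle-environment-id': module_lce.id,  # assumed; elided in the hunks above
            'name': host_subscription_client.hostname,
        }
    )
    host = target_sat.cli.Host.info({'name': host_subscription_client.hostname})
    # Attach the default subscription to the newly registered host record.
    target_sat.cli.Host.subscription_attach(
        {'host-id': host['id'], 'subscription-id': default_subscription.id}
    )
    return host
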
host_subscription_client.enable_repo(module_rhst_repo) assert host_subscription_client.subscribed - host = Host.info({'name': host_subscription_client.hostname}) - host_subscriptions = ActivationKey.subscriptions( + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2250,11 +2281,11 @@ def test_positive_unregister_host_subscription( output_format='json', ) assert len(host_subscriptions) > 0 - Host.subscription_unregister({'host': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_unregister({'host': host_subscription_client.hostname}) with pytest.raises(CLIReturnCodeError): # raise error that the host was not registered by # subscription-manager register - ActivationKey.subscriptions( + target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': ak_with_subscription.id, @@ -2300,7 +2331,7 @@ def test_syspurpose_end_to_end( purpose_usage="test-usage", service_level="Self-Support", ).create() - ActivationKey.add_subscription( + target_sat.cli.ActivationKey.add_subscription( { 'organization-id': module_org.id, 'id': activation_key.id, @@ -2314,14 +2345,14 @@ def test_syspurpose_end_to_end( assert host_subscription_client.subscribed host_subscription_client.run('subscription-manager attach --auto') host_subscription_client.enable_repo(module_rhst_repo) - host = Host.info({'name': host_subscription_client.hostname}) + host = target_sat.cli.Host.info({'name': host_subscription_client.hostname}) # Assert system purpose values are set in the host as expected assert host['subscription-information']['system-purpose']['purpose-addons'] == purpose_addons assert host['subscription-information']['system-purpose']['purpose-role'] == "test-role" assert host['subscription-information']['system-purpose']['purpose-usage'] == "test-usage" assert host['subscription-information']['system-purpose']['service-level'] == "Self-Support" # Change system purpose values in the host - Host.update( + target_sat.cli.Host.update( { 'purpose-addons': "test-addon3", 'purpose-role': "test-role2", @@ -2330,13 +2361,13 @@ def test_syspurpose_end_to_end( 'id': host['id'], } ) - host = Host.info({'id': host['id']}) + host = target_sat.cli.Host.info({'id': host['id']}) # Assert system purpose values have been updated in the host as expected assert host['subscription-information']['system-purpose']['purpose-addons'] == "test-addon3" assert host['subscription-information']['system-purpose']['purpose-role'] == "test-role2" assert host['subscription-information']['system-purpose']['purpose-usage'] == "test-usage2" assert host['subscription-information']['system-purpose']['service-level'] == "Self-Support2" - host_subscriptions = ActivationKey.subscriptions( + host_subscriptions = target_sat.cli.ActivationKey.subscriptions( { 'organization-id': module_org.id, 'id': activation_key.id, @@ -2347,11 +2378,11 @@ def test_syspurpose_end_to_end( assert len(host_subscriptions) > 0 assert host_subscriptions[0]['name'] == default_subscription.name # Unregister host - Host.subscription_unregister({'host': host_subscription_client.hostname}) + target_sat.cli.Host.subscription_unregister({'host': host_subscription_client.hostname}) with pytest.raises(CLIReturnCodeError): # raise error that the host was not registered by # subscription-manager register - ActivationKey.subscriptions( + target_sat.cli.ActivationKey.subscriptions( { 
'organization-id': module_org.id, 'id': activation_key.id, @@ -2374,8 +2405,8 @@ def test_positive_errata_list_of_sat_server(target_sat): :CaseImportance: Critical """ hostname = target_sat.execute('hostname').stdout.strip() - host = Host.info({'name': hostname}) - assert isinstance(Host.errata_list({'host-id': host['id']}), list) + host = target_sat.cli.Host.info({'name': hostname}) + assert isinstance(target_sat.cli.Host.errata_list({'host-id': host['id']}), list) # -------------------------- HOST ENC SUBCOMMAND SCENARIOS ------------------------- @@ -2393,7 +2424,7 @@ def test_positive_dump_enc_yaml(target_sat): :CaseImportance: Critical """ - enc_dump = Host.enc_dump({'name': target_sat.hostname}) + enc_dump = target_sat.cli.Host.enc_dump({'name': target_sat.hostname}) assert f'fqdn: {target_sat.hostname}' in enc_dump assert f'ip: {target_sat.ip_addr}' in enc_dump assert 'ssh-rsa' in enc_dump @@ -2402,7 +2433,7 @@ def test_positive_dump_enc_yaml(target_sat): # -------------------------- HOST TRACE SUBCOMMAND SCENARIOS ------------------------- @pytest.mark.tier3 @pytest.mark.rhel_ver_match('[^6].*') -def test_positive_tracer_list_and_resolve(tracer_host): +def test_positive_tracer_list_and_resolve(tracer_host, target_sat): """Install tracer on client, downgrade the service, check from the satellite that tracer shows and resolves the problem. The test works with a package specified in settings. This package is expected to install a systemd service which is expected @@ -2426,7 +2457,7 @@ def test_positive_tracer_list_and_resolve(tracer_host): """ client = tracer_host package = settings.repos["MOCK_SERVICE_RPM"] - host_info = Host.info({'name': client.hostname}) + host_info = target_sat.cli.Host.info({'name': client.hostname}) # mark the service log messages for later comparison and downgrade the pkg version service_ver_log_old = tracer_host.execute(f'cat /var/log/{package}/service.log') @@ -2434,12 +2465,12 @@ def test_positive_tracer_list_and_resolve(tracer_host): assert package_downgrade.status == 0 # tracer should detect a new trace - traces = HostTraces.list({'host-id': host_info['id']})[0] + traces = target_sat.cli.HostTraces.list({'host-id': host_info['id']})[0] assert package == traces['application'] # resolve traces and make sure that they disappear - HostTraces.resolve({'host-id': host_info['id'], 'trace-ids': traces['trace-id']}) - traces = HostTraces.list({'host-id': host_info['id']}) + target_sat.cli.HostTraces.resolve({'host-id': host_info['id'], 'trace-ids': traces['trace-id']}) + traces = target_sat.cli.HostTraces.list({'host-id': host_info['id']}) assert not traces # verify on the host end, that the service was really restarted @@ -2492,15 +2523,15 @@ def test_positive_host_with_puppet( location=[host_template.location], ).update(['location', 'organization']) - session_puppet_enabled_sat.cli.Host.update( + session_puppet_enabled_sat.cli.target_sat.cli.Host.update( { 'name': host.name, 'puppet-environment': module_puppet_environment.name, } ) - host = session_puppet_enabled_sat.cli.Host.info({'id': host['id']}) + host = session_puppet_enabled_sat.cli.target_sat.cli.Host.info({'id': host['id']}) assert host['puppet-environment'] == module_puppet_environment.name - session_puppet_enabled_sat.cli.Host.delete({'id': host['id']}) + session_puppet_enabled_sat.cli.target_sat.cli.Host.delete({'id': host['id']}) @pytest.fixture(scope="function") @@ -2527,7 +2558,7 @@ def function_host_content_source( } ) yield host - session_puppet_enabled_sat.cli.Host.delete({'id': host['id']}) + 
session_puppet_enabled_sat.cli.target_sat.cli.Host.delete({'id': host['id']}) @pytest.mark.tier2 @@ -2574,7 +2605,9 @@ class are listed scp_id = choice(sc_params_list)['id'] session_puppet_enabled_sat.cli.SmartClassParameter.update({'id': scp_id, 'override': 1}) # Verify that affected sc-param is listed - host_scparams = session_puppet_enabled_sat.cli.Host.sc_params({'host': host['name']}) + host_scparams = session_puppet_enabled_sat.cli.target_sat.cli.Host.sc_params( + {'host': host['name']} + ) assert scp_id in [scp['id'] for scp in host_scparams] @@ -2609,7 +2642,9 @@ def test_positive_create_with_puppet_class_name( 'puppet-proxy-id': session_puppet_enabled_proxy.id, } ) - host_classes = session_puppet_enabled_sat.cli.Host.puppetclasses({'host': host['name']}) + host_classes = session_puppet_enabled_sat.cli.target_sat.cli.Host.puppetclasses( + {'host': host['name']} + ) assert module_puppet_classes[0].name in [puppet['name'] for puppet in host_classes] @@ -2650,17 +2685,21 @@ def test_positive_update_host_owner_and_verify_puppet_class_name( 'puppet-proxy-id': session_puppet_enabled_proxy.id, } ) - host_classes = session_puppet_enabled_sat.cli.Host.puppetclasses({'host': host['name']}) + host_classes = session_puppet_enabled_sat.cli.target_sat.cli.Host.puppetclasses( + {'host': host['name']} + ) assert module_puppet_classes[0].name in [puppet['name'] for puppet in host_classes] - session_puppet_enabled_sat.cli.Host.update( + session_puppet_enabled_sat.cli.target_sat.cli.Host.update( {'id': host['id'], 'owner': module_puppet_user.login, 'owner-type': 'User'} ) - host = session_puppet_enabled_sat.cli.Host.info({'id': host['id']}) + host = session_puppet_enabled_sat.cli.target_sat.cli.Host.info({'id': host['id']}) assert int(host['additional-info']['owner-id']) == module_puppet_user.id assert host['additional-info']['owner-type'] == 'User' - host_classes = session_puppet_enabled_sat.cli.Host.puppetclasses({'host': host['name']}) + host_classes = session_puppet_enabled_sat.cli.target_sat.cli.Host.puppetclasses( + {'host': host['name']} + ) assert module_puppet_classes[0].name in [puppet['name'] for puppet in host_classes] @@ -2692,8 +2731,8 @@ def test_positive_create_and_update_with_content_source( host['content-information']['content-source']['name'] == session_puppet_enabled_proxy.name ) new_content_source = function_proxy - session_puppet_enabled_sat.cli.Host.update( + session_puppet_enabled_sat.cli.target_sat.cli.Host.update( {'id': host['id'], 'content-source-id': new_content_source.id} ) - host = session_puppet_enabled_sat.cli.Host.info({'id': host['id']}) + host = session_puppet_enabled_sat.cli.target_sat.cli.Host.info({'id': host['id']}) assert host['content-information']['content-source']['name'] == new_content_source.name diff --git a/tests/foreman/cli/test_hostcollection.py b/tests/foreman/cli/test_hostcollection.py index fa41aa9067e..a7710f79fc6 100644 --- a/tests/foreman/cli/test_hostcollection.py +++ b/tests/foreman/cli/test_hostcollection.py @@ -20,19 +20,8 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - CLIFactoryError, - make_fake_host, - make_host_collection, - make_org, -) -from robottelo.cli.host import Host -from robottelo.cli.hostcollection import HostCollection -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment from robottelo.constants import DEFAULT_CV, 
ENVIRONMENT +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost from robottelo.utils.datafactory import ( invalid_values_list, @@ -41,11 +30,15 @@ ) -def _make_fake_host_helper(module_org): +def _make_fake_host_helper(module_org, module_target_sat): """Make a new fake host""" - library = LifecycleEnvironment.info({'organization-id': module_org.id, 'name': ENVIRONMENT}) - default_cv = ContentView.info({'organization-id': module_org.id, 'name': DEFAULT_CV}) - return make_fake_host( + library = module_target_sat.cli.LifecycleEnvironment.info( + {'organization-id': module_org.id, 'name': ENVIRONMENT} + ) + default_cv = module_target_sat.cli.ContentView.info( + {'organization-id': module_org.id, 'name': DEFAULT_CV} + ) + return module_target_sat.cli_factory.make_fake_host( { 'content-view-id': default_cv['id'], 'lifecycle-environment-id': library['id'], @@ -58,7 +51,7 @@ def _make_fake_host_helper(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.e2e -def test_positive_end_to_end(module_org): +def test_positive_end_to_end(module_org, module_target_sat): """Check if host collection can be created with name and description, content host can be added and removed, host collection can be listed, updated and deleted @@ -73,56 +66,70 @@ def test_positive_end_to_end(module_org): """ name = list(valid_data_list().values())[0] desc = list(valid_data_list().values())[0] - new_host_col = make_host_collection( + new_host_col = module_target_sat.cli_factory.make_host_collection( {'description': desc, 'name': name, 'organization-id': module_org.id} ) assert new_host_col['name'] == name assert new_host_col['description'] == desc # add host - new_system = _make_fake_host_helper(module_org) + new_system = _make_fake_host_helper(module_org, module_target_sat) no_of_content_host = new_host_col['total-hosts'] - HostCollection.add_host({'host-ids': new_system['id'], 'id': new_host_col['id']}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.add_host( + {'host-ids': new_system['id'], 'id': new_host_col['id']} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['total-hosts'] > no_of_content_host # list hosts - result = HostCollection.hosts({'name': name, 'organization-id': module_org.id}) + result = module_target_sat.cli.HostCollection.hosts( + {'name': name, 'organization-id': module_org.id} + ) assert new_system['name'].lower() == result[0]['name'] # List all host collections within organization - result = HostCollection.list({'organization': module_org.name}) + result = module_target_sat.cli.HostCollection.list({'organization': module_org.name}) assert len(result) >= 1 # Filter list by name - result = HostCollection.list({'name': name, 'organization-id': module_org.id}) + result = module_target_sat.cli.HostCollection.list( + {'name': name, 'organization-id': module_org.id} + ) assert len(result) == 1 assert result[0]['id'] == new_host_col['id'] # Filter list by associated host name - result = HostCollection.list({'organization': module_org.name, 'host': new_system['name']}) + result = module_target_sat.cli.HostCollection.list( + {'organization': module_org.name, 'host': new_system['name']} + ) assert len(result) == 1 assert result[0]['name'] == new_host_col['name'] # remove host - no_of_content_host = HostCollection.info({'id': new_host_col['id']})['total-hosts'] - HostCollection.remove_host({'host-ids': new_system['id'], 'id': 
new_host_col['id']}) - result = HostCollection.info({'id': new_host_col['id']}) + no_of_content_host = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']})[ + 'total-hosts' + ] + module_target_sat.cli.HostCollection.remove_host( + {'host-ids': new_system['id'], 'id': new_host_col['id']} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert no_of_content_host > result['total-hosts'] # update new_name = list(valid_data_list().values())[0] new_desc = list(valid_data_list().values())[0] - HostCollection.update({'description': new_desc, 'id': new_host_col['id'], 'new-name': new_name}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.update( + {'description': new_desc, 'id': new_host_col['id'], 'new-name': new_name} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['name'] == new_name assert result['description'] == new_desc # delete - HostCollection.delete({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.delete({'id': new_host_col['id']}) with pytest.raises(CLIReturnCodeError): - HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) @pytest.mark.tier1 -def test_positive_create_with_limit(module_org): +def test_positive_create_with_limit(module_org, module_target_sat): """Check if host collection can be created with correct limits :id: 682b5624-1095-48e6-a0dd-c76e70ca6540 @@ -132,12 +139,14 @@ def test_positive_create_with_limit(module_org): :CaseImportance: Critical """ for limit in ('1', '3', '5', '10', '20'): - new_host_col = make_host_collection({'max-hosts': limit, 'organization-id': module_org.id}) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'max-hosts': limit, 'organization-id': module_org.id} + ) assert new_host_col['limit'] == limit @pytest.mark.tier1 -def test_positive_update_to_unlimited_hosts(module_org): +def test_positive_update_to_unlimited_hosts(module_org, module_target_sat): """Create Host Collection with a limit and update it to unlimited hosts :id: d688fd4a-88eb-484e-9e90-854e0595edd0 @@ -146,24 +155,24 @@ def test_positive_update_to_unlimited_hosts(module_org): :CaseImportance: High """ - host_collection = make_host_collection( + host_collection = module_target_sat.cli_factory.make_host_collection( { 'max-hosts': 1, 'organization-id': module_org.id, } ) - result = HostCollection.info( + result = module_target_sat.cli.HostCollection.info( {'name': host_collection['name'], 'organization-id': module_org.id} ) assert result['limit'] == '1' - HostCollection.update( + module_target_sat.cli.HostCollection.update( { 'name': host_collection['name'], 'organization-id': module_org.id, 'unlimited-hosts': True, } ) - result = HostCollection.info( + result = module_target_sat.cli.HostCollection.info( {'name': host_collection['name'], 'organization-id': module_org.id} ) assert result['limit'] == 'None' @@ -171,7 +180,7 @@ def test_positive_update_to_unlimited_hosts(module_org): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_name(module_org, name): +def test_negative_create_with_name(module_org, name, module_target_sat): """Attempt to create host collection with invalid name of different types @@ -184,11 +193,13 @@ def test_negative_create_with_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - 
make_host_collection({'name': name, 'organization-id': module_org.id}) + module_target_sat.cli_factory.make_host_collection( + {'name': name, 'organization-id': module_org.id} + ) @pytest.mark.tier1 -def test_positive_update_limit(module_org): +def test_positive_update_limit(module_org, module_target_sat): """Check if host collection limits can be updated :id: 4c0e0c3b-82ac-4aa2-8378-6adc7946d4ec @@ -199,15 +210,17 @@ def test_positive_update_limit(module_org): :CaseImportance: Critical """ - new_host_col = make_host_collection({'organization-id': module_org.id}) + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_org.id} + ) for limit in ('3', '6', '9', '12', '15', '17', '19'): - HostCollection.update({'id': new_host_col['id'], 'max-hosts': limit}) - result = HostCollection.info({'id': new_host_col['id']}) + module_target_sat.cli.HostCollection.update({'id': new_host_col['id'], 'max-hosts': limit}) + result = module_target_sat.cli.HostCollection.info({'id': new_host_col['id']}) assert result['limit'] == limit @pytest.mark.tier2 -def test_positive_list_by_org_id(module_org): +def test_positive_list_by_org_id(module_org, module_target_sat): """Check if host collection list can be filtered by organization id :id: afbe077a-0de1-432c-a0c4-082129aab92e @@ -217,19 +230,21 @@ def test_positive_list_by_org_id(module_org): :CaseLevel: Integration """ # Create two host collections within different organizations - make_host_collection({'organization-id': module_org.id}) - new_org = make_org() - new_host_col = make_host_collection({'organization-id': new_org['id']}) + module_target_sat.cli_factory.make_host_collection({'organization-id': module_org.id}) + new_org = module_target_sat.cli_factory.make_org() + new_host_col = module_target_sat.cli_factory.make_host_collection( + {'organization-id': new_org['id']} + ) # List all host collections - assert len(HostCollection.list()) >= 2 + assert len(module_target_sat.cli.HostCollection.list()) >= 2 # Filter list by org id - result = HostCollection.list({'organization-id': new_org['id']}) + result = module_target_sat.cli.HostCollection.list({'organization-id': new_org['id']}) assert len(result) == 1 assert result[0]['id'] == new_host_col['id'] @pytest.mark.tier2 -def test_positive_host_collection_host_pagination(module_org): +def test_positive_host_collection_host_pagination(module_org, module_target_sat): """Check if pagination configured on per-page param defined in hammer host-collection hosts command overrides global configuration defined on /etc/hammer/cli_config.yml, which default is 20 per page @@ -243,11 +258,17 @@ def test_positive_host_collection_host_pagination(module_org): :CaseLevel: Integration """ - host_collection = make_host_collection({'organization-id': module_org.id}) - host_ids = ','.join(_make_fake_host_helper(module_org)['id'] for _ in range(2)) - HostCollection.add_host({'host-ids': host_ids, 'id': host_collection['id']}) + host_collection = module_target_sat.cli_factory.make_host_collection( + {'organization-id': module_org.id} + ) + host_ids = ','.join( + _make_fake_host_helper((module_org)['id'] for _ in range(2)), module_target_sat + ) + module_target_sat.cli.HostCollection.add_host( + {'host-ids': host_ids, 'id': host_collection['id']} + ) for number in range(1, 3): - listed_hosts = HostCollection.hosts( + listed_hosts = module_target_sat.cli.HostCollection.hosts( { 'id': host_collection['id'], 'organization-id': module_org.id, @@ -258,7 +279,7 @@ def 
test_positive_host_collection_host_pagination(module_org): @pytest.mark.tier2 -def test_positive_copy_by_id(module_org): +def test_positive_copy_by_id(module_org, module_target_sat): """Check if host collection can be cloned by id :id: fd7cea50-bc56-4938-a81d-4f7a60711814 @@ -271,12 +292,14 @@ def test_positive_copy_by_id(module_org): :CaseLevel: Integration """ - host_collection = make_host_collection( + host_collection = module_target_sat.cli_factory.make_host_collection( {'name': gen_string('alpha', 15), 'organization-id': module_org.id} ) new_name = gen_string('numeric') - new_host_collection = HostCollection.copy({'id': host_collection['id'], 'new-name': new_name}) - result = HostCollection.info({'id': new_host_collection[0]['id']}) + new_host_collection = module_target_sat.cli.HostCollection.copy( + {'id': host_collection['id'], 'new-name': new_name} + ) + result = module_target_sat.cli.HostCollection.info({'id': new_host_collection[0]['id']}) assert result['name'] == new_name @@ -295,8 +318,8 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi """ host_info = _make_fake_host_helper(module_org) - hc = make_host_collection({'organization-id': module_org.id}) - ActivationKey.add_host_collection( + hc = target_sat.cli_factory.make_host_collection({'organization-id': module_org.id}) + target_sat.cli.ActivationKey.add_host_collection( { 'id': module_ak_with_cv.id, 'organization-id': module_org.id, @@ -304,7 +327,7 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi } ) # add the registered instance host to collection - HostCollection.add_host( + target_sat.cli.HostCollection.add_host( {'id': hc['id'], 'organization-id': module_org.id, 'host-ids': host_info['id']} ) @@ -314,8 +337,10 @@ def test_positive_register_host_ak_with_host_collection(module_org, module_ak_wi client.register_contenthost(module_org.name, activation_key=module_ak_with_cv.name) assert client.subscribed # note: when registering the host, it should be automatically added to the host-collection - client_host = Host.info({'name': client.hostname}) - hosts = HostCollection.hosts({'id': hc['id'], 'organization-id': module_org.id}) + client_host = target_sat.cli.Host.info({'name': client.hostname}) + hosts = target_sat.cli.HostCollection.hosts( + {'id': hc['id'], 'organization-id': module_org.id} + ) assert len(hosts) == 2 expected_hosts_ids = {host_info['id'], client_host['id']} hosts_ids = {host['id'] for host in hosts} diff --git a/tests/foreman/cli/test_hostgroup.py b/tests/foreman/cli/test_hostgroup.py index d1fa7190b5a..3597c475984 100644 --- a/tests/foreman/cli/test_hostgroup.py +++ b/tests/foreman/cli/test_hostgroup.py @@ -20,25 +20,8 @@ from nailgun import entities import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - CLIFactoryError, - make_architecture, - make_content_view, - make_domain, - make_environment, - make_hostgroup, - make_lifecycle_environment, - make_location, - make_medium, - make_os, - make_partition_table, - make_subnet, -) -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.proxy import Proxy from robottelo.config import settings +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_id_list, invalid_values_list, @@ -92,20 +75,20 @@ def puppet_content_source(session_puppet_enabled_sat): @pytest.fixture(scope='module') def content_source(module_target_sat): 
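
A compact sketch of the host-collection lifecycle exercised by the hunks above, assuming it sits in the same module as _make_fake_host_helper and uses the module_org and module_target_sat fixtures from this patch; the test name itself is illustrative:

def test_host_collection_lifecycle_sketch(module_org, module_target_sat):
    host_col = module_target_sat.cli_factory.make_host_collection(
        {'organization-id': module_org.id}
    )
    new_system = _make_fake_host_helper(module_org, module_target_sat)
    # Add the fake host, confirm it is listed, then clean up the collection.
    module_target_sat.cli.HostCollection.add_host(
        {'host-ids': new_system['id'], 'id': host_col['id']}
    )
    hosts = module_target_sat.cli.HostCollection.hosts(
        {'id': host_col['id'], 'organization-id': module_org.id}
    )
    assert new_system['name'].lower() == hosts[0]['name']
    module_target_sat.cli.HostCollection.delete({'id': host_col['id']})
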
"""Return the proxy.""" - return Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] + return module_target_sat.cli.Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] @pytest.fixture(scope='module') -def hostgroup(content_source, module_org): +def hostgroup(content_source, module_org, module_target_sat): """Create a host group.""" - return make_hostgroup( + return module_target_sat.cli_factory.make_hostgroup( {'content-source-id': content_source['id'], 'organization-ids': module_org.id} ) @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_name(name): +def test_negative_create_with_name(name, module_target_sat): """Don't create an HostGroup with invalid data. :id: 853a6d43-129a-497b-94f0-08dc622862f8 @@ -115,7 +98,7 @@ def test_negative_create_with_name(name): :expectedresults: HostGroup is not created. """ with pytest.raises(CLIReturnCodeError): - HostGroup.create({'name': name}) + module_target_sat.cli.HostGroup.create({'name': name}) @pytest.mark.e2e @@ -140,27 +123,39 @@ def test_positive_create_with_multiple_entities_and_delete( with session_puppet_enabled_sat: # Common entities name = valid_hostgroups_list()[0] - loc = make_location() + loc = session_puppet_enabled_sat.cli_factory.make_location() org_2 = entities.Organization().create() orgs = [module_puppet_org, org_2] - env = make_environment({'location-ids': loc['id'], 'organization-ids': org_2.id}) - lce = make_lifecycle_environment({'organization-id': org_2.id}) + env = session_puppet_enabled_sat.cli_factory.make_environment( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + lce = session_puppet_enabled_sat.cli_factory.make_lifecycle_environment( + {'organization-id': org_2.id} + ) # Content View should be promoted to be used with LC Env - cv = make_content_view({'organization-id': org_2.id}) - ContentView.publish({'id': cv['id']}) - cv = ContentView.info({'id': cv['id']}) - ContentView.version_promote( + cv = session_puppet_enabled_sat.cli_factory.make_content_view({'organization-id': org_2.id}) + session_puppet_enabled_sat.cli.ContentView.publish({'id': cv['id']}) + cv = session_puppet_enabled_sat.cli.ContentView.info({'id': cv['id']}) + session_puppet_enabled_sat.cli.ContentView.version_promote( {'id': cv['versions'][0]['id'], 'to-lifecycle-environment-id': lce['id']} ) # Network - domain = make_domain({'location-ids': loc['id'], 'organization-ids': org_2.id}) - subnet = make_subnet({'domain-ids': domain['id'], 'organization-ids': org_2.id}) + domain = session_puppet_enabled_sat.cli_factory.make_domain( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + subnet = session_puppet_enabled_sat.cli_factory.make_subnet( + {'domain-ids': domain['id'], 'organization-ids': org_2.id} + ) # Operating System - arch = make_architecture() - ptable = make_partition_table({'location-ids': loc['id'], 'organization-ids': org_2.id}) - os = make_os({'architecture-ids': arch['id'], 'partition-table-ids': ptable['id']}) + arch = session_puppet_enabled_sat.cli_factory.make_architecture() + ptable = session_puppet_enabled_sat.cli_factory.make_partition_table( + {'location-ids': loc['id'], 'organization-ids': org_2.id} + ) + os = session_puppet_enabled_sat.cli_factory.make_os( + {'architecture-ids': arch['id'], 'partition-table-ids': ptable['id']} + ) os_full_name = "{} {}.{}".format(os['name'], os['major-version'], os['minor-version']) - media = make_medium( + media = session_puppet_enabled_sat.cli_factory.make_medium( { 
'operatingsystem-ids': os['id'], 'location-ids': loc['id'], @@ -188,7 +183,7 @@ def test_positive_create_with_multiple_entities_and_delete( 'puppet-classes': puppet_classes[0]['name'], 'query-organization': org_2.name, } - hostgroup = make_hostgroup(make_hostgroup_params) + hostgroup = session_puppet_enabled_sat.cli_factory.make_hostgroup(make_hostgroup_params) assert hostgroup['name'] == name assert {org.name for org in orgs} == set(hostgroup['organizations']) assert loc['name'] in hostgroup['locations'] @@ -206,13 +201,13 @@ def test_positive_create_with_multiple_entities_and_delete( assert puppet_content_source['name'] == hostgroup['content-source']['name'] assert puppet_classes[0]['name'] in hostgroup['puppetclasses'] # delete hostgroup - HostGroup.delete({'id': hostgroup['id']}) + session_puppet_enabled_sat.cli.HostGroup.delete({'id': hostgroup['id']}) with pytest.raises(CLIReturnCodeError): - HostGroup.info({'id': hostgroup['id']}) + session_puppet_enabled_sat.cli.HostGroup.info({'id': hostgroup['id']}) @pytest.mark.tier2 -def test_negative_create_with_content_source(module_org): +def test_negative_create_with_content_source(module_org, module_target_sat): """Attempt to create a hostgroup with invalid content source specified :id: 9fc1b777-36a3-4940-a9c8-aed7ff725371 @@ -224,7 +219,7 @@ def test_negative_create_with_content_source(module_org): :CaseLevel: Integration """ with pytest.raises(CLIFactoryError): - make_hostgroup( + module_target_sat.cli_factory.make_hostgroup( { 'content-source-id': gen_integer(10000, 99999), 'organization-ids': module_org.id, @@ -257,7 +252,7 @@ def test_positive_update_hostgroup_with_puppet( :CaseLevel: Integration """ with session_puppet_enabled_sat as puppet_sat: - hostgroup = make_hostgroup( + hostgroup = puppet_sat.cli_factory.make_hostgroup( { 'content-source-id': puppet_content_source['id'], 'organization-ids': module_puppet_org.id, @@ -270,13 +265,13 @@ def test_positive_update_hostgroup_with_puppet( @request.addfinalizer def _cleanup(): with session_puppet_enabled_sat: - HostGroup.delete({'id': hostgroup['id']}) + session_puppet_enabled_sat.cli.HostGroup.delete({'id': hostgroup['id']}) session_puppet_enabled_sat.cli.Proxy.delete({'id': new_content_source['id']}) assert len(hostgroup['puppetclasses']) == 0 new_name = valid_hostgroups_list()[0] puppet_class_names = [puppet['name'] for puppet in puppet_classes] - HostGroup.update( + session_puppet_enabled_sat.cli.HostGroup.update( { 'new-name': new_name, 'id': hostgroup['id'], @@ -284,7 +279,7 @@ def _cleanup(): 'puppet-classes': puppet_class_names, } ) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + hostgroup = session_puppet_enabled_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == new_name assert hostgroup['content-source']['name'] == new_content_source['name'] for puppet_class_name in puppet_class_names: @@ -311,7 +306,7 @@ def test_positive_update_hostgroup( :CaseLevel: Integration """ - hostgroup = make_hostgroup( + hostgroup = module_target_sat.cli_factory.make_hostgroup( { 'content-source-id': content_source['id'], 'organization-ids': module_org.id, @@ -321,20 +316,20 @@ def test_positive_update_hostgroup( new_content_source = module_target_sat.cli_factory.make_proxy() new_name = valid_hostgroups_list()[0] - HostGroup.update( + module_target_sat.cli.HostGroup.update( { 'new-name': new_name, 'id': hostgroup['id'], 'content-source-id': new_content_source['id'], } ) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + hostgroup = 
module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == new_name assert hostgroup['content-source']['name'] == new_content_source['name'] @pytest.mark.tier2 -def test_negative_update_content_source(hostgroup, content_source): +def test_negative_update_content_source(hostgroup, content_source, module_target_sat): """Attempt to update hostgroup's content source with invalid value :id: 4ffe6d18-3899-4bf1-acb2-d55ea09b7a26 @@ -347,13 +342,15 @@ def test_negative_update_content_source(hostgroup, content_source): :CaseLevel: Integration """ with pytest.raises(CLIReturnCodeError): - HostGroup.update({'id': hostgroup['id'], 'content-source-id': gen_integer(10000, 99999)}) - hostgroup = HostGroup.info({'id': hostgroup['id']}) + module_target_sat.cli.HostGroup.update( + {'id': hostgroup['id'], 'content-source-id': gen_integer(10000, 99999)} + ) + hostgroup = module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['content-source']['name'] == content_source['name'] @pytest.mark.tier2 -def test_negative_update_name(hostgroup): +def test_negative_update_name(hostgroup, module_target_sat): """Create HostGroup then fail to update its name :id: 42d208a4-f518-4ff2-9b7a-311adb460abd @@ -362,13 +359,13 @@ def test_negative_update_name(hostgroup): """ new_name = invalid_values_list()[0] with pytest.raises(CLIReturnCodeError): - HostGroup.update({'id': hostgroup['id'], 'new-name': new_name}) - result = HostGroup.info({'id': hostgroup['id']}) + module_target_sat.cli.HostGroup.update({'id': hostgroup['id'], 'new-name': new_name}) + result = module_target_sat.cli.HostGroup.info({'id': hostgroup['id']}) assert hostgroup['name'] == result['name'] @pytest.mark.tier2 -def test_negative_delete_by_id(): +def test_negative_delete_by_id(module_target_sat): """Create HostGroup then delete it by wrong ID :id: 047c9f1a-4dd6-4fdc-b7ed-37cc725c68d3 @@ -379,11 +376,11 @@ def test_negative_delete_by_id(): """ entity_id = invalid_id_list()[0] with pytest.raises(CLIReturnCodeError): - HostGroup.delete({'id': entity_id}) + module_target_sat.cli.HostGroup.delete({'id': entity_id}) @pytest.mark.tier2 -def test_positive_created_nested_hostgroup(module_org): +def test_positive_created_nested_hostgroup(module_org, module_target_sat): """Create a nested host group using multiple parent hostgroup paths. e.g. 
` hostgroup create --organization 'org_name' --name new3 --parent-title new_1/new_2` @@ -396,9 +393,11 @@ def test_positive_created_nested_hostgroup(module_org): :CaseImportance: Low """ - parent_hg = make_hostgroup({'organization-ids': module_org.id}) - nested = make_hostgroup({'organization-ids': module_org.id, 'parent': parent_hg['name']}) - sub_nested = make_hostgroup( + parent_hg = module_target_sat.cli_factory.make_hostgroup({'organization-ids': module_org.id}) + nested = module_target_sat.cli_factory.make_hostgroup( + {'organization-ids': module_org.id, 'parent': parent_hg['name']} + ) + sub_nested = module_target_sat.cli_factory.make_hostgroup( {'organization-ids': module_org.id, 'parent-title': f'{parent_hg["name"]}/{nested["name"]}'} ) assert sub_nested['title'] == f"{parent_hg['name']}/{nested['name']}/{sub_nested['name']}" diff --git a/tests/foreman/cli/test_http_proxy.py b/tests/foreman/cli/test_http_proxy.py index 50c71313a89..8fb4d6af0a4 100644 --- a/tests/foreman/cli/test_http_proxy.py +++ b/tests/foreman/cli/test_http_proxy.py @@ -19,13 +19,9 @@ from fauxfactory import gen_integer, gen_string, gen_url import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_product, make_repository -from robottelo.cli.http_proxy import HttpProxy -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.constants import FAKE_0_YUM_REPO_PACKAGES_COUNT +from robottelo.exceptions import CLIReturnCodeError @pytest.mark.tier1 @@ -206,7 +202,7 @@ def test_positive_environment_variable_unset_set(): @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_assign_http_proxy_to_products(module_org): +def test_positive_assign_http_proxy_to_products(module_org, module_target_sat): """Assign http_proxy to Products and perform product sync. 
:id: 6af7b2b8-15d5-4d9f-9f87-e76b404a966f @@ -217,14 +213,14 @@ def test_positive_assign_http_proxy_to_products(module_org): :CaseImportance: High """ # create HTTP proxies - http_proxy_a = HttpProxy.create( + http_proxy_a = module_target_sat.cli.HttpProxy.create( { 'name': gen_string('alpha', 15), 'url': settings.http_proxy.un_auth_proxy_url, 'organization-id': module_org.id, }, ) - http_proxy_b = HttpProxy.create( + http_proxy_b = module_target_sat.cli.HttpProxy.create( { 'name': gen_string('alpha', 15), 'url': settings.http_proxy.auth_proxy_url, @@ -234,9 +230,9 @@ def test_positive_assign_http_proxy_to_products(module_org): }, ) # Create products and repositories - product_a = make_product({'organization-id': module_org.id}) - product_b = make_product({'organization-id': module_org.id}) - repo_a1 = make_repository( + product_a = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + product_b = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo_a1 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_a['id'], @@ -244,7 +240,7 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-policy': 'none', }, ) - repo_a2 = make_repository( + repo_a2 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_a['id'], @@ -253,7 +249,7 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-id': http_proxy_a['id'], }, ) - repo_b1 = make_repository( + repo_b1 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_b['id'], @@ -261,7 +257,7 @@ def test_positive_assign_http_proxy_to_products(module_org): 'http-proxy-policy': 'none', }, ) - repo_b2 = make_repository( + repo_b2 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product_b['id'], @@ -269,7 +265,7 @@ def test_positive_assign_http_proxy_to_products(module_org): }, ) # Add http_proxy to products - Product.update_proxy( + module_target_sat.cli.Product.update_proxy( { 'ids': f"{product_a['id']},{product_b['id']}", 'http-proxy-policy': 'use_selected_http_proxy', @@ -277,18 +273,22 @@ def test_positive_assign_http_proxy_to_products(module_org): } ) for repo in repo_a1, repo_a2, repo_b1, repo_b2: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['http-proxy']['http-proxy-policy'] == 'use_selected_http_proxy' assert result['http-proxy']['id'] == http_proxy_b['id'] # Perform sync and verify packages count - Product.synchronize({'id': product_a['id'], 'organization-id': module_org.id}) - Product.synchronize({'id': product_b['id'], 'organization-id': module_org.id}) + module_target_sat.cli.Product.synchronize( + {'id': product_a['id'], 'organization-id': module_org.id} + ) + module_target_sat.cli.Product.synchronize( + {'id': product_b['id'], 'organization-id': module_org.id} + ) - Product.update_proxy( + module_target_sat.cli.Product.update_proxy( {'ids': f"{product_a['id']},{product_b['id']}", 'http-proxy-policy': 'none'} ) for repo in repo_a1, repo_a2, repo_b1, repo_b2: - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['http-proxy']['http-proxy-policy'] == 'none' assert int(result['content-counts']['packages']) == FAKE_0_YUM_REPO_PACKAGES_COUNT diff --git a/tests/foreman/cli/test_jobtemplate.py 
b/tests/foreman/cli/test_jobtemplate.py index 916fdea7a33..82e176b1bb9 100644 --- a/tests/foreman/cli/test_jobtemplate.py +++ b/tests/foreman/cli/test_jobtemplate.py @@ -20,9 +20,7 @@ import pytest from robottelo import ssh -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError, make_job_template -from robottelo.cli.job_template import JobTemplate +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import invalid_values_list, parametrized TEMPLATE_FILE = 'template_file.txt' @@ -36,7 +34,7 @@ def module_template(): @pytest.mark.tier1 -def test_positive_create_job_template(module_org): +def test_positive_create_job_template(module_org, module_target_sat): """Create a simple Job Template :id: a5a67b10-61b0-4362-b671-9d9f095c452c @@ -46,19 +44,19 @@ def test_positive_create_job_template(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - assert JobTemplate.info({'name': template_name}) is not None + assert module_target_sat.cli.JobTemplate.info({'name': template_name}) is not None @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_job_template_with_invalid_name(module_org, name): +def test_negative_create_job_template_with_invalid_name(module_org, name, module_target_sat): """Create Job Template with invalid name :id: eb51afd4-e7b3-42c3-81c3-6e18ef3d7efe @@ -71,7 +69,7 @@ def test_negative_create_job_template_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': name, @@ -81,7 +79,7 @@ def test_negative_create_job_template_with_invalid_name(module_org, name): @pytest.mark.tier1 -def test_negative_create_job_template_with_same_name(module_org): +def test_negative_create_job_template_with_same_name(module_org, module_target_sat): """Create Job Template with duplicate name :id: 66100c82-97f5-4300-a0c9-8cf041f7789f @@ -91,7 +89,7 @@ def test_negative_create_job_template_with_same_name(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, @@ -99,7 +97,7 @@ def test_negative_create_job_template_with_same_name(module_org): } ) with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, @@ -109,7 +107,7 @@ def test_negative_create_job_template_with_same_name(module_org): @pytest.mark.tier1 -def test_negative_create_empty_job_template(module_org): +def test_negative_create_empty_job_template(module_org, module_target_sat): """Create Job Template with empty template file :id: 749be863-94ae-4008-a242-c23f353ca404 @@ -120,7 +118,7 @@ def test_negative_create_empty_job_template(module_org): """ template_name = gen_string('alpha', 7) with pytest.raises(CLIFactoryError, match='Could not create the job template:'): - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, @@ 
-131,7 +129,7 @@ def test_negative_create_empty_job_template(module_org): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_job_template(module_org): +def test_positive_delete_job_template(module_org, module_target_sat): """Delete a job template :id: 33104c04-20e9-47aa-99da-4bf3414ea31a @@ -141,20 +139,20 @@ def test_positive_delete_job_template(module_org): :CaseImportance: Critical """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - JobTemplate.delete({'name': template_name}) + module_target_sat.cli.JobTemplate.delete({'name': template_name}) with pytest.raises(CLIReturnCodeError): - JobTemplate.info({'name': template_name}) + module_target_sat.cli.JobTemplate.info({'name': template_name}) @pytest.mark.tier2 -def test_positive_view_dump(module_org): +def test_positive_view_dump(module_org, module_target_sat): """Export contents of a job template :id: 25fcfcaa-fc4c-425e-919e-330e36195c4a @@ -163,12 +161,12 @@ def test_positive_view_dump(module_org): """ template_name = gen_string('alpha', 7) - make_job_template( + module_target_sat.cli_factory.make_job_template( { 'organizations': module_org.name, 'name': template_name, 'file': TEMPLATE_FILE, } ) - dumped_content = JobTemplate.dump({'name': template_name}) + dumped_content = module_target_sat.cli.JobTemplate.dump({'name': template_name}) assert len(dumped_content) > 0 diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index 3feed9bad43..9cb464961e7 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -20,17 +20,8 @@ from nailgun import entities import pytest -from robottelo.cli.auth import Auth -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - make_ldap_auth_source, - make_usergroup, - make_usergroup_external, -) -from robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.cli.role import Role -from robottelo.cli.usergroup import UserGroup, UserGroupExternal from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import generate_strings_list, parametrized @@ -57,7 +48,7 @@ class TestADAuthSource: @pytest.mark.upgrade @pytest.mark.parametrize('server_name', **parametrized(generate_strings_list())) @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_create_with_ad(self, ad_data, server_name): + def test_positive_create_with_ad(self, ad_data, server_name, module_target_sat): """Create/update/delete LDAP authentication with AD using names of different types :id: 093f6abc-91e7-4449-b484-71e4a14ac808 @@ -69,7 +60,7 @@ def test_positive_create_with_ad(self, ad_data, server_name): :CaseImportance: Critical """ ad_data = ad_data() - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -89,17 +80,17 @@ def test_positive_create_with_ad(self, ad_data, server_name): assert auth['server']['server'] == ad_data['ldap_hostname'] assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['ad'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + 
updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': new_name}) @pytest.mark.tier1 @pytest.mark.parametrize('member_group', ['foobargroup', 'foobar.group']) @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): + def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data, module_target_sat): """Verify the usergroup-sync functionality in AD Auth Source :id: 2e913e76-49c3-11eb-b4c6-d46d6dd3b5b2 @@ -117,7 +108,7 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): """ ad_data = ad_data() LOGEDIN_MSG = "Using configured credentials for user '{0}'." - auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': gen_string('alpha'), 'onthefly-register': 'true', @@ -132,24 +123,28 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): 'base-dn': ad_data['base_dn'], } ) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.make_usergroup() + module_target_sat.cli_factory.make_usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) - user_group = UserGroup.info({'id': user_group['id']}) - result = Auth.with_user( + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) + result = module_target_sat.cli.Auth.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).status() assert LOGEDIN_MSG.format(ad_data['ldap_user_name']) in result[0]['message'] - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - user_group = UserGroup.info({'id': user_group['id']}) - list = Role.with_user( + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) + list = module_target_sat.cli.Role.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).list() assert len(list) > 1 @@ -176,7 +171,7 @@ def _clean_up_previous_ldap(self): @pytest.mark.upgrade @pytest.mark.e2e @pytest.mark.usefixtures("ldap_tear_down") - def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): + def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name, module_target_sat): """CRUD LDAP authentication with FreeIPA :id: 6cb54405-b579-4020-bf99-cb811a6aa28b @@ -188,7 +183,7 @@ def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): :CaseImportance: High """ - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -208,16 +203,16 @@ def test_positive_end_to_end_with_ipa(self, default_ipa_host, server_name): assert auth['server']['server'] == 
default_ipa_host.hostname assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['ipa'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': new_name}) @pytest.mark.tier3 @pytest.mark.usefixtures("ldap_tear_down") - def test_usergroup_sync_with_refresh(self, default_ipa_host): + def test_usergroup_sync_with_refresh(self, default_ipa_host, module_target_sat): """Verify the refresh functionality in Ldap Auth Source :id: c905eb80-2bd0-11ea-abc3-ddb7dbb3c930 @@ -233,7 +228,7 @@ def test_usergroup_sync_with_refresh(self, default_ipa_host): member_group = 'foreman_group' LOGEDIN_MSG = "Using configured credentials for user '{0}'." auth_source_name = gen_string('alpha') - auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': auth_source_name, 'onthefly-register': 'true', @@ -250,55 +245,61 @@ def test_usergroup_sync_with_refresh(self, default_ipa_host): 'groups-base': ipa_group_base_dn, } ) - auth_source = LDAPAuthSource.info({'id': auth_source['server']['id']}) + auth_source = module_target_sat.cli.LDAPAuthSource.info({'id': auth_source['server']['id']}) # Adding User in IPA UserGroup default_ipa_host.add_user_to_usergroup(member_username, member_group) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - ext_user_group = make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.make_usergroup() + ext_user_group = module_target_sat.cli_factory.make_usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) assert ext_user_group['auth-source'] == auth_source['server']['name'] - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 - result = Auth.with_user( + result = module_target_sat.cli.Auth.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).status() assert LOGEDIN_MSG.format(member_username) in result[0]['message'] with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - list = Role.with_user( + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + list = module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert len(list) > 1 - user_group = 
UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 1 assert user_group['users'][0] == member_username # Removing User in IPA UserGroup default_ipa_host.remove_user_from_usergroup(member_username, member_group) - UserGroupExternal.refresh({'user-group-id': user_group['id'], 'name': member_group}) - user_group = UserGroup.info({'id': user_group['id']}) + module_target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': user_group['id'], 'name': member_group} + ) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message @pytest.mark.tier3 @pytest.mark.usefixtures("ldap_tear_down") - def test_usergroup_with_usergroup_sync(self, default_ipa_host): + def test_usergroup_with_usergroup_sync(self, default_ipa_host, module_target_sat): """Verify the usergroup-sync functionality in Ldap Auth Source :id: 2b63e886-2c53-11ea-9da5-db3ae0527554 @@ -314,7 +315,7 @@ def test_usergroup_with_usergroup_sync(self, default_ipa_host): member_group = 'foreman_group' LOGEDIN_MSG = "Using configured credentials for user '{0}'." auth_source_name = gen_string('alpha') - auth_source = make_ldap_auth_source( + auth_source = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': auth_source_name, 'onthefly-register': 'true', @@ -331,43 +332,45 @@ def test_usergroup_with_usergroup_sync(self, default_ipa_host): 'groups-base': ipa_group_base_dn, } ) - auth_source = LDAPAuthSource.info({'id': auth_source['server']['id']}) + auth_source = module_target_sat.cli.LDAPAuthSource.info({'id': auth_source['server']['id']}) # Adding User in IPA UserGroup default_ipa_host.add_user_to_usergroup(member_username, member_group) - viewer_role = Role.info({'name': 'Viewer'}) - user_group = make_usergroup() - ext_user_group = make_usergroup_external( + viewer_role = module_target_sat.cli.Role.info({'name': 'Viewer'}) + user_group = module_target_sat.cli_factory.make_usergroup() + ext_user_group = module_target_sat.cli_factory.make_usergroup_external( { 'auth-source-id': auth_source['server']['id'], 'user-group-id': user_group['id'], 'name': member_group, } ) - UserGroup.add_role({'id': user_group['id'], 'role-id': viewer_role['id']}) + module_target_sat.cli.UserGroup.add_role( + {'id': user_group['id'], 'role-id': viewer_role['id']} + ) assert ext_user_group['auth-source'] == auth_source['server']['name'] - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 - result = Auth.with_user( + result = module_target_sat.cli.Auth.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).status() assert LOGEDIN_MSG.format(member_username) in result[0]['message'] - list = Role.with_user( + list = module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert len(list) > 1 - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 1 assert user_group['users'][0] == member_username # Removing User in IPA UserGroup 
default_ipa_host.remove_user_from_usergroup(member_username, member_group) with pytest.raises(CLIReturnCodeError) as error: - Role.with_user( + module_target_sat.cli.Role.with_user( username=member_username, password=default_ipa_host.ldap_user_passwd ).list() assert 'Missing one of the required permissions' in error.value.message - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['users']) == 0 @@ -379,7 +382,9 @@ class TestOpenLdapAuthSource: @pytest.mark.e2e @pytest.mark.parametrize('server_name', **parametrized(generate_strings_list())) @pytest.mark.upgrade - def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): + def test_positive_end_to_end_with_open_ldap( + self, open_ldap_data, server_name, module_target_sat + ): """CRUD LDAP Operations with OpenLDAP :id: f84db334-0189-11eb-846c-d46d6dd3b5b2 @@ -390,7 +395,7 @@ def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): :CaseImportance: High """ - auth = make_ldap_auth_source( + auth = module_target_sat.cli_factory.make_ldap_auth_source( { 'name': server_name, 'onthefly-register': 'true', @@ -409,9 +414,9 @@ def test_positive_end_to_end_with_open_ldap(self, open_ldap_data, server_name): assert auth['server']['server'] == open_ldap_data['ldap_hostname'] assert auth['server']['server-type'] == LDAP_SERVER_TYPE['CLI']['posix'] new_name = gen_string('alpha') - LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) - updated_auth = LDAPAuthSource.info({'id': auth['server']['id']}) + module_target_sat.cli.LDAPAuthSource.update({'name': server_name, 'new-name': new_name}) + updated_auth = module_target_sat.cli.LDAPAuthSource.info({'id': auth['server']['id']}) assert updated_auth['server']['name'] == new_name - LDAPAuthSource.delete({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.delete({'name': new_name}) with pytest.raises(CLIReturnCodeError): - LDAPAuthSource.info({'name': new_name}) + module_target_sat.cli.LDAPAuthSource.info({'name': new_name}) diff --git a/tests/foreman/cli/test_lifecycleenvironment.py b/tests/foreman/cli/test_lifecycleenvironment.py index b9a77b5220f..019a84d7662 100644 --- a/tests/foreman/cli/test_lifecycleenvironment.py +++ b/tests/foreman/cli/test_lifecycleenvironment.py @@ -21,15 +21,13 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_lifecycle_environment, make_org -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment from robottelo.constants import ENVIRONMENT +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='class') -def module_lce(module_org): - return make_lifecycle_environment( +def module_lce(module_org, class_target_sat): + return class_target_sat.cli_factory.make_lifecycle_environment( { 'name': module_org.name, 'organization-id': module_org.id, @@ -40,7 +38,7 @@ def module_lce(module_org): # Issues validation @pytest.mark.tier2 -def test_positive_list_subcommand(module_org): +def test_positive_list_subcommand(module_org, module_target_sat): """List subcommand returns standard output :id: cca249d0-fb77-422b-aae3-3361887269db @@ -55,12 +53,14 @@ def test_positive_list_subcommand(module_org): # List available lifecycle environments using default Table # output cmd = 'lifecycle-environment list --organization-id="%s"' - result = LifecycleEnvironment.execute(cmd % module_org.id, None, None, False) + 
result = module_target_sat.cli.LifecycleEnvironment.execute( + cmd % module_org.id, None, None, False + ) assert len(result) > 0 @pytest.mark.tier2 -def test_positive_search_lce_via_UTF8(module_org): +def test_positive_search_lce_via_UTF8(module_org, module_target_sat): """Search lifecycle environment via its name containing UTF-8 chars @@ -74,15 +74,18 @@ def test_positive_search_lce_via_UTF8(module_org): """ test_data = {'name': gen_string('utf8', 15), 'organization-id': module_org.id} # Can we find the new object - result = LifecycleEnvironment.info( - {'name': make_lifecycle_environment(test_data)['name'], 'organization-id': module_org.id} + result = module_target_sat.cli.LifecycleEnvironment.info( + { + 'name': module_target_sat.cli_factory.make_lifecycle_environment(test_data)['name'], + 'organization-id': module_org.id, + } ) assert result['name'] == test_data['name'] # CRUD @pytest.mark.tier1 -def test_positive_lce_crud(module_org): +def test_positive_lce_crud(module_org, module_target_sat): """CRUD test case for lifecycle environment for name, description, label, registry name pattern, and unauthenticated pull @@ -103,7 +106,7 @@ def test_positive_lce_crud(module_org): ).format(gen_string('alpha', 5)) # create - lce = make_lifecycle_environment( + lce = module_target_sat.cli_factory.make_lifecycle_environment( { 'organization': org_name, 'organization-id': module_org.id, @@ -120,7 +123,7 @@ def test_positive_lce_crud(module_org): assert lce['organization'] == org_name # update - LifecycleEnvironment.update( + module_target_sat.cli.LifecycleEnvironment.update( { 'id': lce['id'], 'new-name': new_name, @@ -129,19 +132,23 @@ def test_positive_lce_crud(module_org): 'registry-name-pattern': registry_name_pattern, } ) - lce = LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + lce = module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) assert lce['name'] == new_name assert lce['registry-name-pattern'] == registry_name_pattern assert lce['unauthenticated-pull'] == 'true' # delete - LifecycleEnvironment.delete({'id': lce['id']}) + module_target_sat.cli.LifecycleEnvironment.delete({'id': lce['id']}) with pytest.raises(CLIReturnCodeError): - LifecycleEnvironment.info({'id': lce['id'], 'organization-id': module_org.id}) + module_target_sat.cli.LifecycleEnvironment.info( + {'id': lce['id'], 'organization-id': module_org.id} + ) @pytest.mark.tier1 -def test_positive_create_with_organization_label(module_org): +def test_positive_create_with_organization_label(module_org, module_target_sat): """Create lifecycle environment, specifying organization label :id: eb5cfc71-c83d-45ca-ba34-9ef79197691d @@ -152,14 +159,14 @@ def test_positive_create_with_organization_label(module_org): :CaseImportance: Critical """ - new_lce = make_lifecycle_environment( + new_lce = module_target_sat.cli_factory.make_lifecycle_environment( {'name': gen_string('alpha'), 'organization-label': module_org.label} ) assert new_lce['organization'] == module_org.label @pytest.mark.tier1 -def test_positve_list_paths(module_org): +def test_positve_list_paths(module_org, module_target_sat): """List the environment paths under a given organization :id: 71600d6b-1ef4-4b88-8e9b-eb2481ee1fe2 @@ -169,9 +176,11 @@ def test_positve_list_paths(module_org): :CaseImportance: Critical """ - lc_env = make_lifecycle_environment({'organization-id': module_org.id}) + lc_env = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': 
module_org.id} + ) # Add paths to lifecycle environments - result = LifecycleEnvironment.paths( + result = module_target_sat.cli.LifecycleEnvironment.paths( {'organization-id': module_org.id, 'permission-type': 'readable'} ) assert f"Library >> {lc_env['name']}" in result @@ -181,17 +190,17 @@ class LifeCycleEnvironmentPaginationTestCase: """Test class for LifeCycle Environment pagination tests""" @classmethod - def setUpClass(cls): + def setUpClass(cls, target_sat): """Create organization and lifecycle environments to reuse in tests""" super().setUpClass() cls.lces_count = 25 - cls.org = make_org() + cls.org = target_sat.cli_factory.make_org() env_base_name = gen_string('alpha') last_env_name = ENVIRONMENT cls.env_names = [last_env_name] for env_index in range(cls.lces_count): env_name = f'{env_base_name}-{env_index}' - make_lifecycle_environment( + target_sat.cli_factory.make_lifecycle_environment( {'name': env_name, 'organization-id': cls.org['id'], 'prior': last_env_name} ) last_env_name = env_name @@ -200,7 +209,7 @@ def setUpClass(cls): cls.lces_count += 1 # include default 'Library' lce @pytest.mark.tier2 - def test_positive_list_all_with_per_page(self): + def test_positive_list_all_with_per_page(self, target_sat): """Attempt to list more than 20 lifecycle environment with per-page option. @@ -213,7 +222,7 @@ def test_positive_list_all_with_per_page(self): """ per_page_count = self.lces_count + 5 - lifecycle_environments = LifecycleEnvironment.list( + lifecycle_environments = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'per-page': per_page_count} ) @@ -222,7 +231,7 @@ def test_positive_list_all_with_per_page(self): assert env_name_set == set(self.env_names) @pytest.mark.tier2 - def test_positive_list_with_pagination(self): + def test_positive_list_with_pagination(self, target_sat): """Make sure lces list can be displayed with different items per page value @@ -240,14 +249,14 @@ def test_positive_list_with_pagination(self): # Verify the first page contains exactly the same items count # as `per-page` value with self.subTest(per_page): - lces = LifecycleEnvironment.list( + lces = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'per-page': per_page} ) assert len(lces) == per_page # Verify pagination and total amount of pages by checking the # items count on the last page last_page = ceil(self.lces_count / per_page) - lces = LifecycleEnvironment.list( + lces = target_sat.cli.LifecycleEnvironment.list( {'organization-id': self.org['id'], 'page': last_page, 'per-page': per_page} ) assert len(lces) == self.lces_count % per_page or per_page diff --git a/tests/foreman/cli/test_location.py b/tests/foreman/cli/test_location.py index ee157c4ef76..4a90e396788 100644 --- a/tests/foreman/cli/test_location.py +++ b/tests/foreman/cli/test_location.py @@ -19,29 +19,7 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.domain import Domain -from robottelo.cli.environment import Environment -from robottelo.cli.factory import ( - CLIFactoryError, - make_compute_resource, - make_domain, - make_environment, - make_hostgroup, - make_location, - make_medium, - make_subnet, - make_template, - make_user, -) -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.location import Location -from robottelo.cli.medium import Medium -from robottelo.cli.proxy import Proxy -from robottelo.cli.subnet import Subnet 
-from robottelo.cli.template import Template -from robottelo.cli.user import User +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError def _proxy(request, target_sat): @@ -50,107 +28,107 @@ def _proxy(request, target_sat): @request.addfinalizer def _cleanup(): - if Proxy.exists(search=('name', proxy['name'])): - Proxy.delete(options={'id': proxy['id']}) + if target_sat.cli.Proxy.exists(search=('name', proxy['name'])): + target_sat.cli.Proxy.delete(options={'id': proxy['id']}) return proxy -def _location(request, options=None): - location = make_location(options=options) +def _location(request, target_sat, options=None): + location = target_sat.cli_factory.make_location(options=options) @request.addfinalizer def _cleanup(): - if Location.exists(search=('id', location['id'])): - Location.delete(options={'id': location['id']}) + if target_sat.cli.Location.exists(search=('id', location['id'])): + target_sat.cli.Location.delete(options={'id': location['id']}) return location -def _subnet(request): - subnet = make_subnet() +def _subnet(request, target_sat): + subnet = target_sat.cli_factory.make_subnet() @request.addfinalizer def _cleanup(): - if Subnet.exists(search=('name', subnet['name'])): - Subnet.delete(options={'id': subnet['id']}) + if target_sat.cli.Subnet.exists(search=('name', subnet['name'])): + target_sat.cli.Subnet.delete(options={'id': subnet['id']}) return subnet -def _environment(request): - environment = make_environment() +def _environment(request, target_sat): + environment = target_sat.cli_factory.make_environment() @request.addfinalizer def _cleanup(): - if Environment.exists(search=('name', environment['name'])): - Environment.delete(options={'id': environment['id']}) + if target_sat.cli.Environment.exists(search=('name', environment['name'])): + target_sat.cli.Environment.delete(options={'id': environment['id']}) return environment -def _domain(request): - domain = make_domain() +def _domain(request, target_sat): + domain = target_sat.cli_factory.make_domain() @request.addfinalizer def _cleanup(): - if Domain.exists(search=('name', domain['name'])): - Domain.delete(options={'id': domain['id']}) + if target_sat.cli.Domain.exists(search=('name', domain['name'])): + target_sat.cli.Domain.delete(options={'id': domain['id']}) return domain -def _medium(request): - medium = make_medium() +def _medium(request, target_sat): + medium = target_sat.cli_factory.make_medium() @request.addfinalizer def _cleanup(): - if Medium.exists(search=('name', medium['name'])): - Medium.delete(options={'id': medium['id']}) + if target_sat.cli.Medium.exists(search=('name', medium['name'])): + target_sat.cli.Medium.delete(options={'id': medium['id']}) return medium -def _host_group(request): - host_group = make_hostgroup() +def _host_group(request, target_sat): + host_group = target_sat.cli_factory.make_hostgroup() @request.addfinalizer def _cleanup(): - if HostGroup.exists(search=('id', host_group['id'])): - HostGroup.delete(options={'id': host_group['id']}) + if target_sat.cli.HostGroup.exists(search=('id', host_group['id'])): + target_sat.cli.HostGroup.delete(options={'id': host_group['id']}) return host_group -def _compute_resource(request): - compute_resource = make_compute_resource() +def _compute_resource(request, target_sat): + compute_resource = target_sat.cli_factory.make_compute_resource() @request.addfinalizer def _cleanup(): - if ComputeResource.exists(search=('id', compute_resource['id'])): - ComputeResource.delete(options={'id': compute_resource['id']}) + if 
target_sat.cli.ComputeResource.exists(search=('id', compute_resource['id'])): + target_sat.cli.ComputeResource.delete(options={'id': compute_resource['id']}) return compute_resource -def _template(request): - template = make_template() +def _template(request, target_sat): + template = target_sat.cli_factory.make_template() @request.addfinalizer def _cleanup(): - if Template.exists(search=('name', template['name'])): - Template.delete(options={'id': template['id']}) + if target_sat.cli.Template.exists(search=('name', template['name'])): + target_sat.cli.Template.delete(options={'id': template['id']}) return template -def _user(request): - user = make_user() +def _user(request, target_sat): + user = target_sat.cli_factory.make_user() @request.addfinalizer def _cleanup(): - if User.exists(search=('login', user['login'])): - User.delete(options={'id': user['id']}) + if target_sat.cli.User.exists(search=('login', user['login'])): + target_sat.cli.User.delete(options={'id': user['id']}) return user @@ -161,7 +139,7 @@ class TestLocation: @pytest.mark.e2e @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_create_update_delete(self, request): + def test_positive_create_update_delete(self, request, target_sat): """Create new location with attributes, update and delete it :id: e1844d9d-ec4a-44b3-9743-e932cc70020d @@ -176,8 +154,8 @@ def test_positive_create_update_delete(self, request): """ # Create description = gen_string('utf8') - subnet = _subnet(request) - domains = [_domain(request) for _ in range(0, 2)] + subnet = _subnet(request, target_sat) + domains = [_domain(request, target_sat) for _ in range(0, 2)] host_groups = [_host_group(request) for _ in range(0, 3)] medium = _medium(request) compute_resource = _compute_resource(request) @@ -221,25 +199,25 @@ def test_positive_create_update_delete(self, request): assert location['users'][0] == user['login'] # Update - Location.update( + target_sat.cli.Location.update( { 'id': location['id'], 'domain-ids': domains[1]['id'], 'hostgroup-ids': [host_groups[1]['id'], host_groups[2]['id']], } ) - location = Location.info({'id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert host_groups[1]['name'] in location['hostgroups'] assert host_groups[2]['name'] in location['hostgroups'] assert location['domains'][0] == domains[1]['name'] # Delete - Location.delete({'id': location['id']}) + target_sat.cli.Location.delete({'id': location['id']}) with pytest.raises(CLIReturnCodeError): - Location.info({'id': location['id']}) + target_sat.cli.Location.info({'id': location['id']}) @pytest.mark.tier1 - def test_positive_create_with_parent(self, request): + def test_positive_create_with_parent(self, request, target_sat): """Create new location with parent location specified :id: 49b34733-103a-4fee-818b-6a3386253af1 @@ -252,12 +230,12 @@ def test_positive_create_with_parent(self, request): expected parent location set """ - parent_location = _location(request) + parent_location = _location(request, target_sat) location = _location(request, {'parent-id': parent_location['id']}) assert location['parent'] == parent_location['name'] @pytest.mark.tier1 - def test_negative_create_with_same_name(self, request): + def test_negative_create_with_same_name(self, request, target_sat): """Try to create location using same name twice :id: 4fbaea41-9775-40a2-85a5-4dc05cc95134 @@ -267,13 +245,13 @@ def test_negative_create_with_same_name(self, request): :CaseImportance: Critical """ name = gen_string('utf8') - location = 
_location(request, options={'name': name}) + location = _location(request, target_sat, options={'name': name}) assert location['name'] == name with pytest.raises(CLIFactoryError): - _location(request, options={'name': name}) + _location(request, target_sat, options={'name': name}) @pytest.mark.tier1 - def test_negative_create_with_user_by_name(self, request): + def test_negative_create_with_user_by_name(self, request, target_sat): """Try to create new location with incorrect user assigned to it Use user login as a parameter @@ -284,7 +262,7 @@ def test_negative_create_with_user_by_name(self, request): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - _location(request, options={'users': gen_string('utf8', 80)}) + _location(request, target_sat, options={'users': gen_string('utf8', 80)}) @pytest.mark.run_in_one_thread @pytest.mark.tier2 @@ -300,19 +278,23 @@ def test_positive_add_and_remove_capsule(self, request, target_sat): :CaseLevel: Integration """ - location = _location(request) + location = _location(request, target_sat) proxy = _proxy(request, target_sat) - Location.add_smart_proxy({'name': location['name'], 'smart-proxy-id': proxy['id']}) - location = Location.info({'name': location['name']}) + target_sat.cli.Location.add_smart_proxy( + {'name': location['name'], 'smart-proxy-id': proxy['id']} + ) + location = target_sat.cli.Location.info({'name': location['name']}) assert proxy['name'] in location['smart-proxies'] - Location.remove_smart_proxy({'name': location['name'], 'smart-proxy': proxy['name']}) - location = Location.info({'name': location['name']}) + target_sat.cli.Location.remove_smart_proxy( + {'name': location['name'], 'smart-proxy': proxy['name']} + ) + location = target_sat.cli.Location.info({'name': location['name']}) assert proxy['name'] not in location['smart-proxies'] @pytest.mark.tier1 - def test_positive_add_update_remove_parameter(self, request): + def test_positive_add_update_remove_parameter(self, request, target_sat): """Add, update and remove parameter to location :id: 61b564f2-a42a-48de-833d-bec3a127d0f5 @@ -325,30 +307,30 @@ def test_positive_add_update_remove_parameter(self, request): param_name = gen_string('alpha') param_value = gen_string('alpha') param_new_value = gen_string('alpha') - location = _location(request) - Location.set_parameter( + location = _location(request, target_sat) + target_sat.cli.Location.set_parameter( {'name': param_name, 'value': param_value, 'location-id': location['id']} ) - location = Location.info({'id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 1 assert param_value == location['parameters'][param_name.lower()] # Update - Location.set_parameter( + target_sat.cli.Location.set_parameter( {'name': param_name, 'value': param_new_value, 'location': location['name']} ) - location = Location.info({'id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 1 assert param_new_value == location['parameters'][param_name.lower()] # Remove - Location.delete_parameter({'name': param_name, 'location': location['name']}) - location = Location.info({'id': location['id']}) + target_sat.cli.Location.delete_parameter({'name': param_name, 'location': location['name']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert len(location['parameters']) == 0 assert param_name.lower() not in location['parameters'] @pytest.mark.tier2 - def 
test_positive_update_parent(self, request): + def test_positive_update_parent(self, request, target_sat): """Update location's parent location :id: 34522d1a-1190-48d8-9285-fc9a9bcf6c6a @@ -361,16 +343,16 @@ def test_positive_update_parent(self, request): :CaseImportance: High """ - parent_location = _location(request) + parent_location = _location(request, target_sat) location = _location(request, {'parent-id': parent_location['id']}) - parent_location_2 = _location(request) - Location.update({'id': location['id'], 'parent-id': parent_location_2['id']}) - location = Location.info({'id': location['id']}) + parent_location_2 = _location(request, target_sat) + target_sat.cli.Location.update({'id': location['id'], 'parent-id': parent_location_2['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert location['parent'] == parent_location_2['name'] @pytest.mark.tier1 - def test_negative_update_parent_with_child(self, request): + def test_negative_update_parent_with_child(self, request, target_sat): """Attempt to set child location as a parent and vice versa :id: fd4cb1cf-377f-4b48-b7f4-d4f6ca56f544 @@ -383,17 +365,19 @@ def test_negative_update_parent_with_child(self, request): :CaseImportance: High """ - parent_location = _location(request) + parent_location = _location(request, target_sat) location = _location(request, {'parent-id': parent_location['id']}) # set parent as child with pytest.raises(CLIReturnCodeError): - Location.update({'id': parent_location['id'], 'parent-id': location['id']}) - parent_location = Location.info({'id': parent_location['id']}) + target_sat.cli.Location.update( + {'id': parent_location['id'], 'parent-id': location['id']} + ) + parent_location = target_sat.cli.Location.info({'id': parent_location['id']}) assert parent_location.get('parent') is None # set child as parent with pytest.raises(CLIReturnCodeError): - Location.update({'id': location['id'], 'parent-id': location['id']}) - location = Location.info({'id': location['id']}) + target_sat.cli.Location.update({'id': location['id'], 'parent-id': location['id']}) + location = target_sat.cli.Location.info({'id': location['id']}) assert location['parent'] == parent_location['name'] diff --git a/tests/foreman/cli/test_logging.py b/tests/foreman/cli/test_logging.py index 45692e2e2a2..61ead1bee0e 100644 --- a/tests/foreman/cli/test_logging.py +++ b/tests/foreman/cli/test_logging.py @@ -22,9 +22,6 @@ from nailgun import entities import pytest -from robottelo.cli.factory import make_product, make_repository -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.logging import logger @@ -238,10 +235,10 @@ def test_positive_logging_from_pulp3(module_org, target_sat): name = product_name label = product_name desc = product_name - product = make_product( + product = target_sat.cli_factory.make_product( {'description': desc, 'label': label, 'name': name, 'organization-id': module_org.id}, ) - repo = make_repository( + repo = target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], @@ -249,8 +246,8 @@ def test_positive_logging_from_pulp3(module_org, target_sat): }, ) # Synchronize the repository - Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) 
# Get the id of repository sync from task task_out = target_sat.execute( "hammer task list | grep -F \'Synchronize repository {\"text\"=>\"repository\'" diff --git a/tests/foreman/cli/test_medium.py b/tests/foreman/cli/test_medium.py index d9f835fdb0b..5ded3225e84 100644 --- a/tests/foreman/cli/test_medium.py +++ b/tests/foreman/cli/test_medium.py @@ -19,9 +19,7 @@ from fauxfactory import gen_alphanumeric import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_location, make_medium, make_org, make_os -from robottelo.cli.medium import Medium +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import parametrized, valid_data_list URL = "http://mirror.fakeos.org/%s/$major.$minor/os/$arch" @@ -33,7 +31,7 @@ class TestMedium: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list().values())) - def test_positive_crud_with_name(self, name): + def test_positive_crud_with_name(self, name, module_target_sat): """Check if Medium can be created, updated, deleted :id: 66b749b2-0248-47a8-b78f-3366f3804b29 @@ -45,18 +43,18 @@ def test_positive_crud_with_name(self, name): :CaseImportance: Critical """ - medium = make_medium({'name': name}) + medium = module_target_sat.cli_factory.make_medium({'name': name}) assert medium['name'] == name new_name = gen_alphanumeric(6) - Medium.update({'name': medium['name'], 'new-name': new_name}) - medium = Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.update({'name': medium['name'], 'new-name': new_name}) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert medium['name'] == new_name - Medium.delete({'id': medium['id']}) + module_target_sat.cli.Medium.delete({'id': medium['id']}) with pytest.raises(CLIReturnCodeError): - Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.info({'id': medium['id']}) @pytest.mark.tier1 - def test_positive_create_with_location(self): + def test_positive_create_with_location(self, module_target_sat): """Check if medium with location can be created :id: cbc6c586-fae7-4bb9-aeb1-e30158f16a98 @@ -66,12 +64,12 @@ def test_positive_create_with_location(self): :CaseImportance: Medium """ - location = make_location() - medium = make_medium({'location-ids': location['id']}) + location = module_target_sat.cli_factory.make_location() + medium = module_target_sat.cli_factory.make_medium({'location-ids': location['id']}) assert location['name'] in medium['locations'] @pytest.mark.tier1 - def test_positive_create_with_organization_by_id(self): + def test_positive_create_with_organization_by_id(self, module_target_sat): """Check if medium with organization can be created :id: 631bb6ed-e42b-482a-83f0-f6ce0f20729a @@ -81,13 +79,13 @@ def test_positive_create_with_organization_by_id(self): :CaseImportance: Medium """ - org = make_org() - medium = make_medium({'organization-ids': org['id']}) + org = module_target_sat.cli_factory.make_org() + medium = module_target_sat.cli_factory.make_medium({'organization-ids': org['id']}) assert org['name'] in medium['organizations'] @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_remove_os(self): + def test_positive_remove_os(self, module_target_sat): """Check if Medium can be associated with operating system and then removed from media :id: 23b5b55b-3624-440c-8001-75c7c5a5a004 @@ -97,11 +95,15 @@ def test_positive_remove_os(self): :CaseLevel: Integration """ - medium = make_medium() - os = make_os() - Medium.add_operating_system({'id': medium['id'], 
'operatingsystem-id': os['id']}) - medium = Medium.info({'id': medium['id']}) + medium = module_target_sat.cli_factory.make_medium() + os = module_target_sat.cli_factory.make_os() + module_target_sat.cli.Medium.add_operating_system( + {'id': medium['id'], 'operatingsystem-id': os['id']} + ) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert os['title'] in medium['operating-systems'] - Medium.remove_operating_system({'id': medium['id'], 'operatingsystem-id': os['id']}) - medium = Medium.info({'id': medium['id']}) + module_target_sat.cli.Medium.remove_operating_system( + {'id': medium['id'], 'operatingsystem-id': os['id']} + ) + medium = module_target_sat.cli.Medium.info({'id': medium['id']}) assert os['name'] not in medium['operating-systems'] diff --git a/tests/foreman/cli/test_model.py b/tests/foreman/cli/test_model.py index fe8050d13e9..1e14cc975cd 100644 --- a/tests/foreman/cli/test_model.py +++ b/tests/foreman/cli/test_model.py @@ -19,9 +19,7 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_model -from robottelo.cli.model import Model +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_id_list, invalid_values_list, @@ -34,9 +32,9 @@ class TestModel: """Test class for Model CLI""" @pytest.fixture() - def class_model(self): + def class_model(self, target_sat): """Shared model for tests""" - return make_model() + return target_sat.cli_factory.make_model() @pytest.mark.tier1 @pytest.mark.upgrade @@ -44,7 +42,7 @@ def class_model(self): 'name, new_name', **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) ) - def test_positive_crud_with_name(self, name, new_name): + def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Successfully creates, updates and deletes a Model. :id: 9ca9d5ff-750a-4d60-91b2-4c4375f0e35f @@ -55,17 +53,17 @@ def test_positive_crud_with_name(self, name, new_name): :CaseImportance: High """ - model = make_model({'name': name}) + model = module_target_sat.cli_factory.make_model({'name': name}) assert model['name'] == name - Model.update({'id': model['id'], 'new-name': new_name}) - model = Model.info({'id': model['id']}) + module_target_sat.cli.Model.update({'id': model['id'], 'new-name': new_name}) + model = module_target_sat.cli.Model.info({'id': model['id']}) assert model['name'] == new_name - Model.delete({'id': model['id']}) + module_target_sat.cli.Model.delete({'id': model['id']}) with pytest.raises(CLIReturnCodeError): - Model.info({'id': model['id']}) + module_target_sat.cli.Model.info({'id': model['id']}) @pytest.mark.tier1 - def test_positive_create_with_vendor_class(self): + def test_positive_create_with_vendor_class(self, module_target_sat): """Check if Model can be created with specific vendor class :id: c36d3490-cd12-4f5f-a453-2ae5d0404496 @@ -75,12 +73,12 @@ def test_positive_create_with_vendor_class(self): :CaseImportance: Medium """ vendor_class = gen_string('utf8') - model = make_model({'vendor-class': vendor_class}) + model = module_target_sat.cli_factory.make_model({'vendor-class': vendor_class}) assert model['vendor-class'] == vendor_class @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, name, module_target_sat): """Don't create an Model with invalid data. 
:id: b2eade66-b612-47e7-bfcc-6e363023f498 @@ -92,11 +90,11 @@ def test_negative_create_with_name(self, name): :CaseImportance: High """ with pytest.raises(CLIReturnCodeError): - Model.create({'name': name}) + module_target_sat.cli.Model.create({'name': name}) @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, class_model, new_name): + def test_negative_update_name(self, class_model, new_name, module_target_sat): """Fail to update shared model name :id: 98020a4a-1789-4df3-929c-6c132b57f5a1 @@ -108,13 +106,13 @@ def test_negative_update_name(self, class_model, new_name): :CaseImportance: Medium """ with pytest.raises(CLIReturnCodeError): - Model.update({'id': class_model['id'], 'new-name': new_name}) - result = Model.info({'id': class_model['id']}) + module_target_sat.cli.Model.update({'id': class_model['id'], 'new-name': new_name}) + result = module_target_sat.cli.Model.info({'id': class_model['id']}) assert class_model['name'] == result['name'] @pytest.mark.tier1 @pytest.mark.parametrize('entity_id', **parametrized(invalid_id_list())) - def test_negative_delete_by_id(self, entity_id): + def test_negative_delete_by_id(self, entity_id, module_target_sat): """Delete model by wrong ID :id: f8b0d428-1b3d-4fc9-9ca1-1eb30c8ac20a @@ -126,4 +124,4 @@ def test_negative_delete_by_id(self, entity_id): :CaseImportance: High """ with pytest.raises(CLIReturnCodeError): - Model.delete({'id': entity_id}) + module_target_sat.cli.Model.delete({'id': entity_id}) diff --git a/tests/foreman/cli/test_operatingsystem.py b/tests/foreman/cli/test_operatingsystem.py index 587ceeb67a0..7b5e0bd60a5 100644 --- a/tests/foreman/cli/test_operatingsystem.py +++ b/tests/foreman/cli/test_operatingsystem.py @@ -19,14 +19,8 @@ from fauxfactory import gen_alphanumeric, gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - make_architecture, - make_medium, - make_partition_table, - make_template, -) from robottelo.constants import DEFAULT_ORG +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import ( filtered_datapoint, invalid_values_list, @@ -114,10 +108,10 @@ def test_positive_end_to_end_os(self, target_sat): new_pass_hash = 'SHA256' new_minor_version = gen_string('numeric', 1) new_major_version = gen_string('numeric', 1) - new_architecture = make_architecture() - new_medium = make_medium() - new_ptable = make_partition_table() - new_template = make_template() + new_architecture = target_sat.cli_factory.make_architecture() + new_medium = target_sat.cli_factory.make_medium() + new_ptable = target_sat.cli_factory.make_partition_table() + new_template = target_sat.cli_factory.make_template() os = target_sat.cli.OperatingSys.update( { 'id': os['id'], diff --git a/tests/foreman/cli/test_organization.py b/tests/foreman/cli/test_organization.py index 2d4a8b8b8b8..37430b81a3b 100644 --- a/tests/foreman/cli/test_organization.py +++ b/tests/foreman/cli/test_organization.py @@ -19,25 +19,9 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - CLIFactoryError, - make_compute_resource, - make_domain, - make_hostgroup, - make_lifecycle_environment, - make_location, - make_medium, - make_org, - make_subnet, - make_template, - make_user, -) -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.org import Org -from robottelo.cli.user import User 
from robottelo.config import settings from robottelo.constants import FOREMAN_PROVIDERS +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( filtered_datapoint, invalid_values_list, @@ -73,7 +57,7 @@ def proxy(target_sat): @pytest.mark.tier2 -def test_positive_no_duplicate_lines(): +def test_positive_no_duplicate_lines(module_target_sat): """hammer organization --help types information doubled @@ -86,7 +70,7 @@ def test_positive_no_duplicate_lines(): :CaseImportance: Low """ # org list --help: - result = Org.list({'help': True}, output_format=None) + result = module_target_sat.cli.Org.list({'help': True}, output_format=None) # get list of lines and check they all are unique lines = [ line @@ -98,7 +82,7 @@ def test_positive_no_duplicate_lines(): @pytest.mark.e2e @pytest.mark.tier1 -def test_positive_CRD(): +def test_positive_CRD(module_target_sat): """Create organization with valid name, label and description :id: 35840da7-668e-4f78-990a-738aa688d586 @@ -113,14 +97,16 @@ def test_positive_CRD(): name = valid_org_names_list()[0] label = valid_labels_list()[0] desc = list(valid_data_list().values())[0] - org = make_org({'name': name, 'label': label, 'description': desc}) + org = module_target_sat.cli_factory.make_org( + {'name': name, 'label': label, 'description': desc} + ) assert org['name'] == name assert org['label'] == label assert org['description'] == desc # List - result = Org.list({'search': f'name={name}'}) + result = module_target_sat.cli.Org.list({'search': f'name={name}'}) assert len(result) == 1 assert result[0]['name'] == name @@ -130,30 +116,30 @@ def test_positive_CRD(): f'description ~ {desc[:-5]}', f'name ^ "{name}"', ]: - result = Org.list({'search': query}) + result = module_target_sat.cli.Org.list({'search': query}) assert len(result) == 1 assert result[0]['name'] == name # Search by name and label - result = Org.exists(search=('name', name)) + result = module_target_sat.cli.Org.exists(search=('name', name)) assert result['name'] == name - result = Org.exists(search=('label', label)) + result = module_target_sat.cli.Org.exists(search=('label', label)) assert result['name'] == name # Info by name and label - result = Org.info({'label': label}) + result = module_target_sat.cli.Org.info({'label': label}) assert result['id'] == org['id'] - result = Org.info({'name': name}) + result = module_target_sat.cli.Org.info({'name': name}) assert org['id'] == result['id'] # Delete - Org.delete({'id': org['id']}) + module_target_sat.cli.Org.delete({'id': org['id']}) with pytest.raises(CLIReturnCodeError): - result = Org.info({'id': org['id']}) + result = module_target_sat.cli.Org.info({'id': org['id']}) @pytest.mark.tier2 -def test_positive_create_with_system_admin_user(): +def test_positive_create_with_system_admin_user(module_target_sat): """Create organization using user with system admin role :id: 1482ab6e-18c7-4a62-81a2-cc969ac373fe @@ -165,16 +151,16 @@ def test_positive_create_with_system_admin_user(): login = gen_string('alpha') password = gen_string('alpha') org_name = gen_string('alpha') - make_user({'login': login, 'password': password}) - User.add_role({'login': login, 'role': 'System admin'}) - make_org({'user': login, 'password': password, 'name': org_name}) - result = Org.info({'name': org_name}) + module_target_sat.cli_factory.make_user({'login': login, 'password': password}) + module_target_sat.cli.User.add_role({'login': login, 'role': 'System admin'}) + module_target_sat.cli_factory.make_org({'user': login, 
'password': password, 'name': org_name}) + result = module_target_sat.cli.Org.info({'name': org_name}) assert result['name'] == org_name @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_add_and_remove_subnets(module_org): +def test_positive_add_and_remove_subnets(module_org, module_target_sat): """add and remove a subnet from organization :id: adb5310b-76c5-4aca-8220-fdf0fe605cb0 @@ -189,19 +175,21 @@ def test_positive_add_and_remove_subnets(module_org): :CaseLevel: Integration """ - subnets = [make_subnet() for _ in range(0, 2)] - Org.add_subnet({'name': module_org.name, 'subnet': subnets[0]['name']}) - Org.add_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']}) - org_info = Org.info({'id': module_org.id}) + subnets = [module_target_sat.cli_factory.make_subnet() for _ in range(0, 2)] + module_target_sat.cli.Org.add_subnet({'name': module_org.name, 'subnet': subnets[0]['name']}) + module_target_sat.cli.Org.add_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['subnets']) == 2, "Failed to add subnets" - Org.remove_subnet({'name': module_org.name, 'subnet': subnets[0]['name']}) - Org.remove_subnet({'name': module_org.name, 'subnet-id': subnets[1]['id']}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_subnet({'name': module_org.name, 'subnet': subnets[0]['name']}) + module_target_sat.cli.Org.remove_subnet( + {'name': module_org.name, 'subnet-id': subnets[1]['id']} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['subnets']) == 0, "Failed to remove subnets" @pytest.mark.tier2 -def test_positive_add_and_remove_users(module_org): +def test_positive_add_and_remove_users(module_org, module_target_sat): """Add and remove (admin) user to organization :id: c35b2e88-a65f-4eea-ba55-89cef59f30be @@ -218,39 +206,39 @@ def test_positive_add_and_remove_users(module_org): :CaseLevel: Integration """ - user = make_user() - admin_user = make_user({'admin': '1'}) + user = module_target_sat.cli_factory.make_user() + admin_user = module_target_sat.cli_factory.make_user({'admin': '1'}) assert admin_user['admin'] == 'yes' # add and remove user and admin user by name - Org.add_user({'name': module_org.name, 'user': user['login']}) - Org.add_user({'name': module_org.name, 'user': admin_user['login']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': user['login']}) + module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': admin_user['login']}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert user['login'] in org_info['users'], "Failed to add user by name" assert admin_user['login'] in org_info['users'], "Failed to add admin user by name" - Org.remove_user({'name': module_org.name, 'user': user['login']}) - Org.remove_user({'name': module_org.name, 'user': admin_user['login']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.remove_user({'name': module_org.name, 'user': user['login']}) + module_target_sat.cli.Org.remove_user({'name': module_org.name, 'user': admin_user['login']}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert user['login'] not in org_info['users'], "Failed to remove user by name" assert admin_user['login'] not in org_info['users'], "Failed to remove admin user by name" # add and remove user and admin user by id - Org.add_user({'id': 
module_org.id, 'user-id': user['id']}) - Org.add_user({'id': module_org.id, 'user-id': admin_user['id']}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.add_user({'id': module_org.id, 'user-id': user['id']}) + module_target_sat.cli.Org.add_user({'id': module_org.id, 'user-id': admin_user['id']}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert user['login'] in org_info['users'], "Failed to add user by id" assert admin_user['login'] in org_info['users'], "Failed to add admin user by id" - Org.remove_user({'id': module_org.id, 'user-id': user['id']}) - Org.remove_user({'id': module_org.id, 'user-id': admin_user['id']}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': user['id']}) + module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': admin_user['id']}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert user['login'] not in org_info['users'], "Failed to remove user by id" assert admin_user['login'] not in org_info['users'], "Failed to remove admin user by id" @pytest.mark.tier2 -def test_positive_add_and_remove_hostgroups(module_org): +def test_positive_add_and_remove_hostgroups(module_org, module_target_sat): """add and remove a hostgroup from an organization :id: 34e2c7c8-dc20-4709-a5a9-83c0dee9d84d @@ -265,16 +253,24 @@ def test_positive_add_and_remove_hostgroups(module_org): :CaseLevel: Integration """ - hostgroups = [make_hostgroup() for _ in range(0, 2)] + hostgroups = [module_target_sat.cli_factory.make_hostgroup() for _ in range(0, 2)] - Org.add_hostgroup({'hostgroup-id': hostgroups[0]['id'], 'id': module_org.id}) - Org.add_hostgroup({'hostgroup': hostgroups[1]['name'], 'name': module_org.name}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.add_hostgroup( + {'hostgroup-id': hostgroups[0]['id'], 'id': module_org.id} + ) + module_target_sat.cli.Org.add_hostgroup( + {'hostgroup': hostgroups[1]['name'], 'name': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert hostgroups[0]['name'] in org_info['hostgroups'], "Failed to add hostgroup by id" assert hostgroups[1]['name'] in org_info['hostgroups'], "Failed to add hostgroup by name" - Org.remove_hostgroup({'hostgroup-id': hostgroups[1]['id'], 'id': module_org.id}) - Org.remove_hostgroup({'hostgroup': hostgroups[0]['name'], 'name': module_org.name}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_hostgroup( + {'hostgroup-id': hostgroups[1]['id'], 'id': module_org.id} + ) + module_target_sat.cli.Org.remove_hostgroup( + {'hostgroup': hostgroups[0]['name'], 'name': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert hostgroups[0]['name'] not in org_info['hostgroups'], "Failed to remove hostgroup by name" assert hostgroups[1]['name'] not in org_info['hostgroups'], "Failed to remove hostgroup by id" @@ -283,7 +279,7 @@ def test_positive_add_and_remove_hostgroups(module_org): @pytest.mark.tier2 @pytest.mark.libvirt_discovery @pytest.mark.upgrade -def test_positive_add_and_remove_compute_resources(module_org): +def test_positive_add_and_remove_compute_resources(module_org, module_target_sat): """Add and remove a compute resource from organization :id: 415c14ab-f879-4ed8-9ba7-8af4ada2e277 @@ -299,7 +295,7 @@ def test_positive_add_and_remove_compute_resources(module_org): :CaseLevel: Integration """ compute_resources = [ - 
make_compute_resource( + module_target_sat.cli_factory.make_compute_resource( { 'provider': FOREMAN_PROVIDERS['libvirt'], 'url': f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system', @@ -307,21 +303,21 @@ def test_positive_add_and_remove_compute_resources(module_org): ) for _ in range(0, 2) ] - Org.add_compute_resource( + module_target_sat.cli.Org.add_compute_resource( {'compute-resource-id': compute_resources[0]['id'], 'id': module_org.id} ) - Org.add_compute_resource( + module_target_sat.cli.Org.add_compute_resource( {'compute-resource': compute_resources[1]['name'], 'name': module_org.name} ) - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['compute-resources']) == 2, "Failed to add compute resources" - Org.remove_compute_resource( + module_target_sat.cli.Org.remove_compute_resource( {'compute-resource-id': compute_resources[0]['id'], 'id': module_org.id} ) - Org.remove_compute_resource( + module_target_sat.cli.Org.remove_compute_resource( {'compute-resource': compute_resources[1]['name'], 'name': module_org.name} ) - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert ( compute_resources[0]['name'] not in org_info['compute-resources'] ), "Failed to remove compute resource by id" @@ -331,7 +327,7 @@ def test_positive_add_and_remove_compute_resources(module_org): @pytest.mark.tier2 -def test_positive_add_and_remove_media(module_org): +def test_positive_add_and_remove_media(module_org, module_target_sat): """Add and remove medium to organization :id: c2943a81-c8f7-44c4-926b-388055d7c290 @@ -346,15 +342,15 @@ def test_positive_add_and_remove_media(module_org): :CaseLevel: Integration """ - media = [make_medium() for _ in range(0, 2)] - Org.add_medium({'id': module_org.id, 'medium-id': media[0]['id']}) - Org.add_medium({'name': module_org.name, 'medium': media[1]['name']}) - org_info = Org.info({'id': module_org.id}) + media = [module_target_sat.cli_factory.make_medium() for _ in range(0, 2)] + module_target_sat.cli.Org.add_medium({'id': module_org.id, 'medium-id': media[0]['id']}) + module_target_sat.cli.Org.add_medium({'name': module_org.name, 'medium': media[1]['name']}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert media[0]['name'] in org_info['installation-media'], "Failed to add medium by id" assert media[1]['name'] in org_info['installation-media'], "Failed to add medium by name" - Org.remove_medium({'name': module_org.name, 'medium': media[0]['name']}) - Org.remove_medium({'id': module_org.id, 'medium-id': media[1]['id']}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_medium({'name': module_org.name, 'medium': media[0]['name']}) + module_target_sat.cli.Org.remove_medium({'id': module_org.id, 'medium-id': media[1]['id']}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert media[0]['name'] not in org_info['installation-media'], "Failed to remove medium by name" assert media[1]['name'] not in org_info['installation-media'], "Failed to remove medium by id" @@ -362,7 +358,7 @@ def test_positive_add_and_remove_media(module_org): @pytest.mark.tier2 @pytest.mark.skip_if_open("BZ:1845860") @pytest.mark.skip_if_open("BZ:1886876") -def test_positive_add_and_remove_templates(module_org): +def test_positive_add_and_remove_templates(module_org, module_target_sat): """Add and remove provisioning templates to organization :id: 
bd46a192-488f-4da0-bf47-1f370ae5f55c @@ -380,43 +376,47 @@ def test_positive_add_and_remove_templates(module_org): # create and remove templates by name name = list(valid_data_list().values())[0] - template = make_template({'content': gen_string('alpha'), 'name': name}) + template = module_target_sat.cli_factory.make_template( + {'content': gen_string('alpha'), 'name': name} + ) # Add provisioning-template - Org.add_provisioning_template( + module_target_sat.cli.Org.add_provisioning_template( {'name': module_org.name, 'provisioning-template': template['name']} ) - org_info = Org.info({'name': module_org.name}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert ( f"{template['name']} ({template['type']})" in org_info['templates'] ), "Failed to add template by name" # Remove provisioning-template - Org.remove_provisioning_template( + module_target_sat.cli.Org.remove_provisioning_template( {'provisioning-template': template['name'], 'name': module_org.name} ) - org_info = Org.info({'name': module_org.name}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert ( f"{template['name']} ({template['type']})" not in org_info['templates'] ), "Failed to remove template by name" # add and remove templates by id # Add provisioning-template - Org.add_provisioning_template({'provisioning-template-id': template['id'], 'id': module_org.id}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.add_provisioning_template( + {'provisioning-template-id': template['id'], 'id': module_org.id} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert ( f"{template['name']} ({template['type']})" in org_info['templates'] ), "Failed to add template by name" # Remove provisioning-template - Org.remove_provisioning_template( + module_target_sat.cli.Org.remove_provisioning_template( {'provisioning-template-id': template['id'], 'id': module_org.id} ) - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert ( f"{template['name']} ({template['type']})" not in org_info['templates'] ), "Failed to remove template by id" @pytest.mark.tier2 -def test_positive_add_and_remove_domains(module_org): +def test_positive_add_and_remove_domains(module_org, module_target_sat): """Add and remove domains to organization :id: 97359ffe-4ce6-4e44-9e3f-583d3fdebbc8 @@ -431,22 +431,22 @@ def test_positive_add_and_remove_domains(module_org): :CaseLevel: Integration """ - domains = [make_domain() for _ in range(0, 2)] - Org.add_domain({'domain-id': domains[0]['id'], 'name': module_org.name}) - Org.add_domain({'domain': domains[1]['name'], 'name': module_org.name}) - org_info = Org.info({'id': module_org.id}) + domains = [module_target_sat.cli_factory.make_domain() for _ in range(0, 2)] + module_target_sat.cli.Org.add_domain({'domain-id': domains[0]['id'], 'name': module_org.name}) + module_target_sat.cli.Org.add_domain({'domain': domains[1]['name'], 'name': module_org.name}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['domains']) == 2, "Failed to add domains" assert domains[0]['name'] in org_info['domains'] assert domains[1]['name'] in org_info['domains'] - Org.remove_domain({'domain': domains[0]['name'], 'name': module_org.name}) - Org.remove_domain({'domain-id': domains[1]['id'], 'id': module_org.id}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_domain({'domain': domains[0]['name'], 'name': module_org.name}) + 
module_target_sat.cli.Org.remove_domain({'domain-id': domains[1]['id'], 'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['domains']) == 0, "Failed to remove domains" @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_add_and_remove_lce(module_org): +def test_positive_add_and_remove_lce(module_org, module_target_sat): """Remove a lifecycle environment from organization :id: bfa9198e-6078-4f10-b79a-3d7f51b835fd @@ -460,23 +460,25 @@ def test_positive_add_and_remove_lce(module_org): :CaseLevel: Integration """ # Create a lifecycle environment. - lc_env_name = make_lifecycle_environment({'organization-id': module_org.id})['name'] + lc_env_name = module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_org.id} + )['name'] lc_env_attrs = {'name': lc_env_name, 'organization-id': module_org.id} # Read back information about the lifecycle environment. Verify the # sanity of that information. - response = LifecycleEnvironment.list(lc_env_attrs) + response = module_target_sat.cli.LifecycleEnvironment.list(lc_env_attrs) assert response[0]['name'] == lc_env_name # Delete it. - LifecycleEnvironment.delete(lc_env_attrs) + module_target_sat.cli.LifecycleEnvironment.delete(lc_env_attrs) # We should get a zero-length response when searching for the LC env. - response = LifecycleEnvironment.list(lc_env_attrs) + response = module_target_sat.cli.LifecycleEnvironment.list(lc_env_attrs) assert len(response) == 0 @pytest.mark.run_in_one_thread @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_add_and_remove_capsules(proxy, module_org): +def test_positive_add_and_remove_capsules(proxy, module_org, module_target_sat): """Add and remove a capsule from organization :id: 71af64ec-5cbb-4dd8-ba90-652e302305ec @@ -489,23 +491,29 @@ def test_positive_add_and_remove_capsules(proxy, module_org): :CaseLevel: Integration """ - Org.add_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.add_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert proxy['name'] in org_info['smart-proxies'], "Failed to add capsule by id" - Org.remove_smart_proxy({'id': module_org.id, 'smart-proxy-id': proxy['id']}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_smart_proxy( + {'id': module_org.id, 'smart-proxy-id': proxy['id']} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert proxy['name'] not in org_info['smart-proxies'], "Failed to remove capsule by id" - Org.add_smart_proxy({'name': module_org.name, 'smart-proxy': proxy['name']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.add_smart_proxy( + {'name': module_org.name, 'smart-proxy': proxy['name']} + ) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert proxy['name'] in org_info['smart-proxies'], "Failed to add capsule by name" - Org.remove_smart_proxy({'name': module_org.name, 'smart-proxy': proxy['name']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.remove_smart_proxy( + {'name': module_org.name, 'smart-proxy': proxy['name']} + ) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert proxy['name'] not in org_info['smart-proxies'], "Failed to add capsule by name" @pytest.mark.tier2 @pytest.mark.upgrade -def 
test_positive_add_and_remove_locations(module_org): +def test_positive_add_and_remove_locations(module_org, module_target_sat): """Add and remove a locations from organization :id: 37b63e5c-8fd5-439c-9540-972b597b590a @@ -520,22 +528,30 @@ def test_positive_add_and_remove_locations(module_org): :CaseLevel: Integration """ - locations = [make_location() for _ in range(0, 2)] - Org.add_location({'location-id': locations[0]['id'], 'name': module_org.name}) - Org.add_location({'location': locations[1]['name'], 'name': module_org.name}) - org_info = Org.info({'id': module_org.id}) + locations = [module_target_sat.cli_factory.make_location() for _ in range(0, 2)] + module_target_sat.cli.Org.add_location( + {'location-id': locations[0]['id'], 'name': module_org.name} + ) + module_target_sat.cli.Org.add_location( + {'location': locations[1]['name'], 'name': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['locations']) == 2, "Failed to add locations" assert locations[0]['name'] in org_info['locations'] assert locations[1]['name'] in org_info['locations'] - Org.remove_location({'location-id': locations[0]['id'], 'id': module_org.id}) - Org.remove_location({'location': locations[1]['name'], 'id': module_org.id}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.remove_location( + {'location-id': locations[0]['id'], 'id': module_org.id} + ) + module_target_sat.cli.Org.remove_location( + {'location': locations[1]['name'], 'id': module_org.id} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert not org_info.get('locations'), "Failed to remove locations" @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_add_and_remove_parameter(module_org): +def test_positive_add_and_remove_parameter(module_org, module_target_sat): """Remove a parameter from organization :id: e4099279-4e73-4c14-9e7c-912b3787b99f @@ -547,34 +563,36 @@ def test_positive_add_and_remove_parameter(module_org): param_name = gen_string('alpha') param_new_value = gen_string('alpha') - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['parameters']) == 0 # Create parameter - Org.set_parameter( + module_target_sat.cli.Org.set_parameter( {'name': param_name, 'value': gen_string('alpha'), 'organization-id': module_org.id} ) - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['parameters']) == 1 # Update - Org.set_parameter( + module_target_sat.cli.Org.set_parameter( {'name': param_name, 'value': param_new_value, 'organization': module_org.name} ) - org_info = Org.info({'id': module_org.id}) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['parameters']) == 1 assert param_new_value == org_info['parameters'][param_name.lower()] # Delete parameter - Org.delete_parameter({'name': param_name, 'organization': module_org.name}) - org_info = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.delete_parameter( + {'name': param_name, 'organization': module_org.name} + ) + org_info = module_target_sat.cli.Org.info({'id': module_org.id}) assert len(org_info['parameters']) == 0 assert param_name.lower() not in org_info['parameters'] @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, module_target_sat): 
"""Try to create an organization with invalid name, but valid label and description @@ -586,7 +604,7 @@ def test_negative_create_with_invalid_name(name): """ with pytest.raises(CLIFactoryError): - make_org( + module_target_sat.cli_factory.make_org( { 'description': gen_string('alpha'), 'label': gen_string('alpha'), @@ -596,7 +614,7 @@ def test_negative_create_with_invalid_name(name): @pytest.mark.tier1 -def test_negative_create_same_name(module_org): +def test_negative_create_same_name(module_org, module_target_sat): """Create a new organization with same name, description, and label. :id: 07924e1f-1eff-4bae-b0db-e41b84966bc1 @@ -606,7 +624,7 @@ def test_negative_create_same_name(module_org): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_org( + module_target_sat.cli_factory.make_org( { 'description': module_org.description, 'label': module_org.label, @@ -616,7 +634,7 @@ def test_negative_create_same_name(module_org): @pytest.mark.tier1 -def test_positive_update(module_org): +def test_positive_update(module_org, module_target_sat): """Update organization name and description :id: 66581003-f5d9-443c-8cd6-00f68087e8e9 @@ -630,19 +648,19 @@ def test_positive_update(module_org): new_desc = list(valid_data_list().values())[0] # upgrade name - Org.update({'id': module_org.id, 'new-name': new_name}) - org = Org.info({'id': module_org.id}) + module_target_sat.cli.Org.update({'id': module_org.id, 'new-name': new_name}) + org = module_target_sat.cli.Org.info({'id': module_org.id}) assert org['name'] == new_name # upgrade description - Org.update({'description': new_desc, 'id': org['id']}) - org = Org.info({'id': org['id']}) + module_target_sat.cli.Org.update({'description': new_desc, 'id': org['id']}) + org = module_target_sat.cli.Org.info({'id': org['id']}) assert org['description'] == new_desc @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) -def test_negative_update_name(new_name, module_org): +def test_negative_update_name(new_name, module_org, module_target_sat): """Fail to update organization name for invalid values. 
:id: 582d41b8-370d-45ed-9b7b-8096608e1324 @@ -653,11 +671,11 @@ def test_negative_update_name(new_name, module_org): """ with pytest.raises(CLIReturnCodeError): - Org.update({'id': module_org.id, 'new-name': new_name}) + module_target_sat.cli.Org.update({'id': module_org.id, 'new-name': new_name}) @pytest.mark.tier2 -def test_positive_create_user_with_timezone(module_org): +def test_positive_create_user_with_timezone(module_org, module_target_sat): """Create and remove user with valid timezone in an organization :id: b9b92c00-ee99-4da2-84c5-0a576a862100 @@ -686,11 +704,11 @@ def test_positive_create_user_with_timezone(module_org): 'Samoa', ] for timezone in users_timezones: - user = make_user({'timezone': timezone, 'admin': '1'}) - Org.add_user({'name': module_org.name, 'user': user['login']}) - org_info = Org.info({'name': module_org.name}) + user = module_target_sat.cli_factory.make_user({'timezone': timezone, 'admin': '1'}) + module_target_sat.cli.Org.add_user({'name': module_org.name, 'user': user['login']}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert user['login'] in org_info['users'] assert user['timezone'] == timezone - Org.remove_user({'id': module_org.id, 'user-id': user['id']}) - org_info = Org.info({'name': module_org.name}) + module_target_sat.cli.Org.remove_user({'id': module_org.id, 'user-id': user['id']}) + org_info = module_target_sat.cli.Org.info({'name': module_org.name}) assert user['login'] not in org_info['users'] diff --git a/tests/foreman/cli/test_oscap.py b/tests/foreman/cli/test_oscap.py index 5088d25fb60..e81a088d823 100644 --- a/tests/foreman/cli/test_oscap.py +++ b/tests/foreman/cli/test_oscap.py @@ -20,19 +20,9 @@ from nailgun import entities import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - CLIFactoryError, - make_hostgroup, - make_scap_policy, - make_scapcontent, - make_tailoringfile, -) -from robottelo.cli.host import Host -from robottelo.cli.scap_policy import Scappolicy -from robottelo.cli.scapcontent import Scapcontent from robottelo.config import settings from robottelo.constants import OSCAP_DEFAULT_CONTENT, OSCAP_PERIOD, OSCAP_WEEKDAY +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_names_list, parametrized, @@ -44,20 +34,20 @@ class TestOpenScap: """Tests related to the oscap cli hammer plugin""" @classmethod - def fetch_scap_and_profile_id(cls, scap_name): + def fetch_scap_and_profile_id(cls, scap_name, sat): """Extracts the scap ID and scap profile id :param scap_name: Scap title :returns: scap_id and scap_profile_id """ - default_content = Scapcontent.info({'title': scap_name}, output_format='json') + default_content = sat.cli.Scapcontent.info({'title': scap_name}, output_format='json') scap_id = default_content['id'] scap_profile_ids = default_content['scap-content-profiles'][0]['id'] return scap_id, scap_profile_ids @pytest.mark.tier1 - def test_positive_list_default_content_with_admin(self): + def test_positive_list_default_content_with_admin(self, module_target_sat): """List the default scap content with admin account :id: 32c41c22-6aef-424e-8e69-a65c00f1c811 @@ -81,13 +71,13 @@ def test_positive_list_default_content_with_admin(self): :CaseImportance: Medium """ - scap_contents = [content['title'] for content in Scapcontent.list()] + scap_contents = [content['title'] for content in module_target_sat.cli.Scapcontent.list()] for title in OSCAP_DEFAULT_CONTENT.values(): assert title in 
scap_contents @pytest.mark.tier1 def test_negative_list_default_content_with_viewer_role( - self, scap_content, default_viewer_role + self, scap_content, default_viewer_role, module_target_sat ): """List the default scap content by user with viewer role @@ -110,17 +100,17 @@ def test_negative_list_default_content_with_viewer_role( :CaseImportance: Medium """ - result = Scapcontent.with_user( + result = module_target_sat.cli.Scapcontent.with_user( default_viewer_role.login, default_viewer_role.password ).list() assert len(result) == 0 with pytest.raises(CLIReturnCodeError): - Scapcontent.with_user(default_viewer_role.login, default_viewer_role.password).info( - {'title': scap_content['title']} - ) + module_target_sat.cli.Scapcontent.with_user( + default_viewer_role.login, default_viewer_role.password + ).info({'title': scap_content['title']}) @pytest.mark.tier1 - def test_positive_view_scap_content_info_admin(self): + def test_positive_view_scap_content_info_admin(self, module_target_sat): """View info of scap content with admin account :id: 539ea982-0701-43f5-bb91-e566e6687e35 @@ -142,12 +132,14 @@ def test_positive_view_scap_content_info_admin(self): :CaseImportance: Medium """ title = gen_string('alpha') - make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) - result = Scapcontent.info({'title': title}) + module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) + result = module_target_sat.cli.Scapcontent.info({'title': title}) assert result['title'] == title @pytest.mark.tier1 - def test_negative_info_scap_content(self): + def test_negative_info_scap_content(self, module_target_sat): """View info of scap content with invalid ID as parameter :id: 86f44fb1-2e2b-4004-83c1-4a62162ebea9 @@ -170,11 +162,11 @@ def test_negative_info_scap_content(self): """ invalid_scap_id = gen_string('alpha') with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'id': invalid_scap_id}) + module_target_sat.cli.Scapcontent.info({'id': invalid_scap_id}) @pytest.mark.parametrize('title', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_scap_content_with_valid_title(self, title): + def test_positive_create_scap_content_with_valid_title(self, title, module_target_sat): """Create scap-content with valid title :id: 68e9fbe2-e3c3-48e7-a774-f1260a3b7f4f @@ -199,11 +191,13 @@ def test_positive_create_scap_content_with_valid_title(self, title): :CaseImportance: Medium """ - scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + scap_content = module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) assert scap_content['title'] == title @pytest.mark.tier1 - def test_negative_create_scap_content_with_same_title(self): + def test_negative_create_scap_content_with_same_title(self, module_target_sat): """Create scap-content with same title :id: a8cbacc9-456a-4f6f-bd0e-4d1167a8b401 @@ -231,14 +225,18 @@ def test_negative_create_scap_content_with_same_title(self): :CaseImportance: Medium """ title = gen_string('alpha') - scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + scap_content = module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) assert scap_content['title'] == title with pytest.raises(CLIFactoryError): - make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + 
module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) @pytest.mark.parametrize('title', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_scap_content_with_invalid_title(self, title): + def test_negative_create_scap_content_with_invalid_title(self, title, module_target_sat): """Create scap-content with invalid title :id: 90a2590e-a6ff-41f1-9e0a-67d4b16435c0 @@ -262,11 +260,15 @@ def test_negative_create_scap_content_with_invalid_title(self, title): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_scap_content_with_valid_originalfile_name(self, name): + def test_positive_create_scap_content_with_valid_originalfile_name( + self, name, module_target_sat + ): """Create scap-content with valid original file name :id: 25441174-11cb-4d9b-9ec5-b1c69411b5bc @@ -289,14 +291,16 @@ def test_positive_create_scap_content_with_valid_originalfile_name(self, name): :CaseImportance: Medium """ - scap_content = make_scapcontent( + scap_content = module_target_sat.cli_factory.make_scapcontent( {'original-filename': name, 'scap-file': settings.oscap.content_path} ) assert scap_content['original-filename'] == name @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_scap_content_with_invalid_originalfile_name(self, name): + def test_negative_create_scap_content_with_invalid_originalfile_name( + self, name, module_target_sat + ): """Create scap-content with invalid original file name :id: 83feb67a-a6bf-4a99-923d-889e8d1013fa @@ -322,11 +326,13 @@ def test_negative_create_scap_content_with_invalid_originalfile_name(self, name) :BZ: 1482395 """ with pytest.raises(CLIFactoryError): - make_scapcontent({'original-filename': name, 'scap-file': settings.oscap.content_path}) + module_target_sat.cli_factory.make_scapcontent( + {'original-filename': name, 'scap-file': settings.oscap.content_path} + ) @pytest.mark.parametrize('title', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_negative_create_scap_content_without_dsfile(self, title): + def test_negative_create_scap_content_without_dsfile(self, title, module_target_sat): """Create scap-content without scap data stream xml file :id: ea811994-12cd-4382-9382-37fa806cc26f @@ -349,10 +355,10 @@ def test_negative_create_scap_content_without_dsfile(self, title): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scapcontent({'title': title}) + module_target_sat.cli_factory.make_scapcontent({'title': title}) @pytest.mark.tier1 - def test_positive_update_scap_content_with_newtitle(self): + def test_positive_update_scap_content_with_newtitle(self, module_target_sat): """Update scap content title :id: 2c32e94a-237d-40b9-8a3b-fca2ef26fe79 @@ -376,14 +382,16 @@ def test_positive_update_scap_content_with_newtitle(self): """ title = gen_string('alpha') new_title = gen_string('alpha') - scap_content = make_scapcontent({'title': title, 'scap-file': settings.oscap.content_path}) + scap_content = module_target_sat.cli_factory.make_scapcontent( + {'title': title, 'scap-file': settings.oscap.content_path} + ) assert scap_content['title'] == title - Scapcontent.update({'title': 
title, 'new-title': new_title}) - result = Scapcontent.info({'title': new_title}, output_format='json') + module_target_sat.cli.Scapcontent.update({'title': title, 'new-title': new_title}) + result = module_target_sat.cli.Scapcontent.info({'title': new_title}, output_format='json') assert result['title'] == new_title @pytest.mark.tier1 - def test_positive_delete_scap_content_with_id(self): + def test_positive_delete_scap_content_with_id(self, module_target_sat): """Delete a scap content with id as parameter :id: 11ae7652-65e0-4751-b1e0-246b27919238 @@ -403,13 +411,15 @@ def test_positive_delete_scap_content_with_id(self): :CaseImportance: Medium """ - scap_content = make_scapcontent({'scap-file': settings.oscap.content_path}) - Scapcontent.delete({'id': scap_content['id']}) + scap_content = module_target_sat.cli_factory.make_scapcontent( + {'scap-file': settings.oscap.content_path} + ) + module_target_sat.cli.Scapcontent.delete({'id': scap_content['id']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'id': scap_content['id']}) + module_target_sat.cli.Scapcontent.info({'id': scap_content['id']}) @pytest.mark.tier1 - def test_positive_delete_scap_content_with_title(self): + def test_positive_delete_scap_content_with_title(self, module_target_sat): """Delete a scap content with title as parameter :id: aa4ca830-3250-4517-b40c-0256cdda5e0a @@ -431,14 +441,18 @@ def test_positive_delete_scap_content_with_title(self): :CaseImportance: Medium """ - scap_content = make_scapcontent({'scap-file': settings.oscap.content_path}) - Scapcontent.delete({'title': scap_content['title']}) + scap_content = module_target_sat.cli_factory.make_scapcontent( + {'scap-file': settings.oscap.content_path} + ) + module_target_sat.cli.Scapcontent.delete({'title': scap_content['title']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'title': scap_content['title']}) + module_target_sat.cli.Scapcontent.info({'title': scap_content['title']}) @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier2 - def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): + def test_postive_create_scap_policy_with_valid_name( + self, name, scap_content, module_target_sat + ): """Create scap policy with valid name :id: c9327675-62b2-4e22-933a-02818ef68c11 @@ -460,7 +474,7 @@ def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): :CaseImportance: Medium """ - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -472,13 +486,15 @@ def test_postive_create_scap_policy_with_valid_name(self, name, scap_content): ) assert scap_policy['name'] == name # Deleting policy which created for all valid input (ex- latin1, cjk, utf-8, etc.) 
- Scappolicy.delete({'name': scap_policy['name']}) + module_target_sat.cli.Scappolicy.delete({'name': scap_policy['name']}) with pytest.raises(CLIReturnCodeError): - Scappolicy.info({'name': scap_policy['name']}) + module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']}) @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier2 - def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content): + def test_negative_create_scap_policy_with_invalid_name( + self, name, scap_content, module_target_sat + ): """Create scap policy with invalid name :id: 0d163968-7759-4cfd-9c4d-98533d8db925 @@ -501,7 +517,7 @@ def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content) :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scap_policy( + module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -513,7 +529,7 @@ def test_negative_create_scap_policy_with_invalid_name(self, name, scap_content) ) @pytest.mark.tier2 - def test_negative_create_scap_policy_without_content(self, scap_content): + def test_negative_create_scap_policy_without_content(self, scap_content, module_target_sat): """Create scap policy without scap content :id: 88a8fba3-f45a-4e22-9ee1-f0d701f1135f @@ -534,7 +550,7 @@ def test_negative_create_scap_policy_without_content(self, scap_content): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_scap_policy( + module_target_sat.cli_factory.make_scap_policy( { 'deploy-by': 'ansible', 'scap-content-profile-id': scap_content["scap_profile_id"], @@ -544,7 +560,7 @@ def test_negative_create_scap_policy_without_content(self, scap_content): ) @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): + def test_positive_associate_scap_policy_with_hostgroups(self, scap_content, module_target_sat): """Associate hostgroups to scap policy :id: 916403a0-572d-4cf3-9155-3e3d0373577f @@ -566,9 +582,9 @@ def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): :CaseImportance: Medium """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.make_hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -582,7 +598,9 @@ def test_positive_associate_scap_policy_with_hostgroups(self, scap_content): assert scap_policy['hostgroups'][0] == hostgroup['name'] @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_content): + def test_positive_associate_scap_policy_with_hostgroup_via_ansible( + self, scap_content, module_target_sat + ): """Associate hostgroup to scap policy via ansible :id: 2df303c6-bff5-4977-a865-a3afabfb8726 @@ -604,9 +622,9 @@ def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_co :expectedresults: The policy is created via ansible deploy option and associated successfully. 
""" - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.make_hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -624,7 +642,7 @@ def test_positive_associate_scap_policy_with_hostgroup_via_ansible(self, scap_co @pytest.mark.upgrade @pytest.mark.tier2 def test_positive_associate_scap_policy_with_tailoringfiles( - self, deploy, scap_content, tailoring_file_path + self, deploy, scap_content, tailoring_file_path, module_target_sat ): """Associate tailoring file by name/id to scap policy with all deployments @@ -641,12 +659,16 @@ def test_positive_associate_scap_policy_with_tailoringfiles( :expectedresults: The policy is created and associated successfully. """ - tailoring_file_a = make_tailoringfile({'scap-file': tailoring_file_path['satellite']}) + tailoring_file_a = module_target_sat.cli_factory.make_tailoringfile( + {'scap-file': tailoring_file_path['satellite']} + ) tailoring_file_profile_a_id = tailoring_file_a['tailoring-file-profiles'][0]['id'] - tailoring_file_b = make_tailoringfile({'scap-file': tailoring_file_path['satellite']}) + tailoring_file_b = module_target_sat.cli_factory.make_tailoringfile( + {'scap-file': tailoring_file_path['satellite']} + ) tailoring_file_profile_b_id = tailoring_file_b['tailoring-file-profiles'][0]['id'] - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_content["scap_id"], 'deploy-by': deploy, @@ -661,22 +683,22 @@ def test_positive_associate_scap_policy_with_tailoringfiles( assert scap_policy['tailoring-file-id'] == tailoring_file_a['id'] assert scap_policy['tailoring-file-profile-id'] == tailoring_file_profile_a_id - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'name': scap_policy['name'], 'tailoring-file': tailoring_file_b['name'], 'tailoring-file-profile-id': tailoring_file_profile_b_id, } ) - scap_info = Scappolicy.info({'name': scap_policy['name']}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']}) assert scap_info['tailoring-file-id'] == tailoring_file_b['id'] assert scap_info['tailoring-file-profile-id'] == tailoring_file_profile_b_id - Scappolicy.delete({'name': scap_policy['name']}) + module_target_sat.cli.Scappolicy.delete({'name': scap_policy['name']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'name': scap_policy['name']}) + module_target_sat.cli.Scapcontent.info({'name': scap_policy['name']}) - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_content["scap_id"], 'deploy-by': deploy, @@ -691,26 +713,26 @@ def test_positive_associate_scap_policy_with_tailoringfiles( assert scap_policy['tailoring-file-id'] == tailoring_file_a['id'] assert scap_policy['tailoring-file-profile-id'] == tailoring_file_profile_a_id - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'tailoring-file-id': tailoring_file_b['id'], 'tailoring-file-profile-id': tailoring_file_profile_b_id, } ) - scap_info = Scappolicy.info({'id': scap_policy['id']}) + scap_info = module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']}) assert scap_info['tailoring-file-id'] == tailoring_file_b['id'] assert scap_info['tailoring-file-profile-id'] == tailoring_file_profile_b_id - Scappolicy.delete({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.delete({'id': 
scap_policy['id']}) with pytest.raises(CLIReturnCodeError): - Scapcontent.info({'name': scap_policy['name']}) + module_target_sat.cli.Scapcontent.info({'name': scap_policy['name']}) @pytest.mark.parametrize('deploy', **parametrized(['manual', 'ansible'])) @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.e2e - def test_positive_scap_policy_end_to_end(self, deploy, scap_content): + def test_positive_scap_policy_end_to_end(self, deploy, scap_content, module_target_sat): """List all scap policies and read info using id, name :id: d14ab43e-c7a9-4eee-b61c-420b07ca1da9 @@ -735,9 +757,9 @@ def test_positive_scap_policy_end_to_end(self, deploy, scap_content): :CaseImportance: Critical """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.make_hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': deploy, @@ -748,28 +770,31 @@ def test_positive_scap_policy_end_to_end(self, deploy, scap_content): 'hostgroups': hostgroup['name'], } ) - result = Scappolicy.list() + result = module_target_sat.cli.Scappolicy.list() assert name in [policy['name'] for policy in result] - assert Scappolicy.info({'id': scap_policy['id']})['id'] == scap_policy['id'] - assert Scappolicy.info({'name': scap_policy['name']})['name'] == name + assert ( + module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']})['id'] + == scap_policy['id'] + ) + assert module_target_sat.cli.Scappolicy.info({'name': scap_policy['name']})['name'] == name - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'period': OSCAP_PERIOD['monthly'].lower(), 'day-of-month': 15, } ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['period'] == OSCAP_PERIOD['monthly'].lower() assert scap_info['day-of-month'] == '15' - Scappolicy.delete({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.delete({'id': scap_policy['id']}) with pytest.raises(CLIReturnCodeError): - Scappolicy.info({'id': scap_policy['id']}) + module_target_sat.cli.Scappolicy.info({'id': scap_policy['id']}) @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_update_scap_policy_with_hostgroup(self, scap_content): + def test_positive_update_scap_policy_with_hostgroup(self, scap_content, module_target_sat): """Update scap policy by addition of hostgroup :id: 21b9b82b-7c6c-4944-bc2f-67631e1d4086 @@ -790,9 +815,9 @@ def test_positive_update_scap_policy_with_hostgroup(self, scap_content): :CaseImportance: Medium """ - hostgroup = make_hostgroup() + hostgroup = module_target_sat.cli_factory.make_hostgroup() name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -805,17 +830,17 @@ def test_positive_update_scap_policy_with_hostgroup(self, scap_content): ) assert scap_policy['hostgroups'][0] == hostgroup['name'] assert scap_policy['deployment-option'] == 'ansible' - new_hostgroup = make_hostgroup() - Scappolicy.update( + new_hostgroup = module_target_sat.cli_factory.make_hostgroup() + module_target_sat.cli.Scappolicy.update( {'id': scap_policy['id'], 'deploy-by': 'ansible', 'hostgroups': new_hostgroup['name']} ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['hostgroups'][0] == new_hostgroup['name'] # Assert if the 
deployment is updated assert scap_info['deployment-option'] == 'ansible' @pytest.mark.tier2 - def test_positive_update_scap_policy_period(self, scap_content): + def test_positive_update_scap_policy_period(self, scap_content, module_target_sat): """Update scap policy by updating the period strategy from monthly to weekly @@ -838,7 +863,7 @@ def test_positive_update_scap_policy_period(self, scap_content): :CaseImportance: Medium """ name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -849,20 +874,20 @@ def test_positive_update_scap_policy_period(self, scap_content): } ) assert scap_policy['period'] == OSCAP_PERIOD['weekly'].lower() - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( { 'id': scap_policy['id'], 'period': OSCAP_PERIOD['monthly'].lower(), 'day-of-month': 15, } ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['period'] == OSCAP_PERIOD['monthly'].lower() assert scap_info['day-of-month'] == '15' @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_update_scap_policy_with_content(self, scap_content): + def test_positive_update_scap_policy_with_content(self, scap_content, module_target_sat): """Update the scap policy by updating the scap content associated with the policy @@ -885,7 +910,7 @@ def test_positive_update_scap_policy_with_content(self, scap_content): :CaseImportance: Medium """ name = gen_string('alphanumeric') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -897,17 +922,19 @@ def test_positive_update_scap_policy_with_content(self, scap_content): ) assert scap_policy['scap-content-id'] == scap_content["scap_id"] scap_id, scap_profile_id = self.fetch_scap_and_profile_id( - OSCAP_DEFAULT_CONTENT['rhel_firefox'] + OSCAP_DEFAULT_CONTENT['rhel_firefox'], module_target_sat ) - Scappolicy.update( + module_target_sat.cli.Scappolicy.update( {'name': name, 'scap-content-id': scap_id, 'scap-content-profile-id': scap_profile_id} ) - scap_info = Scappolicy.info({'name': name}) + scap_info = module_target_sat.cli.Scappolicy.info({'name': name}) assert scap_info['scap-content-id'] == scap_id assert scap_info['scap-content-profile-id'] == scap_profile_id @pytest.mark.tier2 - def test_positive_associate_scap_policy_with_single_server(self, scap_content): + def test_positive_associate_scap_policy_with_single_server( + self, scap_content, module_target_sat + ): """Assign an audit policy to a single server :id: 30566c27-f466-4b4d-beaf-0a5bfda98b89 @@ -931,7 +958,7 @@ def test_positive_associate_scap_policy_with_single_server(self, scap_content): host = entities.Host() host.create() name = gen_string('alpha') - scap_policy = make_scap_policy( + scap_policy = module_target_sat.cli_factory.make_scap_policy( { 'name': name, 'deploy-by': 'ansible', @@ -942,8 +969,10 @@ def test_positive_associate_scap_policy_with_single_server(self, scap_content): } ) host_name = host.name + "." 
+ host.domain.name - Scappolicy.update({'id': scap_policy['id'], 'hosts': host_name}) - hosts = Host.list({'search': 'compliance_policy_id = {}'.format(scap_policy['id'])}) + module_target_sat.cli.Scappolicy.update({'id': scap_policy['id'], 'hosts': host_name}) + hosts = module_target_sat.cli.Host.list( + {'search': 'compliance_policy_id = {}'.format(scap_policy['id'])} + ) assert host_name in [host['name'] for host in hosts] @pytest.mark.stubbed diff --git a/tests/foreman/cli/test_oscap_tailoringfiles.py b/tests/foreman/cli/test_oscap_tailoringfiles.py index 285a907df3b..089e3bf6986 100644 --- a/tests/foreman/cli/test_oscap_tailoringfiles.py +++ b/tests/foreman/cli/test_oscap_tailoringfiles.py @@ -19,10 +19,8 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError, make_tailoringfile -from robottelo.cli.scap_tailoring_files import TailoringFiles from robottelo.constants import SNIPPET_DATA_FILE, DataFile +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_names_list, parametrized, @@ -35,7 +33,7 @@ class TestTailoringFiles: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create(self, tailoring_file_path, name): + def test_positive_create(self, tailoring_file_path, name, module_target_sat): """Create new Tailoring Files using different values types as name :id: e1bb4de2-1b64-4904-bc7c-f0befa9dbd6f @@ -48,17 +46,17 @@ def test_positive_create(self, tailoring_file_path, name): :parametrized: yes """ - tailoring_file = make_tailoringfile( + tailoring_file = module_target_sat.cli_factory.make_tailoringfile( {'name': name, 'scap-file': tailoring_file_path['satellite']} ) assert tailoring_file['name'] == name # Delete tailoring files which created for all valid input (ex- latin1, cjk, utf-8, etc.) 
- TailoringFiles.delete({'id': tailoring_file['id']}) + module_target_sat.cli.TailoringFiles.delete({'id': tailoring_file['id']}) with pytest.raises(CLIReturnCodeError): - TailoringFiles.info({'id': tailoring_file['id']}) + module_target_sat.cli.TailoringFiles.info({'id': tailoring_file['id']}) @pytest.mark.tier1 - def test_positive_create_with_space(self, tailoring_file_path): + def test_positive_create_with_space(self, tailoring_file_path, module_target_sat): """Create tailoring files with space in name :id: c98ef4e7-41c5-4a8b-8a0b-8d53100b75a8 @@ -72,13 +70,13 @@ def test_positive_create_with_space(self, tailoring_file_path): :CaseImportance: Medium """ name = gen_string('alphanumeric') + ' ' + gen_string('alphanumeric') - tailoring_file = make_tailoringfile( + tailoring_file = module_target_sat.cli_factory.make_tailoringfile( {'name': name, 'scap-file': tailoring_file_path['satellite']} ) assert tailoring_file['name'] == name @pytest.mark.tier1 - def test_positive_get_info_of_tailoring_file(self, tailoring_file_path): + def test_positive_get_info_of_tailoring_file(self, tailoring_file_path, module_target_sat): """Get information of tailoring file :id: bc201194-e8c8-4385-a577-09f3455f5a4d @@ -96,12 +94,14 @@ def test_positive_get_info_of_tailoring_file(self, tailoring_file_path): :CaseImportance: Medium """ name = gen_string('alphanumeric') - make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']}) - result = TailoringFiles.info({'name': name}) + module_target_sat.cli_factory.make_tailoringfile( + {'name': name, 'scap-file': tailoring_file_path['satellite']} + ) + result = module_target_sat.cli.TailoringFiles.info({'name': name}) assert result['name'] == name @pytest.mark.tier1 - def test_positive_list_tailoring_file(self, tailoring_file_path): + def test_positive_list_tailoring_file(self, tailoring_file_path, module_target_sat): """List all created tailoring files :id: 2ea63c4b-eebe-468d-8153-807e86d1b6a2 @@ -118,8 +118,10 @@ def test_positive_list_tailoring_file(self, tailoring_file_path): :CaseImportance: Medium """ name = gen_string('utf8', length=5) - make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']}) - result = TailoringFiles.list() + module_target_sat.cli_factory.make_tailoringfile( + {'name': name, 'scap-file': tailoring_file_path['satellite']} + ) + result = module_target_sat.cli.TailoringFiles.list() assert name in [tailoringfile['name'] for tailoringfile in result] @pytest.mark.tier1 @@ -139,11 +141,13 @@ def test_negative_create_with_invalid_file(self, target_sat): target_sat.put(DataFile.SNIPPET_DATA_FILE, f'/tmp/{SNIPPET_DATA_FILE}') name = gen_string('alphanumeric') with pytest.raises(CLIFactoryError): - make_tailoringfile({'name': name, 'scap-file': f'/tmp/{SNIPPET_DATA_FILE}'}) + target_sat.cli_factory.make_tailoringfile( + {'name': name, 'scap-file': f'/tmp/{SNIPPET_DATA_FILE}'} + ) @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, tailoring_file_path, name): + def test_negative_create_with_invalid_name(self, tailoring_file_path, name, module_target_sat): """Create Tailoring files with invalid name :id: 973eee82-9735-49bb-b534-0de619aa0279 @@ -159,7 +163,9 @@ def test_negative_create_with_invalid_name(self, tailoring_file_path, name): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_tailoringfile({'name': name, 'scap-file': tailoring_file_path['satellite']}) + 
module_target_sat.cli_factory.make_tailoringfile( + {'name': name, 'scap-file': tailoring_file_path['satellite']} + ) @pytest.mark.stubbed @pytest.mark.tier2 @@ -202,11 +208,13 @@ def test_positive_download_tailoring_file(self, tailoring_file_path, target_sat) """ name = gen_string('alphanumeric') file_path = f'/var{tailoring_file_path["satellite"]}' - tailoring_file = make_tailoringfile( + tailoring_file = target_sat.cli_factory.make_tailoringfile( {'name': name, 'scap-file': tailoring_file_path['satellite']} ) assert tailoring_file['name'] == name - result = TailoringFiles.download_tailoring_file({'name': name, 'path': '/var/tmp/'}) + result = target_sat.cli.TailoringFiles.download_tailoring_file( + {'name': name, 'path': '/var/tmp/'} + ) assert file_path in result result = target_sat.execute(f'find {file_path} 2> /dev/null') assert result.status == 0 @@ -214,7 +222,7 @@ def test_positive_download_tailoring_file(self, tailoring_file_path, target_sat) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_delete_tailoring_file(self, tailoring_file_path): + def test_positive_delete_tailoring_file(self, tailoring_file_path, module_target_sat): """Delete tailoring file :id: 8bab5478-1ef1-484f-aafd-98e5cba7b1e7 @@ -228,10 +236,12 @@ def test_positive_delete_tailoring_file(self, tailoring_file_path): :CaseImportance: Medium """ - tailoring_file = make_tailoringfile({'scap-file': tailoring_file_path['satellite']}) - TailoringFiles.delete({'id': tailoring_file['id']}) + tailoring_file = module_target_sat.cli_factory.make_tailoringfile( + {'scap-file': tailoring_file_path['satellite']} + ) + module_target_sat.cli.TailoringFiles.delete({'id': tailoring_file['id']}) with pytest.raises(CLIReturnCodeError): - TailoringFiles.info({'id': tailoring_file['id']}) + module_target_sat.cli.TailoringFiles.info({'id': tailoring_file['id']}) @pytest.mark.stubbed @pytest.mark.tier4 diff --git a/tests/foreman/cli/test_ostreebranch.py b/tests/foreman/cli/test_ostreebranch.py index 23516360e48..ccb35ea0bae 100644 --- a/tests/foreman/cli/test_ostreebranch.py +++ b/tests/foreman/cli/test_ostreebranch.py @@ -21,15 +21,6 @@ from nailgun import entities import pytest -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - make_content_view, - make_org_with_credentials, - make_product_with_credentials, - make_repository_with_credentials, -) -from robottelo.cli.ostreebranch import OstreeBranch -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.constants.repos import OSTREE_REPO @@ -49,15 +40,17 @@ def ostree_user_credentials(): @pytest.fixture(scope='module') -def ostree_repo_with_user(ostree_user_credentials): +def ostree_repo_with_user(ostree_user_credentials, module_target_sat): """Create an user, organization, product and ostree repo, sync ostree repo for particular user, create content view and publish it for particular user """ - org = make_org_with_credentials(credentials=ostree_user_credentials) - product = make_product_with_credentials({'organization-id': org['id']}, ostree_user_credentials) + org = module_target_sat.cli_factory.org_with_credentials(credentials=ostree_user_credentials) + product = module_target_sat.cli_factory.product_with_credentials( + {'organization-id': org['id']}, ostree_user_credentials + ) # Create new custom ostree repo - ostree_repo = make_repository_with_credentials( + ostree_repo = module_target_sat.cli_factory.repository_with_credentials( { 'product-id': product['id'], 'content-type': 'ostree', 
@@ -66,30 +59,36 @@ def ostree_repo_with_user(ostree_user_credentials): }, ostree_user_credentials, ) - Repository.with_user(*ostree_user_credentials).synchronize({'id': ostree_repo['id']}) - cv = make_content_view( + module_target_sat.cli.Repository.with_user(*ostree_user_credentials).synchronize( + {'id': ostree_repo['id']} + ) + cv = module_target_sat.cli_factory.make_content_view( {'organization-id': org['id'], 'repository-ids': [ostree_repo['id']]}, ostree_user_credentials, ) - ContentView.with_user(*ostree_user_credentials).publish({'id': cv['id']}) - cv = ContentView.with_user(*ostree_user_credentials).info({'id': cv['id']}) + module_target_sat.cli.ContentView.with_user(*ostree_user_credentials).publish({'id': cv['id']}) + cv = module_target_sat.cli.ContentView.with_user(*ostree_user_credentials).info( + {'id': cv['id']} + ) return {'cv': cv, 'org': org, 'ostree_repo': ostree_repo, 'product': product} @pytest.mark.skip_if_open("BZ:1625783") -def test_positive_list(ostree_user_credentials, ostree_repo_with_user): +def test_positive_list(ostree_user_credentials, ostree_repo_with_user, module_target_sat): """List Ostree Branches :id: 0f5e7e63-c0e3-43fc-8238-caf19a478a46 :expectedresults: Ostree Branch List is displayed """ - result = OstreeBranch.with_user(*ostree_user_credentials).list() + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list() assert len(result) > 0 @pytest.mark.upgrade -def test_positive_list_by_repo_id(ostree_repo_with_user, ostree_user_credentials): +def test_positive_list_by_repo_id( + ostree_repo_with_user, ostree_user_credentials, module_target_sat +): """List Ostree branches by repo id :id: 8cf1a973-031c-4c02-af14-0faba22ab60b @@ -98,41 +97,43 @@ def test_positive_list_by_repo_id(ostree_repo_with_user, ostree_user_credentials """ - branch = OstreeBranch.with_user(*ostree_user_credentials) + branch = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials) result = branch.list({'repository-id': ostree_repo_with_user['ostree_repo']['id']}) assert len(result) > 0 @pytest.mark.skip_if_open("BZ:1625783") -def test_positive_list_by_product_id(ostree_repo_with_user, ostree_user_credentials): +def test_positive_list_by_product_id( + ostree_repo_with_user, ostree_user_credentials, module_target_sat +): """List Ostree branches by product id :id: e7b9d04d-cace-4271-b166-214017200c53 :expectedresults: Ostree Branch List is displayed """ - result = OstreeBranch.with_user(*ostree_user_credentials).list( + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list( {'product-id': ostree_repo_with_user['product']['id']} ) assert len(result) > 0 @pytest.mark.skip_if_open("BZ:1625783") -def test_positive_list_by_org_id(ostree_repo_with_user, ostree_user_credentials): +def test_positive_list_by_org_id(ostree_repo_with_user, ostree_user_credentials, module_target_sat): """List Ostree branches by org id :id: 5b169619-305f-4934-b363-068193330701 :expectedresults: Ostree Branch List is displayed """ - result = OstreeBranch.with_user(*ostree_user_credentials).list( + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list( {'organization-id': ostree_repo_with_user['org']['id']} ) assert len(result) > 0 @pytest.mark.skip_if_open("BZ:1625783") -def test_positive_list_by_cv_id(ostree_repo_with_user, ostree_user_credentials): +def test_positive_list_by_cv_id(ostree_repo_with_user, ostree_user_credentials, module_target_sat): """List Ostree branches by cv id :id: 
3654f107-44ee-4af2-a9e4-f9fd8c68491e @@ -140,23 +141,25 @@ def test_positive_list_by_cv_id(ostree_repo_with_user, ostree_user_credentials): :expectedresults: Ostree Branch List is displayed """ - result = OstreeBranch.with_user(*ostree_user_credentials).list( + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list( {'content-view-id': ostree_repo_with_user['cv']['id']} ) assert len(result) > 0 @pytest.mark.skip_if_open("BZ:1625783") -def test_positive_info_by_id(ostree_user_credentials, ostree_repo_with_user): +def test_positive_info_by_id(ostree_user_credentials, ostree_repo_with_user, module_target_sat): """Get info for Ostree branch by id :id: 7838c9a8-56da-44de-883c-28571ecfa75c :expectedresults: Ostree Branch Info is displayed """ - result = OstreeBranch.with_user(*ostree_user_credentials).list() + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).list() assert len(result) > 0 # Grab a random branch branch = random.choice(result) - result = OstreeBranch.with_user(*ostree_user_credentials).info({'id': branch['id']}) + result = module_target_sat.cli.OstreeBranch.with_user(*ostree_user_credentials).info( + {'id': branch['id']} + ) assert branch['id'] == result['id'] diff --git a/tests/foreman/cli/test_partitiontable.py b/tests/foreman/cli/test_partitiontable.py index 6540a5d755b..994fe11a7a6 100644 --- a/tests/foreman/cli/test_partitiontable.py +++ b/tests/foreman/cli/test_partitiontable.py @@ -21,9 +21,7 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_os, make_partition_table -from robottelo.cli.partitiontable import PartitionTable +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import generate_strings_list, parametrized @@ -32,7 +30,7 @@ class TestPartitionTable: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(generate_strings_list(length=1))) - def test_positive_create_with_one_character_name(self, name): + def test_positive_create_with_one_character_name(self, name, target_sat): """Create Partition table with 1 character in name :id: cfec857c-ed6e-4472-93bb-70e1d4f39bae @@ -45,7 +43,7 @@ def test_positive_create_with_one_character_name(self, name): :CaseImportance: Medium """ - ptable = make_partition_table({'name': name}) + ptable = target_sat.cli_factory.make_partition_table({'name': name}) assert ptable['name'] == name @pytest.mark.tier1 @@ -61,7 +59,7 @@ def test_positive_create_with_one_character_name(self, name): ) ) ) - def test_positive_crud_with_name(self, name, new_name): + def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Create, read, update and delete Partition Tables with different names :id: ce512fef-fbf2-4365-b70b-d30221111d96 @@ -72,17 +70,17 @@ def test_positive_crud_with_name(self, name, new_name): :CaseImportance: Critical """ - ptable = make_partition_table({'name': name}) + ptable = module_target_sat.cli_factory.make_partition_table({'name': name}) assert ptable['name'] == name - PartitionTable.update({'id': ptable['id'], 'new-name': new_name}) - ptable = PartitionTable.info({'id': ptable['id']}) + module_target_sat.cli.PartitionTable.update({'id': ptable['id'], 'new-name': new_name}) + ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']}) assert ptable['name'] == new_name - PartitionTable.delete({'name': ptable['name']}) + module_target_sat.cli.PartitionTable.delete({'name': ptable['name']}) with 
pytest.raises(CLIReturnCodeError): - PartitionTable.info({'name': ptable['name']}) + module_target_sat.cli.PartitionTable.info({'name': ptable['name']}) @pytest.mark.tier1 - def test_positive_create_with_content(self): + def test_positive_create_with_content(self, module_target_sat): """Create a Partition Table with content :id: 28bfbd8b-2ada-44d0-89f3-63885cfb3495 @@ -92,13 +90,13 @@ def test_positive_create_with_content(self): :CaseImportance: Critical """ content = 'Fake ptable' - ptable = make_partition_table({'content': content}) - ptable_content = PartitionTable().dump({'id': ptable['id']}) + ptable = module_target_sat.cli_factory.make_partition_table({'content': content}) + ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']}) assert content in ptable_content @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_with_content_length(self): + def test_positive_create_with_content_length(self, module_target_sat): """Create a Partition Table with content length more than 4096 chars :id: 59e6f9ef-85c2-4229-8831-00edb41b19f4 @@ -108,12 +106,12 @@ def test_positive_create_with_content_length(self): :BZ: 1270181 """ content = gen_string('alpha', 5000) - ptable = make_partition_table({'content': content}) - ptable_content = PartitionTable().dump({'id': ptable['id']}) + ptable = module_target_sat.cli_factory.make_partition_table({'content': content}) + ptable_content = module_target_sat.cli.PartitionTable().dump({'id': ptable['id']}) assert content in ptable_content @pytest.mark.tier1 - def test_positive_delete_by_id(self): + def test_positive_delete_by_id(self, module_target_sat): """Create a Partition Table then delete it by its ID :id: 4d2369eb-4dc1-4ab5-96d4-c872c39f4ff5 @@ -122,13 +120,13 @@ def test_positive_delete_by_id(self): :CaseImportance: Critical """ - ptable = make_partition_table() - PartitionTable.delete({'id': ptable['id']}) + ptable = module_target_sat.cli_factory.make_partition_table() + module_target_sat.cli.PartitionTable.delete({'id': ptable['id']}) with pytest.raises(CLIReturnCodeError): - PartitionTable.info({'id': ptable['id']}) + module_target_sat.cli.PartitionTable.info({'id': ptable['id']}) @pytest.mark.tier2 - def test_positive_add_remove_os_by_id(self): + def test_positive_add_remove_os_by_id(self, module_target_sat): """Create a partition table then add and remove an operating system to it using IDs for association @@ -138,18 +136,22 @@ def test_positive_add_remove_os_by_id(self): :CaseLevel: Integration """ - ptable = make_partition_table() - os = make_os() - PartitionTable.add_operating_system({'id': ptable['id'], 'operatingsystem-id': os['id']}) - ptable = PartitionTable.info({'id': ptable['id']}) + ptable = module_target_sat.cli_factory.make_partition_table() + os = module_target_sat.cli_factory.make_os() + module_target_sat.cli.PartitionTable.add_operating_system( + {'id': ptable['id'], 'operatingsystem-id': os['id']} + ) + ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']}) assert os['title'] in ptable['operating-systems'] - PartitionTable.remove_operating_system({'id': ptable['id'], 'operatingsystem-id': os['id']}) - ptable = PartitionTable.info({'id': ptable['id']}) + module_target_sat.cli.PartitionTable.remove_operating_system( + {'id': ptable['id'], 'operatingsystem-id': os['id']} + ) + ptable = module_target_sat.cli.PartitionTable.info({'id': ptable['id']}) assert os['title'] not in ptable['operating-systems'] @pytest.mark.tier2 @pytest.mark.upgrade - def 
test_positive_add_remove_os_by_name(self): + def test_positive_add_remove_os_by_name(self, module_target_sat): """Create a partition table then add and remove an operating system to it using names for association @@ -159,15 +161,15 @@ def test_positive_add_remove_os_by_name(self): :CaseLevel: Integration """ - ptable = make_partition_table() - os = make_os() - PartitionTable.add_operating_system( + ptable = module_target_sat.cli_factory.make_partition_table() + os = module_target_sat.cli_factory.make_os() + module_target_sat.cli.PartitionTable.add_operating_system( {'name': ptable['name'], 'operatingsystem': os['title']} ) - ptable = PartitionTable.info({'name': ptable['name']}) + ptable = module_target_sat.cli.PartitionTable.info({'name': ptable['name']}) assert os['title'] in ptable['operating-systems'] - PartitionTable.remove_operating_system( + module_target_sat.cli.PartitionTable.remove_operating_system( {'name': ptable['name'], 'operatingsystem': os['title']} ) - ptable = PartitionTable.info({'name': ptable['name']}) + ptable = module_target_sat.cli.PartitionTable.info({'name': ptable['name']}) assert os['title'] not in ptable['operating-systems'] diff --git a/tests/foreman/cli/test_product.py b/tests/foreman/cli/test_product.py index e11b09f6367..88bfd3a9323 100644 --- a/tests/foreman/cli/test_product.py +++ b/tests/foreman/cli/test_product.py @@ -19,21 +19,9 @@ from fauxfactory import gen_alphanumeric, gen_integer, gen_string, gen_url import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.defaults import Defaults -from robottelo.cli.factory import ( - CLIFactoryError, - make_content_credential, - make_org, - make_product, - make_repository, - make_sync_plan, -) -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository from robottelo.config import settings from robottelo.constants import FAKE_0_YUM_REPO_PACKAGES_COUNT +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( invalid_values_list, parametrized, @@ -57,11 +45,11 @@ def test_positive_CRUD(module_org, target_sat): :CaseImportance: Critical """ desc = list(valid_data_list().values())[0] - gpg_key = make_content_credential({'organization-id': module_org.id}) + gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) name = list(valid_data_list().values())[0] label = valid_labels_list()[0] - sync_plan = make_sync_plan({'organization-id': module_org.id}) - product = make_product( + sync_plan = target_sat.cli_factory.make_sync_plan({'organization-id': module_org.id}) + product = target_sat.cli_factory.make_product( { 'description': desc, 'gpg-key-id': gpg_key['id'], @@ -80,10 +68,10 @@ def test_positive_CRUD(module_org, target_sat): # update desc = list(valid_data_list().values())[0] - new_gpg_key = make_content_credential({'organization-id': module_org.id}) - new_sync_plan = make_sync_plan({'organization-id': module_org.id}) + new_gpg_key = target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) + new_sync_plan = target_sat.cli_factory.make_sync_plan({'organization-id': module_org.id}) new_prod_name = gen_string('alpha', 8) - Product.update( + target_sat.cli.Product.update( { 'description': desc, 'id': product['id'], @@ -92,7 +80,7 @@ def test_positive_CRUD(module_org, target_sat): 'name': new_prod_name, } ) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + product = 
target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert product['name'] == new_prod_name assert product['description'] == desc assert product['gpg']['gpg-key-id'] == new_gpg_key['id'] @@ -101,36 +89,36 @@ def test_positive_CRUD(module_org, target_sat): assert product['sync-plan-id'] != sync_plan['id'] # synchronize - repo = make_repository( + repo = target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], 'url': settings.repos.yum_0.url, }, ) - Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) - packages = Package.list({'product-id': product['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Product.synchronize({'id': product['id'], 'organization-id': module_org.id}) + packages = target_sat.cli.Package.list({'product-id': product['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert int(repo['content-counts']['packages']) == len(packages) assert len(packages) == FAKE_0_YUM_REPO_PACKAGES_COUNT # delete - Product.remove_sync_plan({'id': product['id']}) - product = Product.info({'id': product['id'], 'organization-id': module_org.id}) + target_sat.cli.Product.remove_sync_plan({'id': product['id']}) + product = target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) assert len(product['sync-plan-id']) == 0 - Product.delete({'id': product['id']}) + target_sat.cli.Product.delete({'id': product['id']}) target_sat.wait_for_tasks( search_query="label = Actions::Katello::Product::Destroy" f" and resource_id = {product['id']}", max_tries=10, ) with pytest.raises(CLIReturnCodeError): - Product.info({'id': product['id'], 'organization-id': module_org.id}) + target_sat.cli.Product.info({'id': product['id'], 'organization-id': module_org.id}) @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_name(name, module_org): +def test_negative_create_with_name(name, module_org, module_target_sat): """Check that only valid names can be used :id: 2da26ab2-8d79-47ea-b4d2-defcd98a0649 @@ -142,14 +130,14 @@ def test_negative_create_with_name(name, module_org): :CaseImportance: High """ with pytest.raises(CLIFactoryError): - make_product({'name': name, 'organization-id': module_org.id}) + module_target_sat.cli_factory.make_product({'name': name, 'organization-id': module_org.id}) @pytest.mark.tier2 @pytest.mark.parametrize( 'label', **parametrized([gen_string(e, 15) for e in ('latin1', 'utf8', 'html')]) ) -def test_negative_create_with_label(label, module_org): +def test_negative_create_with_label(label, module_org, module_target_sat): """Check that only valid labels can be used :id: 7cf970aa-48dc-425b-ae37-1e15dfab0626 @@ -161,7 +149,7 @@ def test_negative_create_with_label(label, module_org): :CaseImportance: High """ with pytest.raises(CLIFactoryError): - make_product( + module_target_sat.cli_factory.make_product( { 'label': label, 'name': gen_alphanumeric(), @@ -189,13 +177,15 @@ def test_product_list_with_default_settings(module_org, target_sat): org_id = str(module_org.id) default_product_name = gen_string('alpha') non_default_product_name = gen_string('alpha') - non_default_org = make_org() - default_product = make_product({'name': default_product_name, 'organization-id': org_id}) - non_default_product = make_product( + non_default_org = target_sat.cli_factory.make_org() + default_product = target_sat.cli_factory.make_product( + {'name': default_product_name, 
'organization-id': org_id} + ) + non_default_product = target_sat.cli_factory.make_product( {'name': non_default_product_name, 'organization-id': non_default_org['id']} ) for product in default_product, non_default_product: - make_repository( + target_sat.cli_factory.make_repository( { 'organization-id': org_id, 'product-id': product['id'], @@ -203,7 +193,7 @@ def test_product_list_with_default_settings(module_org, target_sat): }, ) - Defaults.add({'param-name': 'organization_id', 'param-value': org_id}) + target_sat.cli.Defaults.add({'param-name': 'organization_id', 'param-value': org_id}) result = target_sat.cli.Defaults.list(per_page=False) assert any([res['value'] == org_id for res in result if res['parameter'] == 'organization_id']) @@ -215,20 +205,20 @@ def test_product_list_with_default_settings(module_org, target_sat): assert any([res['product'] == default_product_name for res in result]) # verify that defaults setting should not affect other entities - product_list = Product.list({'organization-id': non_default_org['id']}) + product_list = target_sat.cli.Product.list({'organization-id': non_default_org['id']}) assert non_default_product_name == product_list[0]['name'] - repository_list = Repository.list({'organization-id': non_default_org['id']}) + repository_list = target_sat.cli.Repository.list({'organization-id': non_default_org['id']}) assert non_default_product_name == repository_list[0]['product'] finally: - Defaults.delete({'param-name': 'organization_id'}) + target_sat.cli.Defaults.delete({'param-name': 'organization_id'}) result = target_sat.cli.Defaults.list(per_page=False) assert not [res for res in result if res['parameter'] == 'organization_id'] @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1999541') -def test_positive_product_sync_state(module_org): +def test_positive_product_sync_state(module_org, module_target_sat): """hammer product info shows correct sync state. :id: 58af6239-85d7-4b8b-bd2d-ab4cd4f29840 @@ -246,8 +236,8 @@ def test_positive_product_sync_state(module_org): :expectedresults: hammer should show 'Sync Incomplete' in both cases. 
""" - product = make_product({'organization-id': module_org.id}) - repo_a1 = make_repository( + product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo_a1 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], @@ -257,13 +247,15 @@ def test_positive_product_sync_state(module_org): ) with pytest.raises(CLIReturnCodeError): - Repository.synchronize({'id': repo_a1['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_a1['id']}) - product_info = Product.info({'id': product['id'], 'organization-id': module_org.id}) - product_list = Product.list({'organization-id': module_org.id}) + product_info = module_target_sat.cli.Product.info( + {'id': product['id'], 'organization-id': module_org.id} + ) + product_list = module_target_sat.cli.Product.list({'organization-id': module_org.id}) assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list] - repo_a2 = make_repository( + repo_a2 = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], @@ -272,7 +264,9 @@ def test_positive_product_sync_state(module_org): }, ) - Repository.synchronize({'id': repo_a2['id']}) - product_info = Product.info({'id': product['id'], 'organization-id': module_org.id}) - product_list = Product.list({'organization-id': module_org.id}) + module_target_sat.cli.Repository.synchronize({'id': repo_a2['id']}) + product_info = module_target_sat.cli.Product.info( + {'id': product['id'], 'organization-id': module_org.id} + ) + product_list = module_target_sat.cli.Product.list({'organization-id': module_org.id}) assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list] diff --git a/tests/foreman/cli/test_provisioningtemplate.py b/tests/foreman/cli/test_provisioningtemplate.py index 71214e84615..663aa64a1e2 100644 --- a/tests/foreman/cli/test_provisioningtemplate.py +++ b/tests/foreman/cli/test_provisioningtemplate.py @@ -23,7 +23,7 @@ import pytest from robottelo import constants -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='module') diff --git a/tests/foreman/cli/test_realm.py b/tests/foreman/cli/test_realm.py index 595a123e9e8..2c3b4ae5413 100644 --- a/tests/foreman/cli/test_realm.py +++ b/tests/foreman/cli/test_realm.py @@ -21,13 +21,11 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError, make_realm -from robottelo.cli.realm import Realm +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError @pytest.mark.tier1 -def test_negative_create_name_only(): +def test_negative_create_name_only(module_target_sat): """Create a realm with just a name parameter :id: 5606279f-0707-4d36-a307-b204ebb981ad @@ -35,11 +33,13 @@ def test_negative_create_name_only(): :expectedresults: Realm creation fails, requires proxy_id and type """ with pytest.raises(CLIFactoryError): - make_realm({'name': gen_string('alpha', random.randint(1, 30))}) + module_target_sat.cli_factory.make_realm( + {'name': gen_string('alpha', random.randint(1, 30))} + ) @pytest.mark.tier1 -def test_negative_create_invalid_id(): +def test_negative_create_invalid_id(module_target_sat): """Create a realm with an invalid proxy ID :id: 916bd1fb-4649-469c-b511-b0b07301a990 @@ -47,7 +47,7 @@ def test_negative_create_invalid_id(): :expectedresults: Realm creation fails, 
proxy_id must be numeric """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.make_realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': gen_string('alphanumeric'), @@ -57,7 +57,7 @@ def test_negative_create_invalid_id(): @pytest.mark.tier1 -def test_negative_create_invalid_realm_type(): +def test_negative_create_invalid_realm_type(module_target_sat): """Create a realm with an invalid type :id: 423a0969-9311-48d2-9220-040a42159a89 @@ -66,7 +66,7 @@ def test_negative_create_invalid_realm_type(): e.g. Red Hat Identity Management or Active Directory """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.make_realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -76,7 +76,7 @@ def test_negative_create_invalid_realm_type(): @pytest.mark.tier1 -def test_negative_create_invalid_location(): +def test_negative_create_invalid_location(module_target_sat): """Create a realm with an invalid location :id: 95335c3a-413f-4156-b727-91b525738171 @@ -84,7 +84,7 @@ def test_negative_create_invalid_location(): :expectedresults: Realm creation fails, location not found """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.make_realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -95,7 +95,7 @@ def test_negative_create_invalid_location(): @pytest.mark.tier1 -def test_negative_create_invalid_organization(): +def test_negative_create_invalid_organization(module_target_sat): """Create a realm with an invalid organization :id: c0ffbc6d-a2da-484b-9627-5454687a3abb @@ -103,7 +103,7 @@ def test_negative_create_invalid_organization(): :expectedresults: Realm creation fails, organization not found """ with pytest.raises(CLIFactoryError): - make_realm( + module_target_sat.cli_factory.make_realm( { 'name': gen_string('alpha', random.randint(1, 30)), 'realm-proxy-id': '1', @@ -114,7 +114,7 @@ def test_negative_create_invalid_organization(): @pytest.mark.tier2 -def test_negative_delete_nonexistent_realm_name(): +def test_negative_delete_nonexistent_realm_name(module_target_sat): """Delete a realm with a name that does not exist :id: 616db509-9643-4817-ba6b-f05cdb1cecb0 @@ -122,11 +122,11 @@ def test_negative_delete_nonexistent_realm_name(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.delete({'name': gen_string('alpha', random.randint(1, 30))}) + module_target_sat.cli.Realm.delete({'name': gen_string('alpha', random.randint(1, 30))}) @pytest.mark.tier2 -def test_negative_delete_nonexistent_realm_id(): +def test_negative_delete_nonexistent_realm_id(module_target_sat): """Delete a realm with an ID that does not exist :id: 70bb9d4e-7e71-479a-8c82-e6fcff88ea14 @@ -134,11 +134,11 @@ def test_negative_delete_nonexistent_realm_id(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.delete({'id': 0}) + module_target_sat.cli.Realm.delete({'id': 0}) @pytest.mark.tier2 -def test_negative_info_nonexistent_realm_name(): +def test_negative_info_nonexistent_realm_name(module_target_sat): """Get info for a realm with a name that does not exist :id: 24e4fbfa-7141-4f90-8c5d-eb88b162bd64 @@ -146,11 +146,11 @@ def test_negative_info_nonexistent_realm_name(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.info({'name': gen_string('alpha', random.randint(1, 30))}) + module_target_sat.cli.Realm.info({'name': gen_string('alpha', random.randint(1, 
30))}) @pytest.mark.tier2 -def test_negative_info_nonexistent_realm_id(): +def test_negative_info_nonexistent_realm_id(module_target_sat): """Get info for a realm with an ID that does not exist :id: db8382eb-6d0b-4d6a-a9bf-38a462389f7b @@ -158,4 +158,4 @@ def test_negative_info_nonexistent_realm_id(): :expectedresults: Realm not found """ with pytest.raises(CLIReturnCodeError): - Realm.info({'id': 0}) + module_target_sat.cli.Realm.info({'id': 0}) diff --git a/tests/foreman/cli/test_remoteexecution.py b/tests/foreman/cli/test_remoteexecution.py index d360f1be8f7..3ed735ab926 100644 --- a/tests/foreman/cli/test_remoteexecution.py +++ b/tests/foreman/cli/test_remoteexecution.py @@ -27,23 +27,6 @@ import pytest from robottelo import constants -from robottelo.cli.factory import ( - make_filter, - make_job_invocation, - make_job_invocation_with_credentials, - make_job_template, - make_role, - make_user, -) -from robottelo.cli.filter import Filter -from robottelo.cli.globalparam import GlobalParameter -from robottelo.cli.host import Host -from robottelo.cli.job_invocation import JobInvocation -from robottelo.cli.recurring_logic import RecurringLogic -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.task import Task -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import PRDS, REPOS, REPOSET from robottelo.hosts import ContentHost @@ -89,33 +72,39 @@ def infra_host(request, target_sat, module_capsule_configured): yield infra_hosts[request.param] -def assert_job_invocation_result(invocation_command_id, client_hostname, expected_result='success'): +def assert_job_invocation_result( + sat, invocation_command_id, client_hostname, expected_result='success' +): """Asserts the job invocation finished with the expected result and fetches job output when error occurs. Result is one of: success, pending, error, warning""" - result = JobInvocation.info({'id': invocation_command_id}) + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) try: assert result[expected_result] == '1' except AssertionError: raise AssertionError( 'host output: {}'.format( ' '.join( - JobInvocation.get_output({'id': invocation_command_id, 'host': client_hostname}) + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) ) ) ) -def assert_job_invocation_status(invocation_command_id, client_hostname, status): +def assert_job_invocation_status(sat, invocation_command_id, client_hostname, status): """Asserts the job invocation status and fetches job output when error occurs. 
Status is one of: queued, stopped, running, paused""" - result = JobInvocation.info({'id': invocation_command_id}) + result = sat.cli.JobInvocation.info({'id': invocation_command_id}) try: assert result['status'] == status except AssertionError: raise AssertionError( 'host output: {}'.format( ' '.join( - JobInvocation.get_output({'id': invocation_command_id, 'host': client_hostname}) + sat.cli.JobInvocation.get_output( + {'id': invocation_command_id, 'host': client_hostname} + ) ) ) ) @@ -128,7 +117,9 @@ class TestRemoteExecution: @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.rhel_ver_list([8]) - def test_positive_run_default_job_template(self, module_org, rex_contenthost): + def test_positive_run_default_job_template( + self, module_org, rex_contenthost, module_target_sat + ): """Run default template on host connected and list task :id: 811c7747-bec6-4a2d-8e5c-b5045d3fbc0d @@ -144,18 +135,18 @@ def test_positive_run_default_job_template(self, module_org, rex_contenthost): """ client = rex_contenthost command = f'echo {gen_string("alpha")}' - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', 'search-query': f"name ~ {client.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) - task = Task.list_tasks({'search': command})[0] - search = Task.list_tasks({'search': f'id={task["id"]}'}) + assert_job_invocation_result(module_target_sat, invocation_command['id'], client.hostname) + task = module_target_sat.cli.Task.list_tasks({'search': command})[0] + search = module_target_sat.cli.Task.list_tasks({'search': f'id={task["id"]}'}) assert search[0]['action'] == task['action'] - out = JobInvocation.get_output( + out = module_target_sat.cli.JobInvocation.get_output( { 'id': invocation_command['id'], 'host': client.hostname, @@ -169,7 +160,7 @@ def test_positive_run_default_job_template(self, module_org, rex_contenthost): @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.rhel_ver_list([7, 8, 9]) - def test_positive_run_job_effective_user(self, rex_contenthost): + def test_positive_run_job_effective_user(self, rex_contenthost, module_target_sat): """Run default job template as effective user on a host :id: 0cd75cab-f699-47e6-94d3-4477d2a94bb7 @@ -185,16 +176,16 @@ def test_positive_run_job_effective_user(self, rex_contenthost): # create a user on client via remote job username = gen_string('alpha') filename = gen_string('alpha') - make_user_job = make_job_invocation( + make_user_job = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=useradd -m {username}", 'search-query': f"name ~ {client.hostname}", } ) - assert_job_invocation_result(make_user_job['id'], client.hostname) + assert_job_invocation_result(module_target_sat, make_user_job['id'], client.hostname) # create a file as new user - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=touch /home/{username}/{filename}", @@ -202,7 +193,7 @@ def test_positive_run_job_effective_user(self, rex_contenthost): 'effective-user': f'{username}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(module_target_sat, invocation_command['id'], client.hostname) # check the file owner result = 
client.execute( f'''stat -c '%U' /home/{username}/{filename}''', @@ -233,10 +224,10 @@ def test_positive_run_custom_job_template(self, rex_contenthost, module_org, tar template_file = 'template_file.txt' target_sat.execute(f'echo "echo Enforcing" > {template_file}') template_name = gen_string('alpha', 7) - make_job_template( + target_sat.cli_factory.make_job_template( {'organizations': self.org.name, 'name': template_name, 'file': template_file} ) - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( {'job-template': template_name, 'search-query': f'name ~ {client.hostname}'} ) assert_job_invocation_result(invocation_command['id'], client.hostname) @@ -245,7 +236,9 @@ def test_positive_run_custom_job_template(self, rex_contenthost, module_org, tar @pytest.mark.upgrade @pytest.mark.no_containers @pytest.mark.rhel_ver_list([8]) - def test_positive_run_default_job_template_multiple_hosts(self, registered_hosts, module_org): + def test_positive_run_default_job_template_multiple_hosts( + self, registered_hosts, module_target_sat + ): """Run default job template against multiple hosts :id: 694a21d3-243b-4296-8bd0-4bad9663af15 @@ -255,7 +248,7 @@ def test_positive_run_default_job_template_multiple_hosts(self, registered_hosts :parametrized: yes """ clients = registered_hosts - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -269,13 +262,13 @@ def test_positive_run_default_job_template_multiple_hosts(self, registered_hosts 'host output from {}: {}'.format( vm.hostname, ' '.join( - JobInvocation.get_output( + module_target_sat.cli.JobInvocation.get_output( {'id': invocation_command['id'], 'host': vm.hostname} ) ), ) ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '2', output_msgs @pytest.mark.tier3 @@ -306,21 +299,21 @@ def test_positive_install_remove_multiple_packages_with_a_job( repo=settings.repos.yum_3.url, ) # Install packages - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Install Package - Katello Script Default', 'inputs': f'package={" ".join(packages)}', 'search-query': f'name ~ {client.hostname}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run(f'rpm -q {" ".join(packages)}') assert result.status == 0 # Update packages pre_versions = result.stdout.splitlines() result = client.run(f'dnf -y downgrade {" ".join(packages)}') assert result.status == 0 - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Update Package - Katello Script Default', 'inputs': f'package={" ".join(packages)}', @@ -331,14 +324,14 @@ def test_positive_install_remove_multiple_packages_with_a_job( post_versions = client.run(f'rpm -q {" ".join(packages)}').stdout.splitlines() assert set(pre_versions) == set(post_versions) # Remove packages - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Remove Package - Katello Script Default', 'inputs': f'package={" ".join(packages)}', 'search-query': f'name ~ {client.hostname}', } ) - 
assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run(f'rpm -q {" ".join(packages)}') assert result.status == len(packages) @@ -370,26 +363,26 @@ def test_positive_install_remove_packagegroup_with_a_job( repo=settings.repos.yum_1.url, ) # Install the package groups - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Install Group - Katello Script Default', 'inputs': f'package={" ".join(groups)}', 'search-query': f'name ~ {client.hostname}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run('dnf grouplist --installed') assert all(item in result.stdout for item in groups) # Remove one of the installed package groups remove = random.choice(groups) - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Remove Group - Katello Script Default', 'inputs': f'package={remove}', 'search-query': f'name ~ {client.hostname}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run('dnf grouplist --installed') assert remove not in result.stdout @@ -420,14 +413,14 @@ def test_positive_install_errata_with_a_job( ) client.run(f'dnf install -y {constants.FAKE_1_CUSTOM_PACKAGE}') # Install errata - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Install Errata - Katello Script Default', 'inputs': f'errata={settings.repos.yum_0.errata[1]}', 'search-query': f'name ~ {client.hostname}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) result = client.run(f'rpm -q {constants.FAKE_2_CUSTOM_PACKAGE}') assert result.status == 0 @@ -447,7 +440,7 @@ def test_positive_match_feature_templates(self, target_sat, feature): @pytest.mark.tier3 @pytest.mark.rhel_ver_list([8]) - def test_positive_run_recurring_job_with_max_iterations(self, rex_contenthost): + def test_positive_run_recurring_job_with_max_iterations(self, rex_contenthost, target_sat): """Run default job template multiple times with max iteration :id: 0a3d1627-95d9-42ab-9478-a908f2a7c509 @@ -458,7 +451,7 @@ def test_positive_run_recurring_job_with_max_iterations(self, rex_contenthost): :parametrized: yes """ client = rex_contenthost - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -467,16 +460,16 @@ def test_positive_run_recurring_job_with_max_iterations(self, rex_contenthost): 'max-iteration': 2, # just two runs } ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert_job_invocation_status(invocation_command['id'], client.hostname, 'queued') sleep(150) - rec_logic = RecurringLogic.info({'id': result['recurring-logic-id']}) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) assert rec_logic['state'] == 'finished' assert rec_logic['iteration'] == '2' @pytest.mark.tier3 
@pytest.mark.rhel_ver_list([8]) - def test_positive_time_expressions(self, rex_contenthost): + def test_positive_time_expressions(self, rex_contenthost, target_sat): """Test various expressions for extended cronline syntax :id: 584e7b27-9484-436a-b850-11acb900a7d8 @@ -541,7 +534,7 @@ def test_positive_time_expressions(self, rex_contenthost): ], ] for exp in fugit_expressions: - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -550,9 +543,9 @@ def test_positive_time_expressions(self, rex_contenthost): 'max-iteration': 1, } ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert_job_invocation_status(invocation_command['id'], client.hostname, 'queued') - rec_logic = RecurringLogic.info({'id': result['recurring-logic-id']}) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) assert ( rec_logic['next-occurrence'] == exp[1] ), f'Job was not scheduled as expected using {exp[0]}' @@ -573,7 +566,7 @@ def test_positive_run_scheduled_job_template(self, rex_contenthost, target_sat): system_current_time = target_sat.execute('date --utc +"%b %d %Y %I:%M%p"').stdout current_time_object = datetime.strptime(system_current_time.strip('\n'), '%b %d %Y %I:%M%p') plan_time = (current_time_object + timedelta(seconds=30)).strftime("%Y-%m-%d %H:%M") - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -584,10 +577,10 @@ def test_positive_run_scheduled_job_template(self, rex_contenthost, target_sat): # Wait until the job runs pending_state = '1' while pending_state != '0': - invocation_info = JobInvocation.info({'id': invocation_command['id']}) + invocation_info = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) pending_state = invocation_info['pending'] sleep(30) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) class TestAnsibleREX: @@ -598,7 +591,7 @@ class TestAnsibleREX: @pytest.mark.pit_client @pytest.mark.pit_server @pytest.mark.rhel_ver_list([7, 8, 9]) - def test_positive_run_effective_user_job(self, rex_contenthost): + def test_positive_run_effective_user_job(self, rex_contenthost, target_sat): """Tests Ansible REX job having effective user runs successfully :id: a5fa20d8-c2bd-4bbf-a6dc-bf307b59dd8c @@ -625,16 +618,16 @@ def test_positive_run_effective_user_job(self, rex_contenthost): # create a user on client via remote job username = gen_string('alpha') filename = gen_string('alpha') - make_user_job = make_job_invocation( + make_user_job = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Ansible Default', 'inputs': f"command=useradd -m {username}", 'search-query': f"name ~ {client.hostname}", } ) - assert_job_invocation_result(make_user_job['id'], client.hostname) + assert_job_invocation_result(target_sat, make_user_job['id'], client.hostname) # create a file as new user - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Ansible Default', 'inputs': f"command=touch /home/{username}/{filename}", @@ -642,7 +635,7 @@ def test_positive_run_effective_user_job(self, 
rex_contenthost): 'effective-user': f'{username}', } ) - assert_job_invocation_result(invocation_command['id'], client.hostname) + assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname) # check the file owner result = client.execute( f'''stat -c '%U' /home/{username}/{filename}''', @@ -653,7 +646,7 @@ def test_positive_run_effective_user_job(self, rex_contenthost): @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.rhel_ver_list([8]) - def test_positive_run_reccuring_job(self, rex_contenthost): + def test_positive_run_reccuring_job(self, rex_contenthost, target_sat): """Tests Ansible REX reccuring job runs successfully multiple times :id: 49b0d31d-58f9-47f1-aa5d-561a1dcb0d66 @@ -679,7 +672,7 @@ def test_positive_run_reccuring_job(self, rex_contenthost): :parametrized: yes """ client = rex_contenthost - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Ansible Default', 'inputs': 'command=ls', @@ -688,9 +681,9 @@ def test_positive_run_reccuring_job(self, rex_contenthost): 'max-iteration': 2, # just two runs } ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) sleep(150) - rec_logic = RecurringLogic.info({'id': result['recurring-logic-id']}) + rec_logic = target_sat.cli.RecurringLogic.info({'id': result['recurring-logic-id']}) assert rec_logic['state'] == 'finished' assert rec_logic['iteration'] == '2' # 2129432 @@ -705,7 +698,7 @@ def test_positive_run_reccuring_job(self, rex_contenthost): @pytest.mark.tier3 @pytest.mark.no_containers - def test_positive_run_concurrent_jobs(self, registered_hosts, module_org): + def test_positive_run_concurrent_jobs(self, registered_hosts, target_sat): """Tests Ansible REX concurent jobs without batch trigger :id: ad0f108c-03f2-49c7-8732-b1056570567b @@ -729,10 +722,10 @@ def test_positive_run_concurrent_jobs(self, registered_hosts, module_org): :parametrized: yes """ param_name = 'foreman_tasks_proxy_batch_trigger' - GlobalParameter().set({'name': param_name, 'value': 'false'}) + target_sat.cli.GlobalParameter().set({'name': param_name, 'value': 'false'}) clients = registered_hosts output_msgs = [] - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Ansible Default', 'inputs': 'command=ls', @@ -745,16 +738,16 @@ def test_positive_run_concurrent_jobs(self, registered_hosts, module_org): 'host output from {}: {}'.format( vm.hostname, ' '.join( - JobInvocation.get_output( + target_sat.cli.JobInvocation.get_output( {'id': invocation_command['id'], 'host': vm.hostname} ) ), ) ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '2', output_msgs - GlobalParameter().delete({'name': param_name}) - assert len(GlobalParameter().list({'search': param_name})) == 0 + target_sat.cli.GlobalParameter().delete({'name': param_name}) + assert len(target_sat.cli.GlobalParameter().list({'search': param_name})) == 0 @pytest.mark.tier3 @pytest.mark.upgrade @@ -808,7 +801,7 @@ def test_positive_run_packages_and_services_job( repo=settings.repos.yum_3.url, ) # install package - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Package Action - Ansible Default', 'inputs': 
'state=latest, name={}'.format(*packages), @@ -821,7 +814,7 @@ def test_positive_run_packages_and_services_job( # stop a service service = "rsyslog" - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Service Action - Ansible Default', 'inputs': f'state=stopped, name={service}', @@ -833,7 +826,7 @@ def test_positive_run_packages_and_services_job( assert result.status == 3 # start it again - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Service Action - Ansible Default', 'inputs': f'state=started, name={service}', @@ -849,7 +842,7 @@ def test_positive_run_packages_and_services_job( 'fixture_sca_vmsetup', [{'nick': 'rhel8'}], ids=['rhel8'], indirect=True ) def test_positive_install_ansible_collection( - self, fixture_sca_vmsetup, module_sca_manifest_org + self, fixture_sca_vmsetup, module_sca_manifest_org, target_sat ): """Test whether Ansible collection can be installed via REX @@ -868,7 +861,7 @@ def test_positive_install_ansible_collection( :Team: Rocket """ # Configure repository to prepare for installing ansible on host - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'basearch': 'x86_64', 'name': REPOSET['rhae2.9_el8'], @@ -877,7 +870,7 @@ def test_positive_install_ansible_collection( 'releasever': '8', } ) - Repository.synchronize( + target_sat.cli.Repository.synchronize( { 'name': REPOS['rhae2.9_el8']['name'], 'organization-id': module_sca_manifest_org.id, @@ -888,27 +881,27 @@ def test_positive_install_ansible_collection( client.execute('subscription-manager refresh') client.execute(f'subscription-manager repos --enable {REPOS["rhae2.9_el8"]["id"]}') client.execute('dnf -y install ansible') - collection_job = make_job_invocation( + collection_job = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Ansible Collection - Install from Galaxy', 'inputs': 'ansible_collections_list="oasis_roles.system"', 'search-query': f'name ~ {client.hostname}', } ) - result = JobInvocation.info({'id': collection_job['id']}) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) assert result['success'] == '1' collection_path = client.execute('ls /etc/ansible/collections/ansible_collections').stdout assert 'oasis_roles' in collection_path # Extend test with custom collections_path advanced input field - collection_job = make_job_invocation( + collection_job = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Ansible Collection - Install from Galaxy', 'inputs': 'ansible_collections_list="oasis_roles.system", collections_path="~/"', 'search-query': f'name ~ {client.hostname}', } ) - result = JobInvocation.info({'id': collection_job['id']}) + result = target_sat.cli.JobInvocation.info({'id': collection_job['id']}) assert result['success'] == '1' collection_path = client.execute('ls ~/ansible_collections').stdout assert 'oasis_roles' in collection_path @@ -918,38 +911,50 @@ class TestRexUsers: """Tests related to remote execution users""" @pytest.fixture(scope='class') - def class_rexmanager_user(self, module_org): + def class_rexmanager_user(self, module_org, class_target_sat): """Creates a user with Remote Execution Manager role""" password = gen_string('alpha') rexmanager = gen_string('alpha') - make_user({'login': rexmanager, 'password': password, 'organization-ids': module_org.id}) - User.add_role({'login': rexmanager, 'role': 'Remote Execution Manager'}) + 
class_target_sat.cli_factory.make_user( + {'login': rexmanager, 'password': password, 'organization-ids': module_org.id} + ) + class_target_sat.cli.User.add_role( + {'login': rexmanager, 'role': 'Remote Execution Manager'} + ) yield (rexmanager, password) @pytest.fixture(scope='class') - def class_rexinfra_user(self, module_org): + def class_rexinfra_user(self, module_org, class_target_sat): """Creates a user with all Remote Execution related permissions""" password = gen_string('alpha') rexinfra = gen_string('alpha') - make_user({'login': rexinfra, 'password': password, 'organization-ids': module_org.id}) - role = make_role({'organization-ids': module_org.id}) + class_target_sat.cli_factory.make_user( + {'login': rexinfra, 'password': password, 'organization-ids': module_org.id} + ) + role = class_target_sat.cli_factory.make_role({'organization-ids': module_org.id}) invocation_permissions = [ permission['name'] - for permission in Filter.available_permissions( + for permission in class_target_sat.cli.Filter.available_permissions( {'search': 'resource_type=JobInvocation'} ) ] template_permissions = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=JobTemplate'}) + for permission in class_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=JobTemplate'} + ) ] permissions = ','.join(invocation_permissions) - make_filter({'role-id': role['id'], 'permissions': permissions}) + class_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) permissions = ','.join(template_permissions) # needs execute_jobs_on_infrastructure_host permission - make_filter({'role-id': role['id'], 'permissions': permissions}) - User.add_role({'login': rexinfra, 'role': role['name']}) - User.add_role({'login': rexinfra, 'role': 'Remote Execution Manager'}) + class_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) + class_target_sat.cli.User.add_role({'login': rexinfra, 'role': role['name']}) + class_target_sat.cli.User.add_role({'login': rexinfra, 'role': 'Remote Execution Manager'}) yield (rexinfra, password) @pytest.mark.tier3 @@ -992,11 +997,13 @@ def test_positive_rex_against_infra_hosts( """ client = rex_contenthost infra_host.add_rex_key(satellite=target_sat) - Host.update({'name': infra_host.hostname, 'new-organization-id': module_org.id}) + target_sat.cli.Host.update( + {'name': infra_host.hostname, 'new-organization-id': module_org.id} + ) # run job as admin command = f"echo {gen_string('alpha')}" - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -1007,14 +1014,16 @@ def test_positive_rex_against_infra_hosts( hostnames = [client.hostname, infra_host.hostname] for hostname in hostnames: inv_output = ' '.join( - JobInvocation.get_output({'id': invocation_command['id'], 'host': hostname}) + target_sat.cli.JobInvocation.get_output( + {'id': invocation_command['id'], 'host': hostname} + ) ) output_msgs.append(f"host output from {hostname}: { inv_output }") - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '2', output_msgs # run job as regular rex user on all hosts - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.make_job_invocation_with_credentials( { 
'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -1023,11 +1032,11 @@ def test_positive_rex_against_infra_hosts( class_rexmanager_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '1' # run job as regular rex user just on infra hosts - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.make_job_invocation_with_credentials( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -1035,11 +1044,11 @@ def test_positive_rex_against_infra_hosts( }, class_rexmanager_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '0' # run job as rex user on Satellite - invocation_command = make_job_invocation_with_credentials( + invocation_command = target_sat.cli_factory.make_job_invocation_with_credentials( { 'job-template': 'Run Command - Script Default', 'inputs': f'command={command}', @@ -1047,7 +1056,7 @@ def test_positive_rex_against_infra_hosts( }, class_rexinfra_user, ) - result = JobInvocation.info({'id': invocation_command['id']}) + result = target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) assert result['success'] == '1' @@ -1100,7 +1109,7 @@ def test_positive_run_job_on_host_registered_to_async_ssh_provider( assert result.status == 0, f'Failed to register host: {result.stderr}' # run script provider rex command, longer-running command is needed to # verify the connection is not shut down too soon - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=echo start; sleep 10; echo done', @@ -1173,7 +1182,7 @@ def test_positive_run_job_on_host_converted_to_pull_provider( result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -1186,21 +1195,23 @@ def test_positive_run_job_on_host_converted_to_pull_provider( assert result.status == 0, 'Failed to start goferd on client' # run Ansible rex command to prove ssh provider works, remove katello-agent - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Package Action - Ansible Default', 'inputs': 'state=absent, name=katello-agent', 'search-query': f"name ~ {rhel_contenthost.hostname}", } ) - assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) + assert_job_invocation_result( + module_target_sat, invocation_command['id'], rhel_contenthost.hostname + ) # check katello-agent removal did not influence ygdrassil (SAT-1672) result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' - invocation_command = make_job_invocation( + invocation_command = 
make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', @@ -1208,7 +1219,7 @@ def test_positive_run_job_on_host_converted_to_pull_provider( } ) assert_job_invocation_result(invocation_command['id'], rhel_contenthost.hostname) - result = JobInvocation.info({'id': invocation_command['id']}) + result = module_target_sat.cli.JobInvocation.info({'id': invocation_command['id']}) @pytest.mark.tier3 @pytest.mark.upgrade @@ -1223,6 +1234,7 @@ def test_positive_run_job_on_host_registered_to_pull_provider( module_ak_with_cv, module_capsule_configured_mqtt, rhel_contenthost, + target_sat, ): """Run custom template on host registered to mqtt, check effective user setting @@ -1267,7 +1279,7 @@ result = rhel_contenthost.execute('systemctl status yggdrasild') assert result.status == 0, f'Failed to start yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Service Action - Script Default', 'inputs': 'action=status, service=yggdrasild', @@ -1278,7 +1290,7 @@ # create user on host username = gen_string('alpha') filename = gen_string('alpha') - make_user_job = make_job_invocation( + make_user_job = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=useradd -m {username}", @@ -1287,7 +1299,7 @@ ) assert_job_invocation_result(make_user_job['id'], rhel_contenthost.hostname) # create a file as new user - invocation_command = make_job_invocation( + invocation_command = target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': f"command=touch /home/{username}/{filename}", @@ -1360,7 +1372,7 @@ def test_positive_run_pull_job_on_offline_host( result = rhel_contenthost.execute('systemctl stop yggdrasild') assert result.status == 0, f'Failed to stop yggdrasil on client: {result.stderr}' # run script provider rex command - invocation_command = make_job_invocation( + invocation_command = module_target_sat.cli_factory.make_job_invocation( { 'job-template': 'Run Command - Script Default', 'inputs': 'command=ls', diff --git a/tests/foreman/cli/test_report.py b/tests/foreman/cli/test_report.py index b2d84d3adb8..94946b88e66 100644 --- a/tests/foreman/cli/test_report.py +++ b/tests/foreman/cli/test_report.py @@ -20,7 +20,7 @@ import pytest -from robottelo.cli.base import CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='module') diff --git a/tests/foreman/cli/test_reporttemplates.py b/tests/foreman/cli/test_reporttemplates.py index 7f1b939f1e0..74748160ad8 100644 --- a/tests/foreman/cli/test_reporttemplates.py +++ b/tests/foreman/cli/test_reporttemplates.py @@ -19,38 +19,6 @@ from fauxfactory import gen_alpha import pytest -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.base import Base, CLIReturnCodeError -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - CLIFactoryError, - make_activation_key, - make_architecture, - make_content_view, - make_fake_host, - make_filter, - make_lifecycle_environment, - make_medium, - make_os, - make_partition_table, - make_product, - make_report_template, - make_repository, - 
make_role, - make_template_input, - make_user, - setup_org_for_a_custom_repo, - setup_org_for_a_rh_repo, -) -from robottelo.cli.filter import Filter -from robottelo.cli.host import Host -from robottelo.cli.location import Location -from robottelo.cli.org import Org -from robottelo.cli.report_template import ReportTemplate -from robottelo.cli.repository import Repository -from robottelo.cli.settings import Settings -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import ( DEFAULT_LOC, @@ -65,41 +33,50 @@ REPOS, REPOSET, ) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.hosts import ContentHost @pytest.fixture(scope='module') -def local_environment(module_entitlement_manifest_org): +def local_environment(module_entitlement_manifest_org, module_target_sat): """Create a lifecycle environment with CLI factory""" - return make_lifecycle_environment({'organization-id': module_entitlement_manifest_org.id}) + return module_target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': module_entitlement_manifest_org.id} + ) @pytest.fixture(scope='module') -def local_content_view(module_entitlement_manifest_org): +def local_content_view(module_entitlement_manifest_org, module_target_sat): """Create content view, repository, and product""" - new_product = make_product({'organization-id': module_entitlement_manifest_org.id}) - new_repo = make_repository({'product-id': new_product['id']}) - Repository.synchronize({'id': new_repo['id']}) - content_view = make_content_view({'organization-id': module_entitlement_manifest_org.id}) - ContentView.add_repository( + new_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_entitlement_manifest_org.id} + ) + new_repo = module_target_sat.cli_factory.make_repository({'product-id': new_product['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + content_view = module_target_sat.cli_factory.make_content_view( + {'organization-id': module_entitlement_manifest_org.id} + ) + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_entitlement_manifest_org.id, 'repository-id': new_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) return content_view @pytest.fixture(scope='module') -def local_ak(module_entitlement_manifest_org, local_environment, local_content_view): +def local_ak( + module_entitlement_manifest_org, local_environment, local_content_view, module_target_sat +): """Promote a content view version and create an activation key with CLI Factory""" - cvv = ContentView.info({'id': local_content_view['id']})['versions'][0] - ContentView.version_promote( + cvv = module_target_sat.cli.ContentView.info({'id': local_content_view['id']})['versions'][0] + module_target_sat.cli.ContentView.version_promote( {'id': cvv['id'], 'to-lifecycle-environment-id': local_environment['id']} ) - return make_activation_key( + return module_target_sat.cli_factory.make_activation_key( { 'lifecycle-environment-id': local_environment['id'], 'content-view': local_content_view['name'], @@ -110,19 +87,21 @@ def local_ak(module_entitlement_manifest_org, local_environment, local_content_v @pytest.fixture(scope='module') -def local_subscription(module_entitlement_manifest_org, local_ak): - for subscription in Subscription.list( +def 
local_subscription(module_entitlement_manifest_org, local_ak, module_target_sat): + for subscription in module_target_sat.cli.Subscription.list( {'organization-id': module_entitlement_manifest_org.id}, per_page=False ): if subscription['name'] == DEFAULT_SUBSCRIPTION_NAME: break - ActivationKey.add_subscription({'id': local_ak['id'], 'subscription-id': subscription['id']}) + module_target_sat.cli.ActivationKey.add_subscription( + {'id': local_ak['id'], 'subscription-id': subscription['id']} + ) return subscription @pytest.mark.tier2 -def test_positive_report_help(): +def test_positive_report_help(module_target_sat): """hammer level of help included in test: Base level hammer help includes report-templates, Command level hammer help contains usage details, @@ -143,9 +122,9 @@ def test_positive_report_help(): report-templates create command details are displayed """ - command_output = Base().execute('--help') + command_output = module_target_sat.cli.Base().execute('--help') assert 'report-template' in command_output - command_output = Base().execute('report-template --help') + command_output = module_target_sat.cli.Base().execute('report-template --help') assert all( [ phrase in command_output @@ -158,7 +137,7 @@ def test_positive_report_help(): ] ] ) - command_output = Base().execute('report-template create --help') + command_output = module_target_sat.cli.Base().execute('report-template create --help') assert all( [ phrase in command_output @@ -169,7 +148,7 @@ def test_positive_report_help(): @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_crud_and_list(): +def test_positive_end_to_end_crud_and_list(target_sat): """CRUD test + list test for report templates :id: 2a143ddf-683f-49e2-badb-f9a387cfc53c @@ -197,34 +176,36 @@ def test_positive_end_to_end_crud_and_list(): """ # create name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = target_sat.cli_factory.make_report_template({'name': name}) assert report_template['name'] == name # list - create second template tmp_name = gen_alpha() - tmp_report_template = make_report_template({'name': tmp_name}) - result_list = ReportTemplate.list() + tmp_report_template = target_sat.cli_factory.make_report_template({'name': tmp_name}) + result_list = target_sat.cli.ReportTemplate.list() assert name in [rt['name'] for rt in result_list] # info - result = ReportTemplate.info({'id': report_template['id']}) + result = target_sat.cli.ReportTemplate.info({'id': report_template['id']}) assert name == result['name'] # update new_name = gen_alpha() - result = ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + result = target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) assert new_name == result[0]['name'] - rt_list = ReportTemplate.list() + rt_list = target_sat.cli.ReportTemplate.list() assert name not in [rt['name'] for rt in rt_list] # delete tmp - ReportTemplate.delete({'name': tmp_report_template['name']}) + target_sat.cli.ReportTemplate.delete({'name': tmp_report_template['name']}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.info({'id': tmp_report_template['id']}) + target_sat.cli.ReportTemplate.info({'id': tmp_report_template['id']}) @pytest.mark.tier1 -def test_positive_generate_report_nofilter_and_with_filter(): +def test_positive_generate_report_nofilter_and_with_filter(module_target_sat): """Generate Host Status report without filter and with filter :id: 5af03399-b918-468a-9306-1c76dda6a369 @@ -243,21 
+224,23 @@ def test_positive_generate_report_nofilter_and_with_filter(): :CaseImportance: Critical """ host_name = gen_alpha() - host1 = make_fake_host({'name': host_name}) + host1 = module_target_sat.cli_factory.make_fake_host({'name': host_name}) host_name_2 = gen_alpha() - host2 = make_fake_host({'name': host_name_2}) + host2 = module_target_sat.cli_factory.make_fake_host({'name': host_name_2}) - result_list = ReportTemplate.list() + result_list = module_target_sat.cli.ReportTemplate.list() assert 'Host - Statuses' in [rt['name'] for rt in result_list] - rt_host_statuses = ReportTemplate.info({'name': 'Host - Statuses'}) - result_no_filter = ReportTemplate.generate({'name': rt_host_statuses['name']}) + rt_host_statuses = module_target_sat.cli.ReportTemplate.info({'name': 'Host - Statuses'}) + result_no_filter = module_target_sat.cli.ReportTemplate.generate( + {'name': rt_host_statuses['name']} + ) assert host1['name'] in [item.split(',')[0] for item in result_no_filter.splitlines()] assert host2['name'] in [item.split(',')[0] for item in result_no_filter.splitlines()] - result = ReportTemplate.generate( + result = module_target_sat.cli.ReportTemplate.generate( { 'name': rt_host_statuses['name'], 'inputs': ( @@ -270,7 +253,7 @@ def test_positive_generate_report_nofilter_and_with_filter(): @pytest.mark.tier2 -def test_positive_lock_and_unlock_report(): +def test_positive_lock_and_unlock_report(module_target_sat): """Lock and unlock report template :id: df306515-8798-4ce3-9430-6bc3bf9b9b33 @@ -287,19 +270,23 @@ def test_positive_lock_and_unlock_report(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1}) + report_template = module_target_sat.cli_factory.make_report_template({'name': name}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 1}) new_name = gen_alpha() with pytest.raises(CLIReturnCodeError): - ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) - ReportTemplate.update({'name': report_template['name'], 'locked': 0}) - result = ReportTemplate.update({'name': report_template['name'], 'new-name': new_name}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 0}) + result = module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'new-name': new_name} + ) assert result[0]['name'] == new_name @pytest.mark.tier2 -def test_positive_report_add_userinput(): +def test_positive_report_add_userinput(module_target_sat): """Add user input to template :id: 84b577db-144e-4761-a46e-e83887464986 @@ -314,17 +301,17 @@ def test_positive_report_add_userinput(): """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.make_report_template({'name': name}) ti_name = gen_alpha() - template_input = make_template_input( + template_input = module_target_sat.cli_factory.make_template_input( {'name': ti_name, 'input-type': 'user', 'template-id': report_template['id']} ) - result = ReportTemplate.info({'name': report_template['name']}) + result = module_target_sat.cli.ReportTemplate.info({'name': report_template['name']}) assert result['template-inputs'][0]['name'] == template_input['name'] @pytest.mark.tier2 -def test_positive_dump_report(): +def 
test_positive_dump_report(module_target_sat): """Export report template :id: 84b577db-144e-4761-a42e-a83887464986 @@ -341,13 +328,15 @@ def test_positive_dump_report(): """ name = gen_alpha() content = gen_alpha() - report_template = make_report_template({'name': name, 'content': content}) - result = ReportTemplate.dump({'id': report_template['id']}) + report_template = module_target_sat.cli_factory.make_report_template( + {'name': name, 'content': content} + ) + result = module_target_sat.cli.ReportTemplate.dump({'id': report_template['id']}) assert content in result @pytest.mark.tier2 -def test_positive_clone_locked_report(): +def test_positive_clone_locked_report(module_target_sat): """Clone locked report template :id: cc843731-b9c2-4fc9-9e15-d1ee5d967cda @@ -364,19 +353,21 @@ def test_positive_clone_locked_report(): """ name = gen_alpha() - report_template = make_report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1, 'default': 1}) + report_template = module_target_sat.cli_factory.make_report_template({'name': name}) + module_target_sat.cli.ReportTemplate.update( + {'name': report_template['name'], 'locked': 1, 'default': 1} + ) new_name = gen_alpha() - ReportTemplate.clone({'id': report_template['id'], 'new-name': new_name}) - result_list = ReportTemplate.list() + module_target_sat.cli.ReportTemplate.clone({'id': report_template['id'], 'new-name': new_name}) + result_list = module_target_sat.cli.ReportTemplate.list() assert new_name in [rt['name'] for rt in result_list] - result_info = ReportTemplate.info({'id': report_template['id']}) + result_info = module_target_sat.cli.ReportTemplate.info({'id': report_template['id']}) assert result_info['locked'] == 'yes' assert result_info['default'] == 'yes' @pytest.mark.tier2 -def test_positive_generate_report_sanitized(): +def test_positive_generate_report_sanitized(module_target_sat): """Generate report template where there are values in comma outputted which might brake CSV format @@ -395,10 +386,10 @@ def test_positive_generate_report_sanitized(): """ # create a name that has a comma in it, some randomized text, and no spaces. 
os_name = gen_alpha(start='test', separator=',').replace(' ', '') - architecture = make_architecture() - partition_table = make_partition_table() - medium = make_medium() - os = make_os( + architecture = module_target_sat.cli_factory.make_architecture() + partition_table = module_target_sat.cli_factory.make_partition_table() + medium = module_target_sat.cli_factory.make_medium() + os = module_target_sat.cli_factory.make_os( { 'name': os_name, 'architecture-ids': architecture['id'], @@ -408,7 +399,7 @@ def test_positive_generate_report_sanitized(): ) host_name = gen_alpha() - host = make_fake_host( + host = module_target_sat.cli_factory.make_fake_host( { 'name': host_name, 'architecture-id': architecture['id'], @@ -418,9 +409,11 @@ def test_positive_generate_report_sanitized(): } ) - report_template = make_report_template({'content': REPORT_TEMPLATE_FILE}) + report_template = module_target_sat.cli_factory.make_report_template( + {'content': REPORT_TEMPLATE_FILE} + ) - result = ReportTemplate.generate({'name': report_template['name']}) + result = module_target_sat.cli.ReportTemplate.generate({'name': report_template['name']}) assert 'Name,Operating System' in result # verify header of custom template assert f'{host["name"]},"{host["operating-system"]["operating-system"]}"' in result @@ -494,7 +487,7 @@ def test_positive_generate_email_uncompressed(): @pytest.mark.tier2 -def test_negative_create_report_without_name(): +def test_negative_create_report_without_name(module_target_sat): """Try to create a report template with empty name :id: 84b577db-144e-4771-a42e-e93887464986 @@ -510,11 +503,11 @@ def test_negative_create_report_without_name(): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError): - make_report_template({'name': ''}) + module_target_sat.cli_factory.make_report_template({'name': ''}) @pytest.mark.tier2 -def test_negative_delete_locked_report(): +def test_negative_delete_locked_report(module_target_sat): """Try to delete a locked report template :id: 84b577db-144e-4871-a42e-e93887464986 @@ -530,16 +523,16 @@ def test_negative_delete_locked_report(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.make_report_template({'name': name}) - ReportTemplate.update({'name': report_template['name'], 'locked': 1}) + module_target_sat.cli.ReportTemplate.update({'name': report_template['name'], 'locked': 1}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.delete({'name': report_template['name']}) + module_target_sat.cli.ReportTemplate.delete({'name': report_template['name']}) @pytest.mark.tier2 -def test_negative_bad_email(): +def test_negative_bad_email(module_target_sat): """Report can't be generated when incorrectly formed mail specified :id: a4ba77db-144e-4871-a42e-e93887464986 @@ -555,14 +548,16 @@ def test_negative_bad_email(): :CaseImportance: Medium """ name = gen_alpha() - report_template = make_report_template({'name': name}) + report_template = module_target_sat.cli_factory.make_report_template({'name': name}) with pytest.raises(CLIReturnCodeError): - ReportTemplate.schedule({'name': report_template['name'], 'mail-to': gen_alpha()}) + module_target_sat.cli.ReportTemplate.schedule( + {'name': report_template['name'], 'mail-to': gen_alpha()} + ) @pytest.mark.tier3 -def test_negative_nonauthor_of_report_cant_download_it(): +def test_negative_nonauthor_of_report_cant_download_it(module_target_sat): """The resulting report should only be downloadable by the user 
that generated it or admin. Check. @@ -581,10 +576,10 @@ def test_negative_nonauthor_of_report_cant_download_it(): uname_viewer2 = gen_alpha() password = gen_alpha() - loc = Location.info({'name': DEFAULT_LOC}) - org = Org.info({'name': DEFAULT_ORG}) + loc = module_target_sat.cli.Location.info({'name': DEFAULT_LOC}) + org = module_target_sat.cli.Org.info({'name': DEFAULT_ORG}) - user1 = make_user( + user1 = module_target_sat.cli_factory.make_user( { 'login': uname_viewer, 'password': password, @@ -593,7 +588,7 @@ def test_negative_nonauthor_of_report_cant_download_it(): } ) - user2 = make_user( + user2 = module_target_sat.cli_factory.make_user( { 'login': uname_viewer2, 'password': password, @@ -602,62 +597,72 @@ def test_negative_nonauthor_of_report_cant_download_it(): } ) - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions_org = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=Organization'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=Organization'} + ) ] permissions_loc = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=Location'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=Location'} + ) ] permissions_rt = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=ReportTemplate'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=ReportTemplate'} + ) ] permissions_pt = [ permission['name'] - for permission in Filter.available_permissions( + for permission in module_target_sat.cli.Filter.available_permissions( {'search': 'resource_type=ProvisioningTemplate'} ) ] permissions_jt = [ permission['name'] - for permission in Filter.available_permissions({'search': 'resource_type=JobTemplate'}) + for permission in module_target_sat.cli.Filter.available_permissions( + {'search': 'resource_type=JobTemplate'} + ) ] # Assign filters to created role for perm in [permissions_org, permissions_loc, permissions_rt, permissions_pt, permissions_jt]: - make_filter({'role-id': role['id'], 'permissions': perm}) - User.add_role({'login': user1['login'], 'role-id': role['id']}) - User.add_role({'login': user2['login'], 'role-id': role['id']}) + module_target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': perm}) + module_target_sat.cli.User.add_role({'login': user1['login'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'login': user2['login'], 'role-id': role['id']}) name = gen_alpha() content = gen_alpha() - report_template = ReportTemplate.with_user(username=user1['login'], password=password).create( + report_template = module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).create( {'name': name, 'organization-id': org['id'], 'location-id': loc['id'], 'file': content} ) - schedule = ReportTemplate.with_user(username=user1['login'], password=password).schedule( - {'name': report_template['name']} - ) + schedule = module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).schedule({'name': report_template['name']}) job_id = schedule.split('Job ID: ', 1)[1].strip() - report_data = ReportTemplate.with_user(username=user1['login'], password=password).report_data( - {'id': report_template['name'], 'job-id': job_id} - ) + report_data = 
module_target_sat.cli.ReportTemplate.with_user( + username=user1['login'], password=password + ).report_data({'id': report_template['name'], 'job-id': job_id}) assert content in report_data with pytest.raises(CLIReturnCodeError): - ReportTemplate.with_user(username=user2['login'], password=password).report_data( - {'id': report_template['name'], 'job-id': job_id} - ) + module_target_sat.cli.ReportTemplate.with_user( + username=user2['login'], password=password + ).report_data({'id': report_template['name'], 'job-id': job_id}) @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1750924') -def test_positive_generate_with_name_and_org(): +def test_positive_generate_with_name_and_org(module_target_sat): """Generate Host Status report, specifying template name and organization :id: 5af03399-b918-468a-1306-1c76dda6f369 @@ -680,16 +685,18 @@ def test_positive_generate_with_name_and_org(): :BZ: 1750924 """ host_name = gen_alpha() - host = make_fake_host({'name': host_name}) + host = module_target_sat.cli_factory.make_fake_host({'name': host_name}) - result = ReportTemplate.generate({'name': 'Host - Statuses', 'organization': DEFAULT_ORG}) + result = module_target_sat.cli.ReportTemplate.generate( + {'name': 'Host - Statuses', 'organization': DEFAULT_ORG} + ) assert host['name'] in [item.split(',')[0] for item in result.split('\n')] @pytest.mark.tier2 @pytest.mark.skip_if_open('BZ:1782807') -def test_positive_generate_ansible_template(): +def test_positive_generate_ansible_template(module_target_sat): """Report template named 'Ansible Inventory' (default name is specified in settings) must be present in Satellite 6.7 and later in order to provide enhanced functionality for Ansible Tower inventory synchronization with Satellite. @@ -710,19 +717,19 @@ def test_positive_generate_ansible_template(): :CaseImportance: Medium """ - settings = Settings.list({'search': 'name=ansible_inventory_template'}) + settings = module_target_sat.cli.Settings.list({'search': 'name=ansible_inventory_template'}) assert 1 == len(settings) template_name = settings[0]['value'] - report_list = ReportTemplate.list() + report_list = module_target_sat.cli.ReportTemplate.list() assert template_name in [rt['name'] for rt in report_list] login = gen_alpha().lower() password = gen_alpha().lower() - loc = Location.info({'name': DEFAULT_LOC}) - org = Org.info({'name': DEFAULT_ORG}) + loc = module_target_sat.cli.Location.info({'name': DEFAULT_LOC}) + org = module_target_sat.cli.Org.info({'name': DEFAULT_ORG}) - user = make_user( + user = module_target_sat.cli_factory.make_user( { 'login': login, 'password': password, @@ -731,19 +738,21 @@ def test_positive_generate_ansible_template(): } ) - User.add_role({'login': user['login'], 'role': 'Ansible Tower Inventory Reader'}) + module_target_sat.cli.User.add_role( + {'login': user['login'], 'role': 'Ansible Tower Inventory Reader'} + ) host_name = gen_alpha().lower() - host = make_fake_host({'name': host_name}) + host = module_target_sat.cli_factory.make_fake_host({'name': host_name}) - schedule = ReportTemplate.with_user(username=user['login'], password=password).schedule( - {'name': template_name} - ) + schedule = module_target_sat.cli.ReportTemplate.with_user( + username=user['login'], password=password + ).schedule({'name': template_name}) job_id = schedule.split('Job ID: ', 1)[1].strip() - report_data = ReportTemplate.with_user(username=user['login'], password=password).report_data( - {'name': template_name, 'job-id': job_id} - ) + report_data = 
module_target_sat.cli.ReportTemplate.with_user( + username=user['login'], password=password + ).report_data({'name': template_name, 'job-id': job_id}) assert host['name'] in [item.split(',')[1] for item in report_data.split('\n') if len(item) > 0] @@ -776,7 +785,7 @@ def test_positive_generate_entitlements_report_multiple_formats( client.install_katello_ca(target_sat) client.register_contenthost(module_entitlement_manifest_org.label, local_ak['name']) assert client.subscribed - result_html = ReportTemplate.generate( + result_html = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -786,7 +795,7 @@ def test_positive_generate_entitlements_report_multiple_formats( ) assert client.hostname in result_html assert local_subscription['name'] in result_html - result_yaml = ReportTemplate.generate( + result_yaml = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -799,7 +808,7 @@ def test_positive_generate_entitlements_report_multiple_formats( assert client.hostname in entry elif 'Subscription Name:' in entry: assert local_subscription['name'] in entry - result_csv = ReportTemplate.generate( + result_csv = target_sat.cli.ReportTemplate.generate( { 'organization': module_entitlement_manifest_org.name, 'name': 'Subscription - Entitlement Report', @@ -838,7 +847,7 @@ def test_positive_schedule_entitlements_report( client.install_katello_ca(target_sat) client.register_contenthost(module_entitlement_manifest_org.label, local_ak['name']) assert client.subscribed - scheduled_csv = ReportTemplate.schedule( + scheduled_csv = target_sat.cli.ReportTemplate.schedule( { 'name': 'Subscription - Entitlement Report', 'organization': module_entitlement_manifest_org.name, @@ -846,7 +855,7 @@ def test_positive_schedule_entitlements_report( 'inputs': 'Days from Now=no limit', } ) - data_csv = ReportTemplate.report_data( + data_csv = target_sat.cli.ReportTemplate.report_data( { 'name': 'Subscription - Entitlement Report', 'job-id': scheduled_csv.split('\n', 1)[0].split('Job ID: ', 1)[1], @@ -879,7 +888,7 @@ def test_positive_generate_hostpkgcompare( :BZ: 1860430 """ # Add subscription to Satellite Tools repo to activation key - setup_org_for_a_rh_repo( + target_sat.cli_factory.setup_org_for_a_rh_repo( { 'product': PRDS['rhel'], 'repository-set': REPOSET['rhst7'], @@ -890,7 +899,7 @@ def test_positive_generate_hostpkgcompare( 'activationkey-id': local_ak['id'], } ) - setup_org_for_a_custom_repo( + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_6.url, 'organization-id': module_entitlement_manifest_org.id, @@ -911,7 +920,7 @@ def test_positive_generate_hostpkgcompare( clients.append(client) client.enable_repo(REPOS['rhst7']['id']) clients.sort(key=lambda client: client.hostname) - hosts_info = [Host.info({'name': client.hostname}) for client in clients] + hosts_info = [target_sat.cli.Host.info({'name': client.hostname}) for client in clients] host1, host2 = hosts_info res = clients[0].execute( @@ -921,7 +930,7 @@ def test_positive_generate_hostpkgcompare( res = clients[1].execute(f'yum -y install {FAKE_2_CUSTOM_PACKAGE}') assert not res.status - result = ReportTemplate.generate( + result = target_sat.cli.ReportTemplate.generate( { 'name': 'Host - compare content hosts packages', 'inputs': f'Host 1 = {host1["name"]}, ' f'Host 2 = {host2["name"]}', @@ -962,7 +971,7 @@ def 
test_positive_generate_hostpkgcompare( @pytest.mark.tier3 -def test_negative_generate_hostpkgcompare_nonexistent_host(): +def test_negative_generate_hostpkgcompare_nonexistent_host(module_target_sat): """Try to generate 'Host - compare content hosts packages' report with nonexistent hosts inputs @@ -981,7 +990,7 @@ def test_negative_generate_hostpkgcompare_nonexistent_host(): :BZ: 1860351 """ with pytest.raises(CLIReturnCodeError) as cm: - ReportTemplate.generate( + module_target_sat.cli.ReportTemplate.generate( { 'name': 'Host - compare content hosts packages', 'inputs': 'Host 1 = nonexistent1, ' 'Host 2 = nonexistent2', @@ -1020,7 +1029,7 @@ def test_positive_generate_installed_packages_report( :customerscenario: true """ - setup_org_for_a_custom_repo( + target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_6.url, 'organization-id': module_entitlement_manifest_org.id, diff --git a/tests/foreman/cli/test_repository.py b/tests/foreman/cli/test_repository.py index 85a9d751bd4..4298be1eb6a 100644 --- a/tests/foreman/cli/test_repository.py +++ b/tests/foreman/cli/test_repository.py @@ -25,36 +25,6 @@ import requests from wait_for import wait_for -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.content_export import ContentExport -from robottelo.cli.content_import import ContentImport -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - CLIFactoryError, - make_content_credential, - make_content_view, - make_filter, - make_lifecycle_environment, - make_location, - make_org, - make_product, - make_repository, - make_role, - make_user, -) -from robottelo.cli.file import File -from robottelo.cli.filter import Filter -from robottelo.cli.module_stream import ModuleStream -from robottelo.cli.org import Org -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.role import Role -from robottelo.cli.settings import Settings -from robottelo.cli.srpm import Srpm -from robottelo.cli.task import Task -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, @@ -79,6 +49,7 @@ FAKE_YUM_MD5_REPO, FAKE_YUM_SRPM_REPO, ) +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.datafactory import ( invalid_values_list, @@ -87,9 +58,7 @@ valid_docker_repository_names, valid_http_credentials, ) - -# from robottelo.constants.repos import FEDORA_OSTREE_REPO - +from tests.foreman.api.test_contentview import content_view YUM_REPOS = ( settings.repos.yum_0.url, @@ -107,23 +76,26 @@ ) -def _get_image_tags_count(repo): - return Repository.info({'id': repo['id']}) +def _get_image_tags_count(repo, sat): + return sat.cli.Repository.info({'id': repo['id']}) -def _validated_image_tags_count(repo): +def _validated_image_tags_count(repo, sat): """Wrapper around Repository.info(), that returns once container-image-tags in repo is greater than 0. 
Needed due to BZ#1664631 (container-image-tags is not populated immediately after synchronization), which was CLOSED WONTFIX """ wait_for( - lambda: int(_get_image_tags_count(repo=repo)['content-counts']['container-image-tags']) > 0, + lambda: int( + _get_image_tags_count(repo=repo, sat=sat)['content-counts']['container-image-tags'] + ) + > 0, timeout=30, delay=2, logger=logger, ) - return _get_image_tags_count(repo=repo) + return _get_image_tags_count(repo=repo, sat=sat) @pytest.fixture @@ -136,15 +108,15 @@ def repo_options(request, module_org, module_product): @pytest.fixture -def repo(repo_options): +def repo(repo_options, target_sat): """create a new repository.""" - return make_repository(repo_options) + return target_sat.cli_factory.make_repository(repo_options) @pytest.fixture -def gpg_key(module_org): +def gpg_key(module_org, module_target_sat): """Create a new GPG key.""" - return make_content_credential({'organization-id': module_org.id}) + return module_target_sat.cli_factory.make_content_credential({'organization-id': module_org.id}) class TestRepository: @@ -350,7 +322,7 @@ def test_positive_mirroring_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_positive_create_with_default_download_policy(self, repo_options, repo): + def test_positive_create_with_default_download_policy(self, repo_options, repo, target_sat): """Verify if the default download policy is assigned when creating a YUM repo without `--download-policy` @@ -362,7 +334,7 @@ def test_positive_create_with_default_download_policy(self, repo_options, repo): :CaseImportance: Critical """ - default_dl_policy = Settings.list({'search': 'name=default_download_policy'}) + default_dl_policy = target_sat.cli.Settings.list({'search': 'name=default_download_policy'}) assert default_dl_policy assert repo.get('download-policy') == default_dl_policy[0]['value'] @@ -370,7 +342,7 @@ def test_positive_create_with_default_download_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo): + def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo, target_sat): """Update `immediate` download policy to `on_demand` for a newly created YUM repository @@ -385,8 +357,8 @@ def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo) :BZ: 1732056 """ assert repo.get('download-policy') == 'immediate' - Repository.update({'id': repo['id'], 'download-policy': 'on_demand'}) - result = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'download-policy': 'on_demand'}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result.get('download-policy') == 'on_demand' @pytest.mark.tier1 @@ -395,7 +367,7 @@ def test_positive_create_immediate_update_to_on_demand(self, repo_options, repo) **parametrized([{'content-type': 'yum', 'download-policy': 'on_demand'}]), indirect=True, ) - def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo): + def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo, target_sat): """Update `on_demand` download policy to `immediate` for a newly created YUM repository @@ -407,13 +379,13 @@ def test_positive_create_on_demand_update_to_immediate(self, repo_options, repo) :CaseImportance: Critical """ - Repository.update({'id': 
repo['id'], 'download-policy': 'immediate'}) - result = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'download-policy': 'immediate'}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result['download-policy'] == 'immediate' @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key): + def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key, target_sat): """Check if repository can be created with gpg key ID :id: 6d22f0ea-2d27-4827-9b7a-3e1550a47285 @@ -425,7 +397,7 @@ def test_positive_create_with_gpg_key_by_id(self, repo_options, gpg_key): :CaseImportance: Critical """ repo_options['gpg-key-id'] = gpg_key['id'] - repo = make_repository(repo_options) + repo = target_sat.cli_factory.make_repository(repo_options) assert repo['gpg-key']['id'] == gpg_key['id'] assert repo['gpg-key']['name'] == gpg_key['name'] @@ -590,16 +562,18 @@ def test_positive_create_repo_with_new_organization_and_location(self, target_sa :CaseImportance: High """ - new_org = make_org() - new_location = make_location() - new_product = make_product( + new_org = target_sat.cli_factory.make_org() + new_location = target_sat.cli_factory.make_location() + new_product = target_sat.cli_factory.make_product( {'organization-id': new_org['id'], 'description': 'test_product'} ) - Org.add_location({'location-id': new_location['id'], 'name': new_org['name']}) - assert new_location['name'] in Org.info({'id': new_org['id']})['locations'] - make_repository( + target_sat.cli.Org.add_location( + {'location-id': new_location['id'], 'name': new_org['name']} + ) + assert new_location['name'] in target_sat.cli.Org.info({'id': new_org['id']})['locations'] + target_sat.cli_factory.make_repository( { - 'location-id': new_location['id'], + 'content-type': 'yum', 'organization-id': new_org['id'], 'product-id': new_product['id'], } @@ -617,7 +591,7 @@ def test_positive_create_repo_with_new_organization_and_location(self, target_sa **parametrized([{'name': name} for name in invalid_values_list()]), indirect=True, ) - def test_negative_create_with_name(self, repo_options): + def test_negative_create_with_name(self, repo_options, target_sat): """Repository name cannot be 300-characters long :id: af0652d3-012d-4846-82ac-047918f74722 @@ -629,7 +603,7 @@ def test_negative_create_with_name(self, repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -637,7 +611,9 @@ def test_negative_create_with_name(self, repo_options): **parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}.com'}]), indirect=True, ) - def test_negative_create_with_url_with_special_characters(self, repo_options): + def test_negative_create_with_url_with_special_characters( + self, repo_options, module_target_sat + ): """Verify that repository URL cannot contain unquoted special characters :id: 2bd5ee17-0fe5-43cb-9cdc-dc2178c5374c @@ -649,7 +625,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -657,7 +633,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): **parametrized([{'content-type': 'yum', 'download-policy': 
gen_string('alpha', 5)}]), indirect=True, ) - def test_negative_create_with_invalid_download_policy(self, repo_options): + def test_negative_create_with_invalid_download_policy(self, repo_options, module_target_sat): """Verify that YUM repository cannot be created with invalid download policy @@ -671,13 +647,13 @@ def test_negative_create_with_invalid_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **parametrized([{'content-type': 'yum'}]), indirect=True ) - def test_negative_update_to_invalid_download_policy(self, repo_options, repo): + def test_negative_update_to_invalid_download_policy(self, repo_options, repo, target_sat): """Verify that YUM repository cannot be updated to invalid download policy @@ -691,7 +667,9 @@ def test_negative_update_to_invalid_download_policy(self, repo_options, repo): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - Repository.update({'id': repo['id'], 'download-policy': gen_string('alpha', 5)}) + target_sat.cli.Repository.update( + {'id': repo['id'], 'download-policy': gen_string('alpha', 5)} + ) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -706,7 +684,7 @@ def test_negative_update_to_invalid_download_policy(self, repo_options, repo): ), indirect=True, ) - def test_negative_create_non_yum_with_download_policy(self, repo_options): + def test_negative_create_non_yum_with_download_policy(self, repo_options, module_target_sat): """Verify that non-YUM repositories cannot be created with download policy TODO: Remove ostree from exceptions when ostree is added back in Satellite 7 @@ -725,7 +703,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): CLIFactoryError, match='Download policy Cannot set attribute download_policy for content type', ): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -742,7 +720,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): ), indirect=True, ) - def test_positive_synchronize_yum_repo(self, repo_options, repo): + def test_positive_synchronize_yum_repo(self, repo_options, repo, target_sat): """Check if repository can be created and synced :id: e3a62529-edbd-4062-9246-bef5f33bdcf0 @@ -758,9 +736,9 @@ def test_positive_synchronize_yum_repo(self, repo_options, repo): # Repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' @pytest.mark.tier1 @@ -769,7 +747,7 @@ def test_positive_synchronize_yum_repo(self, repo_options, repo): **parametrized([{'content-type': 'file', 'url': CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_synchronize_file_repo(self, repo_options, repo): + def test_positive_synchronize_file_repo(self, repo_options, repo, target_sat): """Check if repository can be created and synced :id: eafc421d-153e-41e1-afbd-938e556ef827 @@ -785,9 +763,9 @@ def test_positive_synchronize_file_repo(self, repo_options, repo): # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - 
Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert int(repo['content-counts']['files']) == CUSTOM_FILE_REPO_FILES_COUNT @@ -809,7 +787,7 @@ def test_positive_synchronize_file_repo(self, repo_options, repo): ), indirect=True, ) - def test_positive_synchronize_auth_yum_repo(self, repo): + def test_positive_synchronize_auth_yum_repo(self, repo, target_sat): """Check if secured repository can be created and synced :id: b0db676b-e0f0-428c-adf3-1d7c0c3599f0 @@ -825,9 +803,9 @@ def test_positive_synchronize_auth_yum_repo(self, repo): # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - new_repo = Repository.info({'id': repo['id']}) + new_repo = target_sat.cli.Repository.info({'id': repo['id']}) assert new_repo['sync']['status'] == 'Success' @pytest.mark.skip_if_open("BZ:2035025") @@ -850,7 +828,7 @@ def test_positive_synchronize_auth_yum_repo(self, repo): ), indirect=['repo_options'], ) - def test_negative_synchronize_auth_yum_repo(self, repo): + def test_negative_synchronize_auth_yum_repo(self, repo, target_sat): """Check if secured repo fails to synchronize with invalid credentials :id: 809905ae-fb76-465d-9468-1f99c4274aeb @@ -864,8 +842,10 @@ def test_negative_synchronize_auth_yum_repo(self, repo): :CaseLevel: Integration """ # Try to synchronize it - repo_sync = Repository.synchronize({'id': repo['id'], 'async': True}) - response = Task.progress({'id': repo_sync[0]['id']}, return_raw_response=True) + repo_sync = target_sat.cli.Repository.synchronize({'id': repo['id'], 'async': True}) + response = target_sat.cli.Task.progress( + {'id': repo_sync[0]['id']}, return_raw_response=True + ) assert "Error: 401, message='Unauthorized'" in response.stderr[1].decode('utf-8') @pytest.mark.tier2 @@ -884,7 +864,9 @@ def test_negative_synchronize_auth_yum_repo(self, repo): ), indirect=True, ) - def test_positive_synchronize_docker_repo(self, repo, module_product, module_org): + def test_positive_synchronize_docker_repo( + self, repo, module_product, module_org, module_target_sat + ): """Check if Docker repository can be created, synced, and deleted :id: cb9ae788-743c-4785-98b2-6ae0c161bc9a @@ -900,17 +882,17 @@ def test_positive_synchronize_docker_repo(self, repo, module_product, module_org # Assertion that repo is not yet synced assert repo['sync']['status'] == 'Not Synced' # Synchronize it - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Verify it has finished - new_repo = Repository.info({'id': repo['id']}) + new_repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert new_repo['sync']['status'] == 'Success' # For BZ#1810165, assert repo can be deleted - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) assert ( new_repo['name'] - not in Product.info({'id': module_product.id, 'organization-id': module_org.id})[ - 'content' - ] + not in module_target_sat.cli.Product.info( + {'id': module_product.id, 'organization-id': module_org.id} + )['content'] ) @pytest.mark.tier2 @@ -929,7 +911,7 @@ def test_positive_synchronize_docker_repo(self, repo, module_product, module_org ), 
indirect=True, ) - def test_verify_checksum_container_repo(self, repo): + def test_verify_checksum_container_repo(self, repo, target_sat): """Check if Verify Content Checksum can be run on non container repos :id: c8f0eb45-3cb6-41b2-aad9-52ac847d7bf8 @@ -944,8 +926,8 @@ def test_verify_checksum_container_repo(self, repo): :customerscenario: true """ assert repo['sync']['status'] == 'Not Synced' - Repository.synchronize({'id': repo['id'], 'validate-contents': 'true'}) - new_repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id'], 'validate-contents': 'true'}) + new_repo = target_sat.cli.Repository.info({'id': repo['id']}) assert new_repo['sync']['status'] == 'Success' @pytest.mark.tier2 @@ -964,7 +946,9 @@ def test_verify_checksum_container_repo(self, repo): ), indirect=True, ) - def test_positive_synchronize_docker_repo_with_tags_whitelist(self, repo_options, repo): + def test_positive_synchronize_docker_repo_with_tags_whitelist( + self, repo_options, repo, target_sat + ): """Check if only whitelisted tags are synchronized :id: aa820c65-2de1-4b32-8890-98bd8b4320dc @@ -973,8 +957,8 @@ def test_positive_synchronize_docker_repo_with_tags_whitelist(self, repo_options :expectedresults: Only whitelisted tag is synchronized """ - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert repo_options['include-tags'] in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) == 1 @@ -993,7 +977,7 @@ def test_positive_synchronize_docker_repo_with_tags_whitelist(self, repo_options ), indirect=True, ) - def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): + def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo, target_sat): """Verify that adding tags whitelist and re-syncing after synchronizing full repository doesn't remove content that was already pulled in when mirroring policy is set to additive @@ -1005,13 +989,13 @@ def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): :expectedresults: Non-whitelisted tags are not removed """ tags = 'latest' - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert not repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 - Repository.update({'id': repo['id'], 'include-tags': tags}) - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.update({'id': repo['id'], 'include-tags': tags}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert tags in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 @@ -1030,7 +1014,7 @@ def test_positive_synchronize_docker_repo_set_tags_later_additive(self, repo): ), indirect=True, ) - def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo): + def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo, target_sat): """Verify that adding tags whitelist and re-syncing after synchronizing full repository does remove content that was already pulled in when mirroring policy is 
set to content only @@ -1043,13 +1027,13 @@ def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo :expectedresults: Non-whitelisted tags are removed """ tags = 'latest' - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert not repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) >= 2 - Repository.update({'id': repo['id'], 'include-tags': tags}) - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.update({'id': repo['id'], 'include-tags': tags}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) assert tags in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) <= 2 @@ -1068,7 +1052,9 @@ def test_positive_synchronize_docker_repo_set_tags_later_content_only(self, repo ), indirect=True, ) - def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo_options, repo): + def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags( + self, repo_options, repo, target_sat + ): """Set tags whitelist to contain both valid and invalid (non-existing) tags. Check if only whitelisted tags are synchronized @@ -1078,8 +1064,8 @@ def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo :expectedresults: Only whitelisted tag is synchronized """ - Repository.synchronize({'id': repo['id']}) - repo = _validated_image_tags_count(repo=repo) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = _validated_image_tags_count(repo=repo, sat=target_sat) for tag in repo_options['include-tags'].split(','): assert tag in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) == 1 @@ -1099,7 +1085,9 @@ def test_negative_synchronize_docker_repo_with_mix_valid_invalid_tags(self, repo ), indirect=True, ) - def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, repo): + def test_negative_synchronize_docker_repo_with_invalid_tags( + self, repo_options, repo, target_sat + ): """Set tags whitelist to contain only invalid (non-existing) tags. Check that no data is synchronized. 
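For orientation between the hunks above and below: every test in this file now receives a Satellite fixture and drives hammer through its cli / cli_factory namespaces instead of the removed module-level wrappers, with the CLI exceptions imported from robottelo.exceptions. A minimal, hypothetical sketch of that pattern follows (the fixtures, namespaces, exception classes and settings attribute come from this patch; the test name and the failing download-policy value are illustrative only):

import pytest

from robottelo.config import settings
from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError


def test_repo_lifecycle_sketch(target_sat, module_org):
    """Illustrative only: create, sync and delete a repository via the fixture object."""
    product = target_sat.cli_factory.make_product({'organization-id': module_org.id})
    repo = target_sat.cli_factory.make_repository(
        {'product-id': product['id'], 'content-type': 'yum', 'url': settings.repos.yum_1.url}
    )
    # Hammer commands go through target_sat.cli, factory helpers through target_sat.cli_factory.
    target_sat.cli.Repository.synchronize({'id': repo['id']})
    assert target_sat.cli.Repository.info({'id': repo['id']})['sync']['status'] == 'Success'
    # Factory-level validation failures surface as CLIFactoryError...
    with pytest.raises(CLIFactoryError):
        target_sat.cli_factory.make_repository(
            {'product-id': product['id'], 'content-type': 'yum', 'download-policy': 'bogus'}
        )
    # ...while a non-zero hammer exit code surfaces as CLIReturnCodeError.
    target_sat.cli.Repository.delete({'id': repo['id']})
    with pytest.raises(CLIReturnCodeError):
        target_sat.cli.Repository.info({'id': repo['id']})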
@@ -1109,8 +1097,8 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, :expectedresults: Tags are not synchronized """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) for tag in repo_options['include-tags'].split(','): assert tag in repo['container-image-tags-filter'] assert int(repo['content-counts']['container-image-tags']) == 0 @@ -1121,7 +1109,7 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_resynchronize_rpm_repo(self, repo): + def test_positive_resynchronize_rpm_repo(self, repo, target_sat): """Check that repository content is resynced after packages were removed from repository @@ -1135,20 +1123,20 @@ def test_positive_resynchronize_rpm_repo(self, repo): :CaseLevel: Integration """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list({'repository-id': repo['id']}) - Repository.remove_content( + packages = target_sat.cli.Package.list({'repository-id': repo['id']}) + target_sat.cli.Repository.remove_content( {'id': repo['id'], 'ids': [package['id'] for package in packages]} ) - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' # Re-synchronize repository - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' @@ -1177,7 +1165,7 @@ def test_positive_resynchronize_rpm_repo(self, repo): ), ) @pytest.mark.tier2 - def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2): + def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2, module_target_sat): """ Check that a package removed upstream is removed downstream when the repo is next synced if mirror-on-sync is enabled (the default setting). 
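The tag-whitelist hunks above all funnel through the reworked _validated_image_tags_count helper, which now takes the Satellite object explicitly and polls Repository.info because container-image-tags are not populated immediately after a sync (BZ#1664631). A small usage sketch under that assumption (the helper, fixture and fields are from this patch; the wrapper function, tag name and expected count are illustrative only):

def assert_only_whitelisted_tag_synced(target_sat, repo):
    """Illustrative only: sync a container repo, then wait for the tag count to populate."""
    target_sat.cli.Repository.synchronize({'id': repo['id']})
    # Poll via the helper until 'container-image-tags' is greater than zero.
    repo = _validated_image_tags_count(repo=repo, sat=target_sat)
    assert 'latest' in repo['container-image-tags-filter']
    assert int(repo['content-counts']['container-image-tags']) == 1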
@@ -1198,36 +1186,40 @@ def test_mirror_on_sync_removes_rpm(self, module_org, repo, repo_options_2): :CaseImportance: Medium """ # Add description to repo 1 and its product - Product.update( + module_target_sat.cli.Product.update( { 'id': repo.get('product')['id'], 'organization': module_org.label, 'description': 'Fake Upstream', } ) - Repository.update({'id': repo['id'], 'description': ['Fake Upstream']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'description': ['Fake Upstream']} + ) # Sync repo 1 from the real upstream - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Make 2nd repo - prod_2 = make_product({'organization-id': module_org.id, 'description': 'Downstream'}) + prod_2 = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'description': 'Downstream'} + ) repo_options_2['organization-id'] = module_org.id repo_options_2['product-id'] = prod_2['id'] repo_options_2['url'] = repo.get('published-at') - repo_2 = make_repository(repo_options_2) - Repository.update({'id': repo_2['id'], 'description': ['Downstream']}) - repo_2 = Repository.info({'id': repo_2['id']}) - Repository.synchronize({'id': repo_2['id']}) + repo_2 = module_target_sat.cli_factory.make_repository(repo_options_2) + module_target_sat.cli.Repository.update({'id': repo_2['id'], 'description': ['Downstream']}) + repo_2 = module_target_sat.cli.Repository.info({'id': repo_2['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_2['id']}) # Get list of repo 1's packages and remove one - package = choice(Package.list({'repository-id': repo['id']})) - Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) - repo = Repository.info({'id': repo['id']}) + package = choice(module_target_sat.cli.Package.list({'repository-id': repo['id']})) + module_target_sat.cli.Repository.remove_content({'id': repo['id'], 'ids': [package['id']]}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '31' # Re-synchronize repo_2, the downstream repository - Repository.synchronize({'id': repo_2['id']}) - repo_2 = Repository.info({'id': repo_2['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo_2['id']}) + repo_2 = module_target_sat.cli.Repository.info({'id': repo_2['id']}) assert repo_2['sync']['status'] == 'Success' assert repo_2['content-counts']['packages'] == '31' @@ -1261,8 +1253,8 @@ def test_positive_synchronize_rpm_repo_ignore_SRPM( :CaseLevel: Integration """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['source-rpms'] == '0', 'content not ignored correctly' @@ -1270,7 +1262,7 @@ def test_positive_synchronize_rpm_repo_ignore_SRPM( @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_update_url(self, repo): + def test_positive_update_url(self, repo, module_target_sat): """Update the original url for a repository :id: 1a2cf29b-5c30-4d4c-b6d1-2f227b0a0a57 @@ -1282,9 +1274,9 @@ def test_positive_update_url(self, repo): 
:CaseImportance: Critical """ # Update the url - Repository.update({'id': repo['id'], 'url': settings.repos.yum_2.url}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'url': settings.repos.yum_2.url}) # Fetch it again - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['url'] == settings.repos.yum_2.url @pytest.mark.tier1 @@ -1292,7 +1284,9 @@ def test_positive_update_url(self, repo): 'new_repo_options', **parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}'}]), ) - def test_negative_update_url_with_special_characters(self, new_repo_options, repo): + def test_negative_update_url_with_special_characters( + self, new_repo_options, repo, module_target_sat + ): """Verify that repository URL cannot be updated to contain the forbidden characters @@ -1305,13 +1299,15 @@ def test_negative_update_url_with_special_characters(self, new_repo_options, rep :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - Repository.update({'id': repo['id'], 'url': new_repo_options['url']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'url': new_repo_options['url']} + ) # Fetch it again, ensure url hasn't changed. - result = Repository.info({'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['url'] == repo['url'] @pytest.mark.tier1 - def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): + def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key, target_sat): """Update the original gpg key :id: 367ff375-4f52-4a8c-b974-8c1c54e3fdd3 @@ -1322,11 +1318,13 @@ def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key['id']}) + target_sat.cli.Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key['id']}) - gpg_key_new = make_content_credential({'organization-id': module_org.id}) - Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key_new['id']}) - result = Repository.info({'id': repo['id']}) + gpg_key_new = target_sat.cli_factory.make_content_credential( + {'organization-id': module_org.id} + ) + target_sat.cli.Repository.update({'id': repo['id'], 'gpg-key-id': gpg_key_new['id']}) + result = target_sat.cli.Repository.info({'id': repo['id']}) assert result['gpg-key']['id'] == gpg_key_new['id'] @pytest.mark.tier1 @@ -1335,7 +1333,7 @@ def test_positive_update_gpg_key(self, repo_options, module_org, repo, gpg_key): **parametrized([{'mirroring-policy': policy} for policy in MIRRORING_POLICIES]), indirect=True, ) - def test_positive_update_mirroring_policy(self, repo, repo_options): + def test_positive_update_mirroring_policy(self, repo, repo_options, module_target_sat): """Update the mirroring policy rule for repository :id: 9bab2537-3223-40d7-bc4c-a51b09d2e812 @@ -1346,15 +1344,17 @@ def test_positive_update_mirroring_policy(self, repo, repo_options): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'mirroring-policy': repo_options['mirroring-policy']}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update( + {'id': repo['id'], 'mirroring-policy': repo_options['mirroring-policy']} + ) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['mirroring-policy'] == MIRRORING_POLICIES[repo_options['mirroring-policy']] @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', 
**parametrized([{'publish-via-http': 'no'}]), indirect=True ) - def test_positive_update_publish_method(self, repo): + def test_positive_update_publish_method(self, repo, module_target_sat): """Update the original publishing method :id: e7bd2667-4851-4a64-9c70-1b5eafbc3f71 @@ -1365,8 +1365,8 @@ def test_positive_update_publish_method(self, repo): :CaseImportance: Critical """ - Repository.update({'id': repo['id'], 'publish-via-http': 'yes'}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'id': repo['id'], 'publish-via-http': 'yes'}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['publish-via-http'] == 'yes' @pytest.mark.tier1 @@ -1376,7 +1376,9 @@ def test_positive_update_publish_method(self, repo): indirect=True, ) @pytest.mark.parametrize('checksum_type', ['sha1', 'sha256']) - def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): + def test_positive_update_checksum_type( + self, repo_options, repo, checksum_type, module_target_sat + ): """Create a YUM repository and update the checksum type :id: 42f14257-d860-443d-b337-36fd355014bc @@ -1389,8 +1391,8 @@ def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): :CaseImportance: Critical """ assert repo['content-type'] == repo_options['content-type'] - Repository.update({'checksum-type': checksum_type, 'id': repo['id']}) - result = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.update({'checksum-type': checksum_type, 'id': repo['id']}) + result = module_target_sat.cli.Repository.info({'id': repo['id']}) assert result['checksum-type'] == checksum_type @pytest.mark.tier1 @@ -1408,7 +1410,7 @@ def test_positive_update_checksum_type(self, repo_options, repo, checksum_type): ), indirect=True, ) - def test_negative_create_checksum_with_on_demand_policy(self, repo_options): + def test_negative_create_checksum_with_on_demand_policy(self, repo_options, module_target_sat): """Attempt to create repository with checksum and on_demand policy. 
:id: 33d712e6-e91f-42bb-8c5d-35bdc427182c @@ -1422,7 +1424,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): :BZ: 1732056 """ with pytest.raises(CLIFactoryError): - make_repository(repo_options) + module_target_sat.cli_factory.make_repository(repo_options) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1430,7 +1432,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): **parametrized([{'name': name} for name in valid_data_list().values()]), indirect=True, ) - def test_positive_delete_by_id(self, repo): + def test_positive_delete_by_id(self, repo, target_sat): """Check if repository can be created and deleted :id: bcf096db-0033-4138-90a3-cb7355d5dfaf @@ -1441,9 +1443,9 @@ def test_positive_delete_by_id(self, repo): :CaseImportance: Critical """ - Repository.delete({'id': repo['id']}) + target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.upgrade @@ -1452,7 +1454,7 @@ def test_positive_delete_by_id(self, repo): **parametrized([{'name': name} for name in valid_data_list().values()]), indirect=True, ) - def test_positive_delete_by_name(self, repo_options, repo): + def test_positive_delete_by_name(self, repo_options, repo, module_target_sat): """Check if repository can be created and deleted :id: 463980a4-dbcf-4178-83a6-1863cf59909a @@ -1463,9 +1465,11 @@ def test_positive_delete_by_name(self, repo_options, repo): :CaseImportance: Critical """ - Repository.delete({'name': repo['name'], 'product-id': repo_options['product-id']}) + module_target_sat.cli.Repository.delete( + {'name': repo['name'], 'product-id': repo_options['product-id']} + ) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1473,7 +1477,7 @@ def test_positive_delete_by_name(self, repo_options, repo): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_delete_rpm(self, repo): + def test_positive_delete_rpm(self, repo, module_target_sat): """Check if rpm repository with packages can be deleted. 
:id: 1172492f-d595-4c8e-89c1-fabb21eb04ac @@ -1484,14 +1488,14 @@ def test_positive_delete_rpm(self, repo): :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' # Check that there is at least one package assert int(repo['content-counts']['packages']) > 0 - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.upgrade @@ -1500,7 +1504,9 @@ def test_positive_delete_rpm(self, repo): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_remove_content_by_repo_name(self, module_org, module_product, repo): + def test_positive_remove_content_by_repo_name( + self, module_org, module_product, repo, module_target_sat + ): """Synchronize and remove rpm content using repo name :id: a8b6f17d-3b13-4185-920a-2558ace59458 @@ -1513,14 +1519,14 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, :CaseImportance: Critical """ - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': repo['name'], 'product': module_product.name, 'organization': module_org.name, } ) - repo = Repository.info( + repo = module_target_sat.cli.Repository.info( { 'name': repo['name'], 'product': module_product.name, @@ -1530,14 +1536,14 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list( + packages = module_target_sat.cli.Package.list( { 'repository': repo['name'], 'product': module_product.name, 'organization': module_org.name, } ) - Repository.remove_content( + module_target_sat.cli.Repository.remove_content( { 'name': repo['name'], 'product': module_product.name, @@ -1545,7 +1551,7 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, 'ids': [package['id'] for package in packages], } ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' @pytest.mark.tier1 @@ -1555,7 +1561,7 @@ def test_positive_remove_content_by_repo_name(self, module_org, module_product, **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_positive_remove_content_rpm(self, repo): + def test_positive_remove_content_rpm(self, repo, module_target_sat): """Synchronize repository and remove rpm content from it :id: c4bcda0e-c0d6-424c-840d-26684ca7c9f1 @@ -1568,16 +1574,16 @@ def test_positive_remove_content_rpm(self, repo): :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['sync']['status'] == 'Success' assert repo['content-counts']['packages'] == '32' # Find repo packages and remove them - packages = Package.list({'repository-id': repo['id']}) - Repository.remove_content( + packages = 
module_target_sat.cli.Package.list({'repository-id': repo['id']}) + module_target_sat.cli.Repository.remove_content( {'id': repo['id'], 'ids': [package['id'] for package in packages]} ) - repo = Repository.info({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert repo['content-counts']['packages'] == '0' @pytest.mark.tier1 @@ -1662,7 +1668,7 @@ def test_positive_upload_content_to_file_repo(self, repo, target_sat): **parametrized([{'content-type': 'yum', 'url': settings.repos.yum_1.url}]), indirect=True, ) - def test_negative_restricted_user_cv_add_repository(self, module_org, repo): + def test_negative_restricted_user_cv_add_repository(self, module_org, repo, module_target_sat): """Attempt to add a product repository to content view with a restricted user, using product name not visible to restricted user. @@ -1731,7 +1737,7 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): content_view_name = f"Test_{gen_string('alpha', 20)}" # Create a non admin user, for the moment without any permissions - user = make_user( + user = module_target_sat.cli_factory.make_user( { 'admin': False, 'default-organization-id': module_org.id, @@ -1741,9 +1747,9 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): } ) # Create a new role - role = make_role() + role = module_target_sat.cli_factory.make_role() # Get the available permissions - available_permissions = Filter.available_permissions() + available_permissions = module_target_sat.cli.Filter.available_permissions() # group the available permissions by resource type available_rc_permissions = {} for permission in available_permissions: @@ -1764,43 +1770,45 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): # assert that all the required permissions are available assert set(permission_names) == set(available_permission_names) # Create the current resource type role permissions - make_filter({'role-id': role['id'], 'permissions': permission_names, 'search': search}) + module_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permission_names, 'search': search} + ) # Add the created and initiated role with permissions to user - User.add_role({'id': user['id'], 'role-id': role['id']}) + module_target_sat.cli.User.add_role({'id': user['id'], 'role-id': role['id']}) # assert that the user is not an admin one and cannot read the current # role info (note: view_roles is not in the required permissions) with pytest.raises( CLIReturnCodeError, match=r'Access denied\\nMissing one of the required permissions: view_roles', ): - Role.with_user(user_name, user_password).info({'id': role['id']}) + module_target_sat.cli.Role.with_user(user_name, user_password).info({'id': role['id']}) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) # Create a content view - content_view = make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'organization-id': module_org.id, 'name': content_view_name} ) # assert that the user can read the content view info as per required # permissions - user_content_view = ContentView.with_user(user_name, user_password).info( - {'id': content_view['id']} - ) + user_content_view = module_target_sat.cli.ContentView.with_user( + user_name, user_password + ).info({'id': content_view['id']}) # assert that this is the same content view assert content_view['name'] == user_content_view['name'] # assert admin user is 
able to view the product - repos = Repository.list({'organization-id': module_org.id}) + repos = module_target_sat.cli.Repository.list({'organization-id': module_org.id}) assert len(repos) == 1 # assert that this is the same repo assert repos[0]['id'] == repo['id'] # assert that restricted user is not able to view the product - repos = Repository.with_user(user_name, user_password).list( + repos = module_target_sat.cli.Repository.with_user(user_name, user_password).list( {'organization-id': module_org.id} ) assert len(repos) == 0 # assert that the user cannot add the product repo to content view with pytest.raises(CLIReturnCodeError): - ContentView.with_user(user_name, user_password).add_repository( + module_target_sat.cli.ContentView.with_user(user_name, user_password).add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, @@ -1808,7 +1816,7 @@ def test_negative_restricted_user_cv_add_repository(self, module_org, repo): } ) # assert that restricted user still not able to view the product - repos = Repository.with_user(user_name, user_password).list( + repos = module_target_sat.cli.Repository.with_user(user_name, user_password).list( {'organization-id': module_org.id} ) assert len(repos) == 0 @@ -1834,7 +1842,7 @@ def test_positive_upload_remove_srpm_content(self, repo, target_sat): remote_path=f"/tmp/{SRPM_TO_UPLOAD}", ) # Upload SRPM - result = Repository.upload_content( + result = target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -1844,17 +1852,23 @@ def test_positive_upload_remove_srpm_content(self, repo, target_sat): } ) assert f"Successfully uploaded file '{SRPM_TO_UPLOAD}'" in result[0]['message'] - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 1 + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 1 + ) # Remove uploaded SRPM - Repository.remove_content( + target_sat.cli.Repository.remove_content( { 'id': repo['id'], - 'ids': [Srpm.list({'repository-id': repo['id']})[0]['id']], + 'ids': [target_sat.cli.Srpm.list({'repository-id': repo['id']})[0]['id']], 'content-type': 'srpm', } ) - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 0 + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 0 + ) @pytest.mark.upgrade @pytest.mark.tier2 @@ -1880,7 +1894,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): remote_path=f"/tmp/{SRPM_TO_UPLOAD}", ) # Upload SRPM - Repository.upload_content( + target_sat.cli.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -1889,15 +1903,18 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): 'content-type': 'srpm', } ) - assert len(Srpm.list()) > 0 - srpm_list = Srpm.list({'repository-id': repo['id']}) + assert len(target_sat.cli.Srpm.list()) > 0 + srpm_list = target_sat.cli.Srpm.list({'repository-id': repo['id']}) assert srpm_list[0]['filename'] == SRPM_TO_UPLOAD assert len(srpm_list) == 1 - assert Srpm.info({'id': srpm_list[0]['id']})[0]['filename'] == SRPM_TO_UPLOAD - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == 1 + assert target_sat.cli.Srpm.info({'id': srpm_list[0]['id']})[0]['filename'] == SRPM_TO_UPLOAD + assert ( + int(target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms']) + == 1 + ) assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'organization': 
repo['organization'], 'product-id': repo['product']['id'], @@ -1907,10 +1924,10 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) > 0 ) - assert len(Srpm.list({'organization': repo['organization']})) > 0 + assert len(target_sat.cli.Srpm.list({'organization': repo['organization']})) > 0 assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'organization': repo['organization'], 'lifecycle-environment': 'Library', @@ -1921,7 +1938,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) assert ( len( - Srpm.list( + target_sat.cli.Srpm.list( { 'content-view': 'Default Organization View', 'lifecycle-environment': 'Library', @@ -1933,16 +1950,16 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): ) # Remove uploaded SRPM - Repository.remove_content( + target_sat.cli.Repository.remove_content( { 'id': repo['id'], - 'ids': [Srpm.list({'repository-id': repo['id']})[0]['id']], + 'ids': [target_sat.cli.Srpm.list({'repository-id': repo['id']})[0]['id']], 'content-type': 'srpm', } ) - assert int(Repository.info({'id': repo['id']})['content-counts']['source-rpms']) == len( - Srpm.list({'repository-id': repo['id']}) - ) + assert int( + target_sat.cli.Repository.info({'id': repo['id']})['content-counts']['source-rpms'] + ) == len(target_sat.cli.Srpm.list({'repository-id': repo['id']})) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -1951,7 +1968,7 @@ def test_positive_srpm_list_end_to_end(self, repo, target_sat): indirect=True, ) def test_positive_create_get_update_delete_module_streams( - self, repo_options, module_org, module_product, repo + self, repo_options, module_org, module_product, repo, module_target_sat ): """Check module-stream get for each create, get, update, delete. @@ -1980,34 +1997,34 @@ def test_positive_create_get_update_delete_module_streams( :CaseImportance: Critical """ - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert ( repo['content-counts']['module-streams'] == '7' ), 'Module Streams not synced correctly' # adding repo with same yum url should not change count. 
- duplicate_repo = make_repository(repo_options) - Repository.synchronize({'id': duplicate_repo['id']}) + duplicate_repo = module_target_sat.cli_factory.make_repository(repo_options) + module_target_sat.cli.Repository.synchronize({'id': duplicate_repo['id']}) - module_streams = ModuleStream.list({'organization-id': module_org.id}) + module_streams = module_target_sat.cli.ModuleStream.list({'organization-id': module_org.id}) assert len(module_streams) == 7, 'Module Streams get worked correctly' - Repository.update( + module_target_sat.cli.Repository.update( { 'product-id': module_product.id, 'id': repo['id'], 'url': settings.repos.module_stream_1.url, } ) - Repository.synchronize({'id': repo['id']}) - repo = Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + repo = module_target_sat.cli.Repository.info({'id': repo['id']}) assert ( repo['content-counts']['module-streams'] == '7' ), 'Module Streams not synced correctly' - Repository.delete({'id': repo['id']}) + module_target_sat.cli.Repository.delete({'id': repo['id']}) with pytest.raises(CLIReturnCodeError): - Repository.info({'id': repo['id']}) + module_target_sat.cli.Repository.info({'id': repo['id']}) @pytest.mark.tier1 @pytest.mark.parametrize( @@ -2019,7 +2036,9 @@ def test_positive_create_get_update_delete_module_streams( 'repo_options_2', **parametrized([{'content-type': 'yum', 'url': settings.repos.module_stream_1.url}]), ) - def test_module_stream_list_validation(self, module_org, repo, repo_options_2): + def test_module_stream_list_validation( + self, module_org, repo, repo_options_2, module_target_sat + ): """Check module-stream get with list on hammer. :id: 9842a0c3-8532-4b16-a00a-534fc3b0a776ff89f23e-cd00-4d20-84d3-add0ea24abf8 @@ -2038,17 +2057,17 @@ def test_module_stream_list_validation(self, module_org, repo, repo_options_2): :CaseAutomation: Automated """ - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) - prod_2 = make_product({'organization-id': module_org.id}) + prod_2 = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) repo_options_2['organization-id'] = module_org.id repo_options_2['product-id'] = prod_2['id'] - repo_2 = make_repository(repo_options_2) + repo_2 = module_target_sat.cli_factory.make_repository(repo_options_2) - Repository.synchronize({'id': repo_2['id']}) - module_streams = ModuleStream.list() + module_target_sat.cli.Repository.synchronize({'id': repo_2['id']}) + module_streams = module_target_sat.cli.ModuleStream.list() assert len(module_streams) > 13, 'Module Streams list failed' - module_streams = ModuleStream.list({'product-id': prod_2['id']}) + module_streams = module_target_sat.cli.ModuleStream.list({'product-id': prod_2['id']}) assert len(module_streams) == 7, 'Module Streams list by product failed' @pytest.mark.tier1 @@ -2057,7 +2076,7 @@ def test_module_stream_list_validation(self, module_org, repo, repo_options_2): **parametrized([{'content-type': 'yum', 'url': settings.repos.module_stream_1.url}]), indirect=True, ) - def test_module_stream_info_validation(self, repo): + def test_module_stream_info_validation(self, repo, module_target_sat): """Check module-stream get with info on hammer. 
:id: ddbeb49e-d292-4dc4-8fb9-e9b768acc441a2c2e797-02b7-4b12-9f95-cffc93254198 @@ -2076,11 +2095,11 @@ def test_module_stream_info_validation(self, repo): :CaseAutomation: Automated """ - Repository.synchronize({'id': repo['id']}) - module_streams = ModuleStream.list( + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) + module_streams = module_target_sat.cli.ModuleStream.list( {'repository-id': repo['id'], 'search': 'name="walrus" and stream="5.21"'} ) - actual_result = ModuleStream.info({'id': module_streams[0]['id']}) + actual_result = module_target_sat.cli.ModuleStream.info({'id': module_streams[0]['id']}) expected_result = { 'module-stream-name': 'walrus', 'stream': '5.21', @@ -2092,7 +2111,7 @@ def test_module_stream_info_validation(self, repo): @pytest.mark.tier1 @pytest.mark.skip_if_open('BZ:2002653') - def test_negative_update_red_hat_repo(self, module_manifest_org): + def test_negative_update_red_hat_repo(self, module_manifest_org, module_target_sat): """Updates to Red Hat products fail. :id: d3ac0ea2-faab-4df4-be66-733e1b7ae6b4 @@ -2109,26 +2128,34 @@ def test_negative_update_red_hat_repo(self, module_manifest_org): :expectedresults: hammer returns error code. The repository is not updated. """ - rh_repo_set_id = RepositorySet.list({'organization-id': module_manifest_org.id})[0]['id'] + rh_repo_set_id = module_target_sat.cli.RepositorySet.list( + {'organization-id': module_manifest_org.id} + )[0]['id'] - RepositorySet.enable( + module_target_sat.cli.RepositorySet.enable( { 'organization-id': module_manifest_org.id, 'basearch': "x86_64", 'id': rh_repo_set_id, } ) - repo_list = Repository.list({'organization-id': module_manifest_org.id}) + repo_list = module_target_sat.cli.Repository.list( + {'organization-id': module_manifest_org.id} + ) - rh_repo_id = Repository.list({'organization-id': module_manifest_org.id})[0]['id'] + rh_repo_id = module_target_sat.cli.Repository.list( + {'organization-id': module_manifest_org.id} + )[0]['id'] - Repository.update( + module_target_sat.cli.Repository.update( { 'id': rh_repo_id, 'url': f'{gen_url(scheme="https")}:{gen_integer(min_value=10, max_value=9999)}', } ) - repo_info = Repository.info({'organization-id': module_manifest_org.id, 'id': rh_repo_id}) + repo_info = module_target_sat.cli.Repository.info( + {'organization-id': module_manifest_org.id, 'id': rh_repo_id} + ) assert repo_info['url'] in [repo.get('url') for repo in repo_list] @pytest.mark.tier1 @@ -2164,7 +2191,7 @@ def test_positive_accessible_content_status( **parametrized([{'content_type': 'yum', 'url': CUSTOM_RPM_SHA}]), indirect=True, ) - def test_positive_sync_sha_repo(self, repo_options): + def test_positive_sync_sha_repo(self, repo_options, module_target_sat): """Sync a 'sha' repo successfully :id: 20579f52-a67b-4d3f-be07-41eec059a891 @@ -2177,10 +2204,10 @@ def test_positive_sync_sha_repo(self, repo_options): :SubComponent: Candlepin """ - sha_repo = make_repository(repo_options) - sha_repo = Repository.info({'id': sha_repo['id']}) - Repository.synchronize({'id': sha_repo['id']}) - sha_repo = Repository.info({'id': sha_repo['id']}) + sha_repo = module_target_sat.cli_factory.make_repository(repo_options) + sha_repo = module_target_sat.cli.Repository.info({'id': sha_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': sha_repo['id']}) + sha_repo = module_target_sat.cli.Repository.info({'id': sha_repo['id']}) assert sha_repo['sync']['status'] == 'Success' @pytest.mark.tier2 @@ -2189,7 +2216,7 @@ def test_positive_sync_sha_repo(self, repo_options): 
        **parametrized([{'content_type': 'yum', 'url': CUSTOM_3RD_PARTY_REPO}]),
         indirect=True,
     )
-    def test_positive_sync_third_party_repo(self, repo_options):
+    def test_positive_sync_third_party_repo(self, repo_options, module_target_sat):
         """Sync third party repo successfully

         :id: 45936ab8-46b7-4f07-8b71-d7c8a4a2d984

@@ -2202,10 +2229,10 @@ def test_positive_sync_third_party_repo(self, repo_options):

         :SubComponent: Pulp
         """
-        repo = make_repository(repo_options)
-        repo = Repository.info({'id': repo['id']})
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        repo = module_target_sat.cli_factory.make_repository(repo_options)
+        repo = module_target_sat.cli.Repository.info({'id': repo['id']})
+        module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = module_target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['sync']['status'] == 'Success'

@@ -2413,7 +2440,7 @@ def test_positive_sync(self, repo, module_org, module_product, target_sat):

         :expectedresults: srpms can be listed in repository
         """
-        Repository.synchronize({'id': repo['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library"
             f"/custom/{module_product.label}/{repo['label']}/Packages/t/ | grep .src.rpm"
@@ -2436,10 +2463,10 @@ def test_positive_sync_publish_cv(self, module_org, module_product, repo, target

         :expectedresults: srpms can be listed in content view
         """
-        Repository.synchronize({'id': repo['id']})
-        cv = make_content_view({'organization-id': module_org.id})
-        ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
-        ContentView.publish({'id': cv['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
+        target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
+        target_sat.cli.ContentView.publish({'id': cv['id']})
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/content_views/"
             f"{cv['label']}/1.0/custom/{module_product.label}/{repo['label']}/Packages/t/"
@@ -2465,14 +2492,16 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product

         :expectedresults: srpms can be listed in content view in proper lifecycle environment
         """
-        lce = make_lifecycle_environment({'organization-id': module_org.id})
-        Repository.synchronize({'id': repo['id']})
-        cv = make_content_view({'organization-id': module_org.id})
-        ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
-        ContentView.publish({'id': cv['id']})
-        content_view = ContentView.info({'id': cv['id']})
+        lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
+        target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
+        target_sat.cli.ContentView.publish({'id': cv['id']})
+        content_view = target_sat.cli.ContentView.info({'id': cv['id']})
         cvv = content_view['versions'][0]
-        ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']})
+        target_sat.cli.ContentView.version_promote(
+            {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}
+        )
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/{lce['label']}/"
             f"{cv['label']}/custom/{module_product.label}/{repo['label']}/Packages/t"
@@ -2509,7 +2538,7 @@ class TestAnsibleCollectionRepository:
         ids=['ansible_galaxy', 'ansible_hub'],
         indirect=True,
     )
-    def test_positive_sync_ansible_collection(self, repo, module_org, module_product):
+    def test_positive_sync_ansible_collection(self, repo, module_target_sat):
         """Sync ansible collection repository from Ansible Galaxy and Hub

         :id: 4b6a819b-8c3d-4a74-bd97-ee3f34cf5d92

@@ -2523,8 +2552,8 @@ def test_positive_sync_ansible_collection(self, repo, module_org, module_product

         :parametrized: yes
         """
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = module_target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['sync']['status'] == 'Success'

     @pytest.mark.tier2
@@ -2543,7 +2572,7 @@ def test_positive_sync_ansible_collection(self, repo, module_org, module_product
         ids=['ansible_galaxy'],
         indirect=True,
     )
-    def test_positive_export_ansible_collection(self, repo, module_org, module_product, target_sat):
+    def test_positive_export_ansible_collection(self, repo, module_org, target_sat):
         """Export ansible collection between organizations

         :id: 4858227e-1669-476d-8da3-4e6bfb6b7e2a

@@ -2555,27 +2584,35 @@ def test_positive_export_ansible_collection(self, repo, module_org, module_produ

         :CaseImportance: High
         """
-        import_org = make_org()
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        import_org = target_sat.cli_factory.make_org()
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['sync']['status'] == 'Success'
         # export
-        result = ContentExport.completeLibrary({'organization-id': module_org.id})
+        result = target_sat.cli.ContentExport.completeLibrary({'organization-id': module_org.id})
         target_sat.execute(f'cp -r /var/lib/pulp/exports/{module_org.name} /var/lib/pulp/imports/.')
         target_sat.execute('chown -R pulp:pulp /var/lib/pulp/imports')
         export_metadata = result['message'].split()[1]
         # import
         import_path = export_metadata.replace('/metadata.json', '').replace('exports', 'imports')
-        ContentImport.library({'organization-id': import_org['id'], 'path': import_path})
-        cv = ContentView.info({'name': 'Import-Library', 'organization-label': import_org['label']})
+        target_sat.cli.ContentImport.library(
+            {'organization-id': import_org['id'], 'path': import_path}
+        )
+        cv = target_sat.cli.ContentView.info(
+            {'name': 'Import-Library', 'organization-label': import_org['label']}
+        )
         assert cv['description'] == 'Content View used for importing into library'
-        prods = Product.list({'organization-id': import_org['id']})
-        prod = Product.info({'id': prods[0]['id'], 'organization-id': import_org['id']})
+        prods = target_sat.cli.Product.list({'organization-id': import_org['id']})
+        prod = target_sat.cli.Product.info(
+            {'id': prods[0]['id'], 'organization-id': import_org['id']}
+        )
         ac_content = [
             cont for cont in prod['content'] if cont['content-type'] == 'ansible_collection'
         ]
         assert len(ac_content) > 0
-        repo = Repository.info({'name': ac_content[0]['repo-name'], 'product-id': prod['id']})
+        repo = target_sat.cli.Repository.info(
+            {'name': ac_content[0]['repo-name'], 'product-id': prod['id']}
+        )
         result = target_sat.execute(f'curl {repo["published-at"]}')
         assert "available_versions" in result.stdout

@@ -2595,9 +2632,7 @@ def test_positive_export_ansible_collection(self, repo, module_org, module_produ
         ids=['ansible_galaxy'],
         indirect=True,
     )
-    def test_positive_sync_ansible_collection_from_satellite(
-        self, repo, module_org, module_product, target_sat
-    ):
+    def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat):
         """Sync ansible collection from another organization

         :id: f7897a56-d014-4189-b4c7-df8f15aaf30a

@@ -2609,16 +2644,16 @@ def test_positive_sync_ansible_collection_from_satellite(

         :CaseImportance: High
         """
-        import_org = make_org()
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        import_org = target_sat.cli_factory.make_org()
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['sync']['status'] == 'Success'
         published_url = repo['published-at']
         # sync from different org
-        prod_2 = make_product(
+        prod_2 = target_sat.cli_factory.make_product(
             {'organization-id': import_org['id'], 'description': 'Sync from Satellite'}
         )
-        repo_2 = make_repository(
+        repo_2 = target_sat.cli_factory.make_repository(
             {
                 'organization-id': import_org['id'],
                 'product-id': prod_2['id'],
@@ -2628,8 +2663,8 @@
 [{ name: theforeman.operations, version: "0.1.0"}]}',
             }
         )
-        Repository.synchronize({'id': repo_2['id']})
-        repo_2_status = Repository.info({'id': repo_2['id']})
+        target_sat.cli.Repository.synchronize({'id': repo_2['id']})
+        repo_2_status = target_sat.cli.Repository.info({'id': repo_2['id']})
         assert repo_2_status['sync']['status'] == 'Success'


@@ -2641,7 +2676,7 @@ class TestMD5Repository:
     @pytest.mark.parametrize(
         'repo_options', **parametrized([{'url': FAKE_YUM_MD5_REPO}]), indirect=True
     )
-    def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product, target_sat):
+    def test_positive_sync_publish_promote_cv(self, repo, module_org, target_sat):
         """Synchronize MD5 signed repository with add repository to content view,
         publish and promote content view to lifecycle environment

@@ -2652,18 +2687,20 @@

         :expectedresults: rpms can be listed in content view in proper lifecycle environment
         """
-        lce = make_lifecycle_environment({'organization-id': module_org.id})
-        Repository.synchronize({'id': repo['id']})
-        synced_repo = Repository.info({'id': repo['id']})
+        lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        synced_repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert synced_repo['sync']['status'].lower() == 'success'
         assert synced_repo['content-counts']['packages'] == '35'
-        cv = make_content_view({'organization-id': module_org.id})
-        ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
-        ContentView.publish({'id': cv['id']})
-        content_view = ContentView.info({'id': cv['id']})
+        cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
+        target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
+        target_sat.cli.ContentView.publish({'id': cv['id']})
+        content_view = target_sat.cli.ContentView.info({'id': cv['id']})
         cvv = content_view['versions'][0]
-        ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']})
-        cv = ContentView.info({'id': cv['id']})
+        target_sat.cli.ContentView.version_promote(
+            {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}
+        )
+        cv = target_sat.cli.ContentView.info({'id': cv['id']})
         assert synced_repo['id'] in [repo['id'] for repo in cv['yum-repositories']]
         assert lce['id'] in [lc['id'] for lc in cv['lifecycle-environments']]

@@ -2686,7 +2723,7 @@ def test_positive_sync(self, repo, module_org, module_product, target_sat):

         :expectedresults: drpms can be listed in repository
         """
-        Repository.synchronize({'id': repo['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library"
             f"/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm"
@@ -2709,10 +2746,10 @@ def test_positive_sync_publish_cv(self, repo, module_org, module_product, target

         :expectedresults: drpms can be listed in content view
         """
-        Repository.synchronize({'id': repo['id']})
-        cv = make_content_view({'organization-id': module_org.id})
-        ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
-        ContentView.publish({'id': cv['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
+        target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
+        target_sat.cli.ContentView.publish({'id': cv['id']})
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/content_views/"
             f"{cv['label']}/1.0/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm"
@@ -2737,14 +2774,16 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product

         :expectedresults: drpms can be listed in content view in proper lifecycle environment
         """
-        lce = make_lifecycle_environment({'organization-id': module_org.id})
-        Repository.synchronize({'id': repo['id']})
-        cv = make_content_view({'organization-id': module_org.id})
-        ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
-        ContentView.publish({'id': cv['id']})
-        content_view = ContentView.info({'id': cv['id']})
+        lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
+        target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
+        target_sat.cli.ContentView.publish({'id': cv['id']})
+        content_view = target_sat.cli.ContentView.info({'id': cv['id']})
         cvv = content_view['versions'][0]
-        ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']})
+        target_sat.cli.ContentView.version_promote(
+            {'id': cvv['id'], 'to-lifecycle-environment-id': lce['id']}
+        )
         result = target_sat.execute(
             f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/{lce['label']}"
             f"/{cv['label']}/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm"
@@ -2972,7 +3011,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
             local_path=DataFile.RPM_TO_UPLOAD,
             remote_path=f"/tmp/{RPM_TO_UPLOAD}",
         )
-        result = Repository.upload_content(
+        result = target_sat.cli.Repository.upload_content(
             {
                 'name': repo['name'],
                 'organization': repo['organization'],
@@ -2981,7 +3020,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
             }
         )
         assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-        repo = Repository.info({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['content-counts']['files'] == '1'
         filesearch = entities.File().search(
             query={"search": f"name={RPM_TO_UPLOAD} and repository={repo['name']}"}
@@ -3037,7 +3076,7 @@ def test_positive_remove_file(self, repo, target_sat):
             local_path=DataFile.RPM_TO_UPLOAD,
             remote_path=f"/tmp/{RPM_TO_UPLOAD}",
         )
-        result = Repository.upload_content(
+        result = target_sat.cli.Repository.upload_content(
             {
                 'name': repo['name'],
                 'organization': repo['organization'],
@@ -3046,11 +3085,13 @@
             }
         )
         assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-        repo = Repository.info({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert int(repo['content-counts']['files']) > 0
-        files = File.list({'repository-id': repo['id']})
-        Repository.remove_content({'id': repo['id'], 'ids': [file_['id'] for file_ in files]})
-        repo = Repository.info({'id': repo['id']})
+        files = target_sat.cli.File.list({'repository-id': repo['id']})
+        target_sat.cli.Repository.remove_content(
+            {'id': repo['id'], 'ids': [file_['id'] for file_ in files]}
+        )
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['content-counts']['files'] == '0'

     @pytest.mark.tier2
@@ -3068,7 +3109,7 @@ def test_positive_remove_file(self, repo, target_sat):
         ),
         indirect=True,
     )
-    def test_positive_remote_directory_sync(self, repo):
+    def test_positive_remote_directory_sync(self, repo, module_target_sat):
         """Check an entire remote directory can be synced to File Repository
         through http

@@ -3088,8 +3129,8 @@

         :expectedresults: entire directory is synced over http
         """
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = module_target_sat.cli.Repository.info({'id': repo['id']})
         assert repo['sync']['status'] == 'Success'
         assert repo['content-counts']['files'] == '2'

@@ -3126,8 +3167,8 @@ def test_positive_file_repo_local_directory_sync(self, repo, target_sat):
             f'wget -P {CUSTOM_LOCAL_FOLDER} -r -np -nH --cut-dirs=5 -R "index.html*" '
             f'{CUSTOM_FILE_REPO}'
         )
-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert int(repo['content-counts']['files']) > 1

     @pytest.mark.tier2
@@ -3166,8 +3207,8 @@ def test_positive_symlinks_sync(self, repo, target_sat):
         )
         target_sat.execute(f'ln -s {CUSTOM_LOCAL_FOLDER} /{gen_string("alpha")}')

-        Repository.synchronize({'id': repo['id']})
-        repo = Repository.info({'id': repo['id']})
+        target_sat.cli.Repository.synchronize({'id': repo['id']})
+        repo = target_sat.cli.Repository.info({'id': repo['id']})
         assert int(repo['content-counts']['files']) > 1

     @pytest.mark.tier2
@@ -3200,7 +3241,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
         """
         text_file_name = f'test-{gen_string("alpha", 5)}.txt'.lower()
         target_sat.execute(f'echo "First File" > /tmp/{text_file_name}')
-        result = Repository.upload_content(
+        result = target_sat.cli.Repository.upload_content(
             {
                 'name':
repo['name'], 'organization': repo['organization'], @@ -3209,7 +3250,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat } ) assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli_factory.Repository.info({'id': repo['id']}) # Assert there is only one file assert repo['content-counts']['files'] == '1' filesearch = entities.File().search( @@ -3218,7 +3259,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat assert text_file_name == filesearch[0].name # Create new version of the file by changing the text target_sat.execute(f'echo "Second File" > /tmp/{text_file_name}') - result = Repository.upload_content( + result = target_sat.cli_factory.Repository.upload_content( { 'name': repo['name'], 'organization': repo['organization'], @@ -3227,7 +3268,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat } ) assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message'] - repo = Repository.info({'id': repo['id']}) + repo = target_sat.cli_factory.Repository.info({'id': repo['id']}) # Assert there is still only one file assert repo['content-counts']['files'] == '1' filesearch = entities.File().search( diff --git a/tests/foreman/cli/test_repository_set.py b/tests/foreman/cli/test_repository_set.py index 0be98f8e32d..317f19e33d5 100644 --- a/tests/foreman/cli/test_repository_set.py +++ b/tests/foreman/cli/test_repository_set.py @@ -18,15 +18,13 @@ """ import pytest -from robottelo.cli.product import Product -from robottelo.cli.repository_set import RepositorySet from robottelo.constants import PRDS, REPOSET pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.tier1] @pytest.fixture -def params(function_entitlement_manifest_org): +def params(function_entitlement_manifest_org, target_sat): PRODUCT_NAME = PRDS['rhel'] REPOSET_NAME = REPOSET['rhva6'] ARCH = 'x86_64' @@ -34,8 +32,10 @@ def params(function_entitlement_manifest_org): RELEASE = '6Server' manifest_org = function_entitlement_manifest_org - product_id = Product.info({'name': PRODUCT_NAME, 'organization-id': manifest_org.id})['id'] - reposet_id = RepositorySet.info( + product_id = target_sat.cli.Product.info( + {'name': PRODUCT_NAME, 'organization-id': manifest_org.id} + )['id'] + reposet_id = target_sat.cli.RepositorySet.info( {'name': REPOSET_NAME, 'organization-id': manifest_org.id, 'product-id': product_id} )['id'] @@ -114,7 +114,7 @@ def match_repos(repos, match_params): @pytest.mark.upgrade -def test_positive_list_available_repositories(params): +def test_positive_list_available_repositories(params, target_sat): """List available repositories for repository-set :id: 987d6b08-acb0-4264-a459-9cef0d2c6f3f @@ -125,39 +125,39 @@ def test_positive_list_available_repositories(params): :CaseImportance: Critical """ # No repos should be enabled by default - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 0 # Enable repo from Repository Set - RepositorySet.enable(params['enable']['id']) + target_sat.cli.RepositorySet.enable(params['enable']['id']) # Only 1 repo should be enabled, and it should match the arch and releasever - result = RepositorySet.available_repositories(params['avail']['name']) + result = 
target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled'])) == 1 # Enable one more repo - RepositorySet.enable(params['enable']['arch_2']) + target_sat.cli.RepositorySet.enable(params['enable']['arch_2']) # 2 repos should be enabled - result = RepositorySet.available_repositories(params['avail']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled'])) == 2 # Disable one repo - RepositorySet.disable(params['enable']['id']) + target_sat.cli.RepositorySet.disable(params['enable']['id']) # There should remain only 1 enabled repo - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 1 # Disable the last enabled repo - RepositorySet.disable(params['enable']['arch_2']) + target_sat.cli.RepositorySet.disable(params['enable']['arch_2']) # There should be no enabled repos - result = RepositorySet.available_repositories(params['avail']['id']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['id']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_enable_by_name(params): +def test_positive_enable_by_name(params, target_sat): """Enable repo from reposet by names of reposet, org and product :id: a78537bd-b88d-4f00-8901-e7944e5de729 @@ -166,12 +166,12 @@ def test_positive_enable_by_name(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['name']) - result = RepositorySet.available_repositories(params['avail']['name']) + target_sat.cli.RepositorySet.enable(params['enable']['name']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_enable_by_label(params): +def test_positive_enable_by_label(params, target_sat): """Enable repo from reposet by org label, reposet and product names @@ -181,12 +181,12 @@ def test_positive_enable_by_label(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['label']) - result = RepositorySet.available_repositories(params['avail']['label']) + target_sat.cli.RepositorySet.enable(params['enable']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_enable_by_id(params): +def test_positive_enable_by_id(params, target_sat): """Enable repo from reposet by IDs of reposet, org and product :id: f7c88534-1d45-45d9-9b87-c50c4e268e8d @@ -195,12 +195,12 @@ def test_positive_enable_by_id(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['ids']) - result = RepositorySet.available_repositories(params['avail']['ids']) + target_sat.cli.RepositorySet.enable(params['enable']['ids']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['ids']) assert len(match_repos(result, params['match']['enabled_arch_rel'])) == 1 -def test_positive_disable_by_name(params): +def test_positive_disable_by_name(params, target_sat): """Disable repo from reposet by names of reposet, org and product @@ -210,13 +210,13 @@ def test_positive_disable_by_name(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['name']) - 
RepositorySet.disable(params['enable']['name']) - result = RepositorySet.available_repositories(params['avail']['name']) + target_sat.cli.RepositorySet.enable(params['enable']['name']) + target_sat.cli.RepositorySet.disable(params['enable']['name']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['name']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_disable_by_label(params): +def test_positive_disable_by_label(params, target_sat): """Disable repo from reposet by org label, reposet and product names @@ -226,13 +226,13 @@ def test_positive_disable_by_label(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['label']) - RepositorySet.disable(params['enable']['label']) - result = RepositorySet.available_repositories(params['avail']['label']) + target_sat.cli.RepositorySet.enable(params['enable']['label']) + target_sat.cli.RepositorySet.disable(params['enable']['label']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['label']) assert len(match_repos(result, params['match']['enabled'])) == 0 -def test_positive_disable_by_id(params): +def test_positive_disable_by_id(params, target_sat): """Disable repo from reposet by IDs of reposet, org and product :id: 0d6102ba-3fb9-4eb8-972e-d537e252a8e6 @@ -241,7 +241,7 @@ def test_positive_disable_by_id(params): :CaseImportance: Critical """ - RepositorySet.enable(params['enable']['ids']) - RepositorySet.disable(params['enable']['ids']) - result = RepositorySet.available_repositories(params['avail']['ids']) + target_sat.cli.RepositorySet.enable(params['enable']['ids']) + target_sat.cli.RepositorySet.disable(params['enable']['ids']) + result = target_sat.cli.RepositorySet.available_repositories(params['avail']['ids']) assert len(match_repos(result, params['match']['enabled'])) == 0 diff --git a/tests/foreman/cli/test_role.py b/tests/foreman/cli/test_role.py index feeb1c928be..93c9ed677c0 100644 --- a/tests/foreman/cli/test_role.py +++ b/tests/foreman/cli/test_role.py @@ -23,19 +23,8 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import CLIDataBaseError, CLIReturnCodeError -from robottelo.cli.factory import ( - make_filter, - make_location, - make_org, - make_role, - make_user, -) -from robottelo.cli.filter import Filter -from robottelo.cli.role import Role -from robottelo.cli.settings import Settings -from robottelo.cli.user import User from robottelo.constants import PERMISSIONS, ROLES +from robottelo.exceptions import CLIDataBaseError, CLIReturnCodeError from robottelo.utils.datafactory import generate_strings_list, parametrized @@ -49,7 +38,7 @@ class TestRole: list(zip(generate_strings_list(length=10), generate_strings_list(length=10))) ), ) - def test_positive_crud_with_name(self, name, new_name): + def test_positive_crud_with_name(self, name, new_name, module_target_sat): """Create new role with provided name, update name and delete role by ID :id: f77b8e84-e964-4007-b12b-142949134d8b @@ -63,18 +52,18 @@ def test_positive_crud_with_name(self, name, new_name): :CaseImportance: Critical """ - role = make_role({'name': name}) + role = module_target_sat.cli_factory.make_role({'name': name}) assert role['name'] == name - Role.update({'id': role['id'], 'new-name': new_name}) - role = Role.info({'id': role['id']}) + module_target_sat.cli.Role.update({'id': role['id'], 'new-name': new_name}) + role = module_target_sat.cli.Role.info({'id': role['id']}) assert role['name'] == new_name - Role.delete({'id': 
role['id']}) + module_target_sat.cli.Role.delete({'id': role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': role['id']}) + module_target_sat.cli.Role.info({'id': role['id']}) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_with_permission(self): + def test_positive_create_with_permission(self, module_target_sat): """Create new role with a set of permission :id: 7cb2b2e2-ad4d-41e9-b6b2-c0366eb09b9a @@ -83,18 +72,24 @@ def test_positive_create_with_permission(self): :CaseImportance: Critical """ - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=Organization"}) + for permission in module_target_sat.cli.Filter.available_permissions( + {"search": "resource_type=Organization"} + ) ] # Assign filter to created role - make_filter({'role-id': role['id'], 'permissions': permissions}) - assert set(Role.filters({'id': role['id']})[0]['permissions']) == set(permissions) + module_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) + assert set(module_target_sat.cli.Role.filters({'id': role['id']})[0]['permissions']) == set( + permissions + ) @pytest.mark.tier1 - def test_positive_list_filters_by_id(self): + def test_positive_list_filters_by_id(self, module_target_sat): """Create new role with a filter and list it by role id :id: 6979ad8d-629b-481e-9d3a-8f3b3bca53f9 @@ -103,19 +98,23 @@ def test_positive_list_filters_by_id(self): :CaseImportance: Critical """ - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=Organization"}) + for permission in module_target_sat.cli.Filter.available_permissions( + {"search": "resource_type=Organization"} + ) ] # Assign filter to created role - filter_ = make_filter({'role-id': role['id'], 'permissions': permissions}) + filter_ = module_target_sat.cli_factory.make_filter( + {'role-id': role['id'], 'permissions': permissions} + ) assert role['name'] == filter_['role'] - assert Role.filters({'id': role['id']})[0]['id'] == filter_['id'] + assert module_target_sat.cli.Role.filters({'id': role['id']})[0]['id'] == filter_['id'] @pytest.mark.tier1 - def test_positive_list_filters_by_name(self): + def test_positive_list_filters_by_name(self, module_target_sat): """Create new role with a filter and list it by role name :id: bbcb3982-f484-4dde-a3ea-7145fd28ab1f @@ -124,19 +123,23 @@ def test_positive_list_filters_by_name(self): :CaseImportance: Critical """ - role = make_role() + role = module_target_sat.cli_factory.make_role() # Pick permissions by its resource type permissions = [ permission['name'] - for permission in Filter.available_permissions({"search": "resource_type=Organization"}) + for permission in module_target_sat.cli.Filter.available_permissions( + {"search": "resource_type=Organization"} + ) ] # Assign filter to created role - filter_ = make_filter({'role': role['name'], 'permissions': permissions}) + filter_ = module_target_sat.cli_factory.make_filter( + {'role': role['name'], 'permissions': permissions} + ) assert role['name'] == filter_['role'] - assert Role.filters({'name': role['name']})[0]['id'] == filter_['id'] + assert module_target_sat.cli.Role.filters({'name': role['name']})[0]['id'] == filter_['id'] @pytest.mark.tier1 - def 
test_negative_list_filters_without_parameters(self): + def test_negative_list_filters_without_parameters(self, module_target_sat): """Try to list filter without specifying role id or name :id: 56cafbe0-d1cb-413e-8eac-0e01a3590fd2 @@ -149,28 +152,28 @@ def test_negative_list_filters_without_parameters(self): """ with pytest.raises(CLIReturnCodeError) as err: try: - Role.filters() + module_target_sat.cli.Role.filters() except CLIDataBaseError as err: pytest.fail(err) assert re.search('At least one of options .* is required', err.value.msg) @pytest.fixture() - def make_role_with_permissions(self): + def make_role_with_permissions(self, target_sat): """Create new role with a filter""" - role = make_role() + role = target_sat.cli_factory.make_role() res_types = iter(PERMISSIONS.keys()) permissions = [] # Collect more than 20 different permissions while len(permissions) <= 20: permissions += [ permission['name'] - for permission in Filter.available_permissions( + for permission in target_sat.cli.Filter.available_permissions( {"search": f"resource_type={next(res_types)}"} ) ] # Create a filter for each permission for perm in permissions: - make_filter({'role': role['name'], 'permissions': perm}) + target_sat.cli_factory.make_filter({'role': role['name'], 'permissions': perm}) return { 'role': role, 'permissions': permissions, @@ -179,7 +182,9 @@ def make_role_with_permissions(self): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('per_page', [1, 5, 20]) - def test_positive_list_filters_with_pagination(self, make_role_with_permissions, per_page): + def test_positive_list_filters_with_pagination( + self, make_role_with_permissions, per_page, module_target_sat + ): """Make sure filters list can be displayed with different items per page value @@ -197,14 +202,14 @@ def test_positive_list_filters_with_pagination(self, make_role_with_permissions, """ # Verify the first page contains exactly the same items count # as `per-page` value - filters = Role.filters( + filters = module_target_sat.cli.Role.filters( {'name': make_role_with_permissions['role']['name'], 'per-page': per_page} ) assert len(filters) == per_page # Verify pagination and total amount of pages by checking the # items count on the last page last_page = ceil(len(make_role_with_permissions['permissions']) / per_page) - filters = Role.filters( + filters = module_target_sat.cli.Role.filters( { 'name': make_role_with_permissions['role']['name'], 'page': last_page, @@ -217,7 +222,7 @@ def test_positive_list_filters_with_pagination(self, make_role_with_permissions, @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_delete_cloned_builtin(self): + def test_positive_delete_cloned_builtin(self, module_target_sat): """Clone a builtin role and attempt to delete it :id: 1fd9c636-596a-4cb2-b100-de19238042cc @@ -229,27 +234,29 @@ def test_positive_delete_cloned_builtin(self): :CaseImportance: Critical """ - role_list = Role.list({'search': f'name=\\"{choice(ROLES)}\\"'}) + role_list = module_target_sat.cli.Role.list({'search': f'name=\\"{choice(ROLES)}\\"'}) assert len(role_list) == 1 - cloned_role = Role.clone({'id': role_list[0]['id'], 'new-name': gen_string('alphanumeric')}) - Role.delete({'id': cloned_role['id']}) + cloned_role = module_target_sat.cli.Role.clone( + {'id': role_list[0]['id'], 'new-name': gen_string('alphanumeric')} + ) + module_target_sat.cli.Role.delete({'id': cloned_role['id']}) with pytest.raises(CLIReturnCodeError): - Role.info({'id': cloned_role['id']}) + module_target_sat.cli.Role.info({'id': 
cloned_role['id']}) class TestSystemAdmin: """Test class for System Admin role end to end CLI""" @pytest.fixture(scope='class', autouse=True) - def tearDown(self): + def tearDown(self, class_target_sat): """Will reset the changed value of settings""" yield - Settings.set({'name': "outofsync_interval", 'value': "30"}) + class_target_sat.cli.Settings.set({'name': "outofsync_interval", 'value': "30"}) @pytest.mark.upgrade @pytest.mark.tier3 @pytest.mark.e2e - def test_system_admin_role_end_to_end(self): + def test_system_admin_role_end_to_end(self, target_sat): """Test System admin role with a end to end workflow :id: da6b3549-d1cf-44fc-869f-08d15d407fa2 @@ -277,27 +284,27 @@ def test_system_admin_role_end_to_end(self): :CaseLevel: System """ - org = make_org() - location = make_location() + org = target_sat.cli_factory.make_org() + location = target_sat.cli_factory.make_location() common_pass = gen_string('alpha') - role = Role.info({'name': 'System admin'}) - system_admin_1 = make_user( + role = target_sat.cli.Role.info({'name': 'System admin'}) + system_admin_1 = target_sat.cli_factory.make_user( { 'password': common_pass, 'organization-ids': org['id'], 'location-ids': location['id'], } ) - User.add_role({'id': system_admin_1['id'], 'role-id': role['id']}) - Settings.with_user(username=system_admin_1['login'], password=common_pass).set( - {'name': "outofsync_interval", 'value': "32"} - ) - sync_time = Settings.list({'search': 'name=outofsync_interval'})[0] + target_sat.cli.User.add_role({'id': system_admin_1['id'], 'role-id': role['id']}) + target_sat.cli.Settings.with_user( + username=system_admin_1['login'], password=common_pass + ).set({'name': "outofsync_interval", 'value': "32"}) + sync_time = target_sat.cli.Settings.list({'search': 'name=outofsync_interval'})[0] # Asserts if the setting was updated successfully assert '32' == sync_time['value'] # Create another System Admin user using the first one - system_admin = User.with_user( + system_admin = target_sat.cli.User.with_user( username=system_admin_1['login'], password=common_pass ).create( { @@ -313,7 +320,9 @@ def test_system_admin_role_end_to_end(self): } ) # Create the Org Admin user - org_role = Role.with_user(username=system_admin['login'], password=common_pass).clone( + org_role = target_sat.cli.Role.with_user( + username=system_admin['login'], password=common_pass + ).clone( { 'name': 'Organization admin', 'new-name': gen_string('alpha'), @@ -321,7 +330,9 @@ def test_system_admin_role_end_to_end(self): 'location-ids': location['id'], } ) - org_admin = User.with_user(username=system_admin['login'], password=common_pass).create( + org_admin = target_sat.cli.User.with_user( + username=system_admin['login'], password=common_pass + ).create( { 'auth-source-id': 1, 'firstname': gen_string('alpha'), @@ -336,20 +347,20 @@ def test_system_admin_role_end_to_end(self): ) # Assert if the cloning was successful assert org_role['id'] is not None - org_role_filters = Role.filters({'id': org_role['id']}) + org_role_filters = target_sat.cli.Role.filters({'id': org_role['id']}) search_filter = None for arch_filter in org_role_filters: if arch_filter['resource-type'] == 'Architecture': search_filter = arch_filter break - Filter.with_user(username=system_admin['login'], password=common_pass).update( - {'role-id': org_role['id'], 'id': arch_filter['id'], 'search': 'name=x86_64'} - ) + target_sat.cli.Filter.with_user( + username=system_admin['login'], password=common_pass + ).update({'role-id': org_role['id'], 'id': arch_filter['id'], 'search': 
'name=x86_64'}) # Asserts if the filter is updated - assert 'name=x86_64' in Filter.info({'id': search_filter['id']}).values() - org_admin = User.with_user(username=system_admin['login'], password=common_pass).info( - {'id': org_admin['id']} - ) + assert 'name=x86_64' in target_sat.cli.Filter.info({'id': search_filter['id']}).values() + org_admin = target_sat.cli.User.with_user( + username=system_admin['login'], password=common_pass + ).info({'id': org_admin['id']}) # Asserts Created Org Admin assert org_role['name'] in org_admin['roles'] assert org['name'] in org_admin['organizations'] diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index d5a5cfdf9f6..61229834794 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -22,20 +22,6 @@ from manifester import Manifester import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.content_export import ContentExport -from robottelo.cli.content_import import ContentImport -from robottelo.cli.contentview import ContentView -from robottelo.cli.factory import ( - make_content_view, - make_org, - make_product, - make_repository, -) -from robottelo.cli.package import Package -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.settings import Settings from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, @@ -48,19 +34,34 @@ REPOS, ) from robottelo.constants.repos import ANSIBLE_GALAXY +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='class') -def config_export_import_settings(): +def config_export_import_settings(module_target_sat): """Check settings and set download policy for export. 
Reset to original state after import""" - download_policy_value = Settings.info({'name': 'default_download_policy'})['value'] - rh_download_policy_value = Settings.info({'name': 'default_redhat_download_policy'})['value'] - subs_conn_enabled_value = Settings.info({'name': 'subscription_connection_enabled'})['value'] - Settings.set({'name': 'default_redhat_download_policy', 'value': 'immediate'}) + download_policy_value = module_target_sat.cli.Settings.info( + {'name': 'default_download_policy'} + )['value'] + rh_download_policy_value = module_target_sat.cli.Settings.info( + {'name': 'default_redhat_download_policy'} + )['value'] + subs_conn_enabled_value = module_target_sat.cli.Settings.info( + {'name': 'subscription_connection_enabled'} + )['value'] + module_target_sat.cli.Settings.set( + {'name': 'default_redhat_download_policy', 'value': 'immediate'} + ) yield - Settings.set({'name': 'default_download_policy', 'value': download_policy_value}) - Settings.set({'name': 'default_redhat_download_policy', 'value': rh_download_policy_value}) - Settings.set({'name': 'subscription_connection_enabled', 'value': subs_conn_enabled_value}) + module_target_sat.cli.Settings.set( + {'name': 'default_download_policy', 'value': download_policy_value} + ) + module_target_sat.cli.Settings.set( + {'name': 'default_redhat_download_policy', 'value': rh_download_policy_value} + ) + module_target_sat.cli.Settings.set( + {'name': 'subscription_connection_enabled', 'value': subs_conn_enabled_value} + ) @pytest.fixture(scope='function') @@ -100,8 +101,8 @@ def function_import_org_with_manifest(target_sat, function_import_org): @pytest.fixture(scope='class') def docker_repo(module_target_sat, module_org): - product = make_product({'organization-id': module_org.id}) - repo = make_repository( + product = module_target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = module_target_sat.cli_factory.make_repository( { 'organization-id': module_org.id, 'product-id': product['id'], @@ -111,7 +112,7 @@ def docker_repo(module_target_sat, module_org): 'docker-upstream-name': 'quay/busybox', } ) - Repository.synchronize({'id': repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': repo['id']}) yield repo @@ -322,19 +323,23 @@ def test_positive_export_complete_library_rh_repo( """ # Create cv and publish cv_name = gen_string('alpha') - cv = make_content_view({'name': cv_name, 'organization-id': function_sca_manifest_org.id}) - ContentView.add_repository( + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} + ) + target_sat.cli.ContentView.add_repository( { 'id': cv['id'], 'organization-id': function_sca_manifest_org.id, 'repository-id': function_synced_rhel_repo['id'], } ) - ContentView.publish({'id': cv['id']}) + target_sat.cli.ContentView.publish({'id': cv['id']}) # Verify export directory is empty assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' # Export content view - ContentExport.completeLibrary({'organization-id': function_sca_manifest_org.id}) + target_sat.cli.ContentExport.completeLibrary( + {'organization-id': function_sca_manifest_org.id} + ) # Verify export directory is not empty assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) != '' @@ -399,7 +404,9 @@ def test_positive_export_version_docker( """ # Create CV and publish cv_name = gen_string('alpha') - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) + cv = 
target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': module_org.id} + ) target_sat.cli.ContentView.add_repository( { 'id': cv['id'], @@ -430,12 +437,14 @@ def test_positive_export_version_docker( @pytest.fixture(scope='class') -def class_export_entities(module_org): +def class_export_entities(module_org, module_target_sat): """Setup custom repos for export""" exporting_prod_name = gen_string('alpha') - product = make_product({'organization-id': module_org.id, 'name': exporting_prod_name}) + product = module_target_sat.cli_factory.make_product( + {'organization-id': module_org.id, 'name': exporting_prod_name} + ) exporting_repo_name = gen_string('alpha') - exporting_repo = make_repository( + exporting_repo = module_target_sat.cli_factory.make_repository( { 'name': exporting_repo_name, 'mirror-on-sync': 'no', @@ -443,7 +452,7 @@ def class_export_entities(module_org): 'product-id': product['id'], } ) - Repository.synchronize({'id': exporting_repo['id']}) + module_target_sat.cli.Repository.synchronize({'id': exporting_repo['id']}) exporting_cv_name = gen_string('alpha') exporting_cv, exporting_cvv_id = _create_cv(exporting_cv_name, exporting_repo, module_org) return { @@ -457,7 +466,7 @@ def class_export_entities(module_org): } -def _create_cv(cv_name, repo, module_org, publish=True): +def _create_cv(cv_name, repo, module_org, module_target_sat, publish=True): """Creates CV and/or publishes in organization with given name and repository :param cv_name: The name of CV to create @@ -467,26 +476,28 @@ def _create_cv(cv_name, repo, module_org, publish=True): :return: The directory of CV and Content View ID """ description = gen_string('alpha') - content_view = make_content_view( + content_view = module_target_sat.cli_factory.make_content_view( {'name': cv_name, 'description': description, 'organization-id': module_org.id} ) - ContentView.add_repository( + module_target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': repo['id'], } ) - content_view = ContentView.info({'name': cv_name, 'organization-id': module_org.id}) + content_view = module_target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': module_org.id} + ) cvv_id = None if publish: - ContentView.publish({'id': content_view['id']}) - content_view = ContentView.info({'id': content_view['id']}) + module_target_sat.cli.ContentView.publish({'id': content_view['id']}) + content_view = module_target_sat.cli.ContentView.info({'id': content_view['id']}) cvv_id = content_view['versions'][0]['id'] return content_view, cvv_id -def _import_entities(product, repo, cv, mos='no'): +def _import_entities(product, repo, cv, sat, mos='no'): """Sets same CV, product and repository in importing organization as exporting organization @@ -496,9 +507,11 @@ def _import_entities(product, repo, cv, mos='no'): :param str mos: Mirror on Sync repo, by default 'no' can override to 'yes' :returns dictionary with CLI entities created in this function """ - importing_org = make_org() - importing_prod = make_product({'organization-id': importing_org['id'], 'name': product}) - importing_repo = make_repository( + importing_org = sat.cli_factory.make_org() + importing_prod = sat.cli_factory.make_product( + {'organization-id': importing_org['id'], 'name': product} + ) + importing_repo = sat.cli_factory.make_repository( { 'name': repo, 'mirror-on-sync': mos, @@ -506,8 +519,10 @@ def _import_entities(product, repo, cv, mos='no'): 'product-id': importing_prod['id'], } ) - 
importing_cv = make_content_view({'name': cv, 'organization-id': importing_org['id']}) - ContentView.add_repository( + importing_cv = sat.cli_factory.make_content_view( + {'name': cv, 'organization-id': importing_org['id']} + ) + sat.cli.ContentView.add_repository( { 'id': importing_cv['id'], 'organization-id': importing_org['id'], @@ -756,7 +771,7 @@ def test_positive_export_import_filtered_cvv( False, ) filter_name = gen_string('alphanumeric') - ContentView.filter.create( + target_sat.cli.ContentView.filter.create( { 'name': filter_name, 'content-view-id': exporting_cv['id'], @@ -764,46 +779,50 @@ def test_positive_export_import_filtered_cvv( 'type': 'rpm', } ) - ContentView.filter.rule.create( + target_sat.cli.ContentView.filter.rule.create( { 'name': 'cat', 'content-view-filter': filter_name, 'content-view-id': exporting_cv['id'], } ) - ContentView.publish( + target_sat.cli.ContentView.publish( { 'id': exporting_cv['id'], 'organization-id': class_export_entities['exporting_org'].id, } ) - exporting_cv = ContentView.info({'id': exporting_cv['id']}) + exporting_cv = target_sat.cli.ContentView.info({'id': exporting_cv['id']}) exporting_cvv_id = exporting_cv['versions'][0]['id'] # Check presence of 1 rpm due to filter - export_packages = Package.list({'content-view-version-id': exporting_cvv_id}) + export_packages = target_sat.cli.Package.list({'content-view-version-id': exporting_cvv_id}) assert len(export_packages) == 1 # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - export = ContentExport.completeVersion( + export = target_sat.cli.ContentExport.completeVersion( {'id': exporting_cvv_id, 'organization-id': module_org.id} ) import_path = target_sat.move_pulp_archive(module_org, export['message']) # Import section - importing_org = make_org() + importing_org = target_sat.cli_factory.make_org() # set disconnected mode - Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) + target_sat.cli.Settings.set({'name': 'subscription_connection_enabled', 'value': "No"}) # check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import file and verify content - ContentImport.version({'organization-id': importing_org['id'], 'path': import_path}) - importing_cvv = ContentView.info( + target_sat.cli.ContentImport.version( + {'organization-id': importing_org['id'], 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( {'name': importing_cvv, 'organization-id': importing_org['id']} )['versions'] assert len(importing_cvv) >= 1 - imported_packages = Package.list({'content-view-version-id': importing_cvv[0]['id']}) + imported_packages = target_sat.cli.Package.list( + {'content-view-version-id': importing_cvv[0]['id']} + ) assert len(imported_packages) == 1 assert len(export_packages) == len(imported_packages) @@ -1146,7 +1165,7 @@ def test_negative_import_same_cv_twice( ) in error.value.message @pytest.mark.tier2 - def test_negative_import_invalid_path(self, module_org): + def test_negative_import_invalid_path(self, module_org, module_target_sat): """Import cv that doesn't exist in path :id: 4cc69666-407f-4d66-b3d2-8fe2ed135a5f @@ -1164,7 +1183,9 @@ def test_negative_import_invalid_path(self, module_org): import_path = f'{PULP_IMPORT_DIR}{export_folder}' # Import section with pytest.raises(CLIReturnCodeError) as error: - ContentImport.version({'organization-id': module_org.id, 'path': import_path}) + 
module_target_sat.cli.ContentImport.version( + {'organization-id': module_org.id, 'path': import_path} + ) assert ( f'''Error: Unable to find '{import_path}/metadata.json'. ''' 'If the metadata.json file is at a different location provide it to the ' @@ -1195,13 +1216,13 @@ def test_postive_export_cv_with_mixed_content_repos( :customerscenario: true """ - product = make_product( + product = target_sat.cli_factory.make_product( { 'organization-id': module_org.id, 'name': gen_string('alpha'), } ) - nonyum_repo = make_repository( + nonyum_repo = target_sat.cli_factory.make_repository( { 'content-type': 'docker', 'docker-upstream-name': 'quay/busybox', @@ -1210,8 +1231,8 @@ def test_postive_export_cv_with_mixed_content_repos( 'url': CONTAINER_REGISTRY_HUB, }, ) - Repository.synchronize({'id': nonyum_repo['id']}) - yum_repo = make_repository( + target_sat.cli.Repository.synchronize({'id': nonyum_repo['id']}) + yum_repo = target_sat.cli_factory.make_repository( { 'name': gen_string('alpha'), 'download-policy': 'immediate', @@ -1219,34 +1240,36 @@ def test_postive_export_cv_with_mixed_content_repos( 'product-id': product['id'], } ) - Repository.synchronize({'id': yum_repo['id']}) - content_view = make_content_view({'organization-id': module_org.id}) + target_sat.cli.Repository.synchronize({'id': yum_repo['id']}) + content_view = target_sat.cli_factory.make_content_view({'organization-id': module_org.id}) # Add docker and yum repo - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': nonyum_repo['id'], } ) - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': module_org.id, 'repository-id': yum_repo['id'], } ) - ContentView.publish({'id': content_view['id']}) - exporting_cv_id = ContentView.info({'id': content_view['id']}) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + exporting_cv_id = target_sat.cli.ContentView.info({'id': content_view['id']}) assert len(exporting_cv_id['versions']) == 1 exporting_cvv_id = exporting_cv_id['versions'][0] # check packages - exported_packages = Package.list({'content-view-version-id': exporting_cvv_id['id']}) + exported_packages = target_sat.cli.Package.list( + {'content-view-version-id': exporting_cvv_id['id']} + ) assert len(exported_packages) # Verify export directory is empty assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' # Export cv - ContentExport.completeVersion( + target_sat.cli.ContentExport.completeVersion( {'id': exporting_cvv_id['id'], 'organization-id': module_org.id} ) # Verify export directory is not empty @@ -1383,7 +1406,9 @@ def test_postive_export_import_ansible_collection_repo( import_product = target_sat.cli.Product.info( { 'organization-id': function_import_org.id, - 'id': Product.list({'organization-id': function_import_org.id})[0]['id'], + 'id': target_sat.cli.Product.list({'organization-id': function_import_org.id})[0][ + 'id' + ], } ) assert import_product['name'] == export_product['name'] diff --git a/tests/foreman/cli/test_settings.py b/tests/foreman/cli/test_settings.py index 3a47b6500a5..4923e8ec5f8 100644 --- a/tests/foreman/cli/test_settings.py +++ b/tests/foreman/cli/test_settings.py @@ -21,9 +21,8 @@ import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.settings import Settings from robottelo.config import settings +from robottelo.exceptions import CLIReturnCodeError from 
robottelo.utils.datafactory import ( gen_string, generate_strings_list, @@ -51,7 +50,7 @@ def test_negative_update_hostname_with_empty_fact(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['discovery_prefix'], indirect=True) -def test_positive_update_hostname_prefix_without_value(setting_update): +def test_positive_update_hostname_prefix_without_value(setting_update, module_target_sat): """Update the Hostname_prefix settings without any string(empty values) :id: a84c28ea-6821-4c31-b4ab-8662c22c9135 @@ -64,12 +63,12 @@ def test_positive_update_hostname_prefix_without_value(setting_update): """ with pytest.raises(CLIReturnCodeError): - Settings.set({'name': "discovery_prefix", 'value': ""}) + module_target_sat.cli.Settings.set({'name': "discovery_prefix", 'value': ""}) @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['discovery_prefix'], indirect=True) -def test_positive_update_hostname_default_prefix(setting_update): +def test_positive_update_hostname_default_prefix(setting_update, module_target_sat): """Update the default set prefix of hostname_prefix setting :id: a6e46e53-6273-406a-8009-f184d9551d66 @@ -80,8 +79,8 @@ def test_positive_update_hostname_default_prefix(setting_update): """ hostname_prefix_value = gen_string('alpha') - Settings.set({'name': "discovery_prefix", 'value': hostname_prefix_value}) - discovery_prefix = Settings.list({'search': 'name=discovery_prefix'})[0] + module_target_sat.cli.Settings.set({'name': "discovery_prefix", 'value': hostname_prefix_value}) + discovery_prefix = module_target_sat.cli.Settings.list({'search': 'name=discovery_prefix'})[0] assert hostname_prefix_value == discovery_prefix['value'] @@ -116,7 +115,7 @@ def test_negative_discover_host_with_invalid_prefix(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def test_positive_update_login_page_footer_text(setting_update): +def test_positive_update_login_page_footer_text(setting_update, module_target_sat): """Updates parameter "login_text" in settings :id: 4d4e1151-5bd6-4fa2-8dbb-e182b43ad7ec @@ -132,14 +131,14 @@ def test_positive_update_login_page_footer_text(setting_update): """ login_text_value = random.choice(list(valid_data_list().values())) - Settings.set({'name': "login_text", 'value': login_text_value}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': login_text_value}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text["value"] == login_text_value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def test_positive_update_login_page_footer_text_without_value(setting_update): +def test_positive_update_login_page_footer_text_without_value(setting_update, module_target_sat): """Updates parameter "login_text" without any string (empty value) :id: 01ce95de-2994-42b6-b9f8-f7882981fb69 @@ -154,14 +153,14 @@ def test_positive_update_login_page_footer_text_without_value(setting_update): :expectedresults: Message on login screen should be removed """ - Settings.set({'name': "login_text", 'value': ""}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': ""}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text['value'] == '' @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['login_text'], indirect=True) -def 
test_positive_update_login_page_footer_text_with_long_string(setting_update): +def test_positive_update_login_page_footer_text_with_long_string(setting_update, module_target_sat): """Attempt to update parameter "Login_page_footer_text" with long length string under General tab @@ -180,8 +179,8 @@ def test_positive_update_login_page_footer_text_with_long_string(setting_update) login_text_value = random.choice( list(generate_strings_list(length=1000, exclude_types=['latin1', 'utf8', 'cjk', 'html'])) ) - Settings.set({'name': "login_text", 'value': login_text_value}) - login_text = Settings.list({'search': 'name=login_text'})[0] + module_target_sat.cli.Settings.set({'name': "login_text", 'value': login_text_value}) + login_text = module_target_sat.cli.Settings.list({'search': 'name=login_text'})[0] assert login_text['value'] == login_text_value @@ -214,7 +213,7 @@ def test_positive_update_email_delivery_method_smtp(): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['delivery_method'], indirect=True) -def test_positive_update_email_delivery_method_sendmail(setting_update): +def test_positive_update_email_delivery_method_sendmail(setting_update, module_target_sat): """Check Updating Sendmail params through settings subcommand :id: 578de898-fde2-4957-b39a-9dd059f490bf @@ -238,13 +237,13 @@ def test_positive_update_email_delivery_method_sendmail(setting_update): 'sendmail_location': '/usr/sbin/sendmail', } for key, value in sendmail_config_params.items(): - Settings.set({'name': f'{key}', 'value': f'{value}'}) - assert Settings.list({'search': f'name={key}'})[0]['value'] == value + module_target_sat.cli.Settings.set({'name': f'{key}', 'value': f'{value}'}) + assert module_target_sat.cli.Settings.list({'search': f'name={key}'})[0]['value'] == value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_reply_address'], indirect=True) -def test_positive_update_email_reply_address(setting_update): +def test_positive_update_email_reply_address(setting_update, module_target_sat): """Check email reply address is updated :id: cb0907d1-9cb6-45c4-b2bb-e2790ea55f16 @@ -259,8 +258,8 @@ def test_positive_update_email_reply_address(setting_update): """ email_address = random.choice(list(valid_emails_list())) email_address = email_address.replace('"', r'\"').replace('`', r'\`') - Settings.set({'name': "email_reply_address", 'value': email_address}) - email_reply_address = Settings.list( + module_target_sat.cli.Settings.set({'name': "email_reply_address", 'value': email_address}) + email_reply_address = module_target_sat.cli.Settings.list( {'search': 'name=email_reply_address'}, output_format='json' )[0] updated_email_address = ( @@ -271,7 +270,7 @@ def test_positive_update_email_reply_address(setting_update): @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_reply_address'], indirect=True) -def test_negative_update_email_reply_address(setting_update): +def test_negative_update_email_reply_address(setting_update, module_target_sat): """Check email reply address is not updated :id: 2a2220c2-badf-47d5-ba3f-e6329930ab39 @@ -288,12 +287,14 @@ def test_negative_update_email_reply_address(setting_update): """ invalid_email_address = random.choice(list(invalid_emails_list())) with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'email_reply_address', 'value': invalid_email_address}) + module_target_sat.cli.Settings.set( + {'name': 'email_reply_address', 'value': invalid_email_address} + ) @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', 
['email_subject_prefix'], indirect=True) -def test_positive_update_email_subject_prefix(setting_update): +def test_positive_update_email_subject_prefix(setting_update, module_target_sat): """Check email subject prefix is updated :id: c8e6b323-7b39-43d6-a9f1-5474f920bba2 @@ -307,14 +308,18 @@ def test_positive_update_email_subject_prefix(setting_update): :CaseImportance: Low """ email_subject_prefix_value = gen_string('alpha') - Settings.set({'name': "email_subject_prefix", 'value': email_subject_prefix_value}) - email_subject_prefix = Settings.list({'search': 'name=email_subject_prefix'})[0] + module_target_sat.cli.Settings.set( + {'name': "email_subject_prefix", 'value': email_subject_prefix_value} + ) + email_subject_prefix = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] assert email_subject_prefix_value == email_subject_prefix['value'] @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['email_subject_prefix'], indirect=True) -def test_negative_update_email_subject_prefix(setting_update): +def test_negative_update_email_subject_prefix(setting_update, module_target_sat): """Check email subject prefix is not updated :id: 8a638596-248f-4196-af36-ad2982196382 @@ -329,18 +334,26 @@ def test_negative_update_email_subject_prefix(setting_update): :CaseImportance: Low """ - email_subject_prefix_original = Settings.list({'search': 'name=email_subject_prefix'})[0] + email_subject_prefix_original = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] email_subject_prefix_value = gen_string('alpha', 256) with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'email_subject_prefix', 'value': email_subject_prefix_value}) - email_subject_prefix = Settings.list({'search': 'name=email_subject_prefix'})[0] + module_target_sat.cli.Settings.set( + {'name': 'email_subject_prefix', 'value': email_subject_prefix_value} + ) + email_subject_prefix = module_target_sat.cli.Settings.list( + {'search': 'name=email_subject_prefix'} + )[0] assert email_subject_prefix == email_subject_prefix_original @pytest.mark.tier2 @pytest.mark.parametrize('send_welcome_email_value', ["true", "false"]) @pytest.mark.parametrize('setting_update', ['send_welcome_email'], indirect=True) -def test_positive_update_send_welcome_email(setting_update, send_welcome_email_value): +def test_positive_update_send_welcome_email( + setting_update, send_welcome_email_value, module_target_sat +): """Check email send welcome email is updated :id: cdaf6cd0-5eea-4252-87c5-f9ec3ba79ac1 @@ -355,15 +368,19 @@ def test_positive_update_send_welcome_email(setting_update, send_welcome_email_v :CaseImportance: Low """ - Settings.set({'name': 'send_welcome_email', 'value': send_welcome_email_value}) - host_value = Settings.list({'search': 'name=send_welcome_email'})[0]['value'] + module_target_sat.cli.Settings.set( + {'name': 'send_welcome_email', 'value': send_welcome_email_value} + ) + host_value = module_target_sat.cli.Settings.list({'search': 'name=send_welcome_email'})[0][ + 'value' + ] assert send_welcome_email_value == host_value @pytest.mark.tier2 @pytest.mark.parametrize('rss_enable_value', ["true", "false"]) @pytest.mark.parametrize('setting_update', ['rss_enable'], indirect=True) -def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value): +def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value, module_target_sat): """Check if the RSS feed can be enabled or disabled :id: 021cefab-2629-44e2-a30d-49c944d0a234 @@ -376,14 
+393,14 @@ def test_positive_enable_disable_rssfeed(setting_update, rss_enable_value): :CaseAutomation: Automated """ - Settings.set({'name': 'rss_enable', 'value': rss_enable_value}) - rss_setting = Settings.list({'search': 'name=rss_enable'})[0] + module_target_sat.cli.Settings.set({'name': 'rss_enable', 'value': rss_enable_value}) + rss_setting = module_target_sat.cli.Settings.list({'search': 'name=rss_enable'})[0] assert rss_setting["value"] == rss_enable_value @pytest.mark.tier2 @pytest.mark.parametrize('setting_update', ['rss_url'], indirect=True) -def test_positive_update_rssfeed_url(setting_update): +def test_positive_update_rssfeed_url(setting_update, module_target_sat): """Check if the RSS feed URL is updated :id: 166ff6f2-e36e-4934-951f-b947139d0d73 @@ -401,14 +418,14 @@ def test_positive_update_rssfeed_url(setting_update): :CaseAutomation: Automated """ test_url = random.choice(list(valid_url_list())) - Settings.set({'name': 'rss_url', 'value': test_url}) - updated_url = Settings.list({'search': 'name=rss_url'})[0] + module_target_sat.cli.Settings.set({'name': 'rss_url', 'value': test_url}) + updated_url = module_target_sat.cli.Settings.list({'search': 'name=rss_url'})[0] assert updated_url['value'] == test_url @pytest.mark.parametrize('value', **xdist_adapter(invalid_boolean_strings())) @pytest.mark.tier2 -def test_negative_update_send_welcome_email(value): +def test_negative_update_send_welcome_email(value, module_target_sat): """Check email send welcome email is updated :id: 2f75775d-72a1-4b2f-86c2-98c36e446099 @@ -426,7 +443,7 @@ def test_negative_update_send_welcome_email(value): :CaseImportance: Low """ with pytest.raises(CLIReturnCodeError): - Settings.set({'name': 'send_welcome_email', 'value': value}) + module_target_sat.cli.Settings.set({'name': 'send_welcome_email', 'value': value}) @pytest.mark.tier3 @@ -461,11 +478,11 @@ def test_positive_failed_login_attempts_limit(setting_update, target_sat): username = settings.server.admin_username password = settings.server.admin_password assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 0 - Settings.set({'name': 'failed_login_attempts_limit', 'value': '5'}) + target_sat.cli.Settings.set({'name': 'failed_login_attempts_limit', 'value': '5'}) for _ in range(5): assert target_sat.execute(f'hammer -u {username} -p BAD_PASS user list').status == 129 assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 129 sleep(301) assert target_sat.execute(f'hammer -u {username} -p {password} user list').status == 0 - Settings.set({'name': 'failed_login_attempts_limit', 'value': '0'}) - assert Settings.info({'name': 'failed_login_attempts_limit'})['value'] == '0' + target_sat.cli.Settings.set({'name': 'failed_login_attempts_limit', 'value': '0'}) + assert target_sat.cli.Settings.info({'name': 'failed_login_attempts_limit'})['value'] == '0' diff --git a/tests/foreman/cli/test_subnet.py b/tests/foreman/cli/test_subnet.py index 6dc36a640f7..1e5f06f7429 100644 --- a/tests/foreman/cli/test_subnet.py +++ b/tests/foreman/cli/test_subnet.py @@ -22,10 +22,8 @@ from fauxfactory import gen_choice, gen_integer, gen_ipaddr import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import CLIFactoryError, make_domain, make_subnet -from robottelo.cli.subnet import Subnet from robottelo.constants import SUBNET_IPAM_TYPES +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.utils.datafactory import ( filtered_datapoint, 
parametrized, @@ -72,7 +70,7 @@ def invalid_missing_attributes(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_CRUD(): +def test_positive_CRUD(module_target_sat): """Create, update and delete subnet :id: d74a52a7-df56-44ef-89a3-081c14e81e43 @@ -89,10 +87,10 @@ def test_positive_CRUD(): from_ip = re.sub(r'\d+$', str(pool[0]), network) to_ip = re.sub(r'\d+$', str(pool[1]), network) domains_amount = random.randint(2, 3) - domains = [make_domain() for _ in range(domains_amount)] + domains = [module_target_sat.cli_factory.make_domain() for _ in range(domains_amount)] gateway = gen_ipaddr(ip3=True) ipam_type = SUBNET_IPAM_TYPES['dhcp'] - subnet = make_subnet( + subnet = module_target_sat.cli_factory.make_subnet( { 'name': name, 'from': from_ip, @@ -105,7 +103,7 @@ def test_positive_CRUD(): } ) # Check if Subnet can be listed - subnets_ids = [subnet_['id'] for subnet_ in Subnet.list()] + subnets_ids = [subnet_['id'] for subnet_ in module_target_sat.cli.Subnet.list()] assert subnet['id'] in subnets_ids assert subnet['name'] == name assert subnet['start-of-ip-range'] == from_ip @@ -125,7 +123,7 @@ def test_positive_CRUD(): ip_from = re.sub(r'\d+$', str(pool[0]), new_network) ip_to = re.sub(r'\d+$', str(pool[1]), new_network) ipam_type = SUBNET_IPAM_TYPES['internal'] - Subnet.update( + module_target_sat.cli.Subnet.update( { 'new-name': new_name, 'from': ip_from, @@ -137,7 +135,7 @@ def test_positive_CRUD(): 'domain-ids': "", # delete domains needed for subnet delete } ) - subnet = Subnet.info({'id': subnet['id']}) + subnet = module_target_sat.cli.Subnet.info({'id': subnet['id']}) assert subnet['name'] == new_name assert subnet['start-of-ip-range'] == ip_from assert subnet['end-of-ip-range'] == ip_to @@ -146,14 +144,14 @@ def test_positive_CRUD(): assert ipam_type in subnet['ipam'] # delete subnet - Subnet.delete({'id': subnet['id']}) + module_target_sat.cli.Subnet.delete({'id': subnet['id']}) with pytest.raises(CLIReturnCodeError): - Subnet.info({'id': subnet['id']}) + module_target_sat.cli.Subnet.info({'id': subnet['id']}) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_missing_attributes())) -def test_negative_create_with_attributes(options): +def test_negative_create_with_attributes(options, module_target_sat): """Create subnet with invalid or missing required attributes :id: de468dd3-7ba8-463e-881a-fd1cb3cfc7b6 @@ -165,13 +163,13 @@ def test_negative_create_with_attributes(options): :CaseImportance: Medium """ with pytest.raises(CLIFactoryError, match='Could not create the subnet:'): - make_subnet(options) + module_target_sat.cli_factory.make_subnet(options) @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.parametrize('pool', **parametrized(invalid_addr_pools())) -def test_negative_create_with_address_pool(pool): +def test_negative_create_with_address_pool(pool, module_target_sat): """Create subnet with invalid address pool range :parametrized: yes @@ -189,13 +187,13 @@ def test_negative_create_with_address_pool(pool): for key, val in pool.items(): opts[key] = re.sub(r'\d+$', str(val), network) with pytest.raises(CLIFactoryError) as raise_ctx: - make_subnet(opts) + module_target_sat.cli_factory.make_subnet(opts) assert 'Could not create the subnet:' in str(raise_ctx.value) @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_missing_attributes())) -def test_negative_update_attributes(options): +def test_negative_update_attributes(options, module_target_sat): """Update subnet with invalid or missing required attributes 
:parametrized: yes @@ -206,19 +204,19 @@ def test_negative_update_attributes(options): :CaseImportance: Medium """ - subnet = make_subnet() + subnet = module_target_sat.cli_factory.make_subnet() options['id'] = subnet['id'] with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): - Subnet.update(options) + module_target_sat.cli.Subnet.update(options) # check - subnet is not updated - result = Subnet.info({'id': subnet['id']}) + result = module_target_sat.cli.Subnet.info({'id': subnet['id']}) for key in options.keys(): assert subnet[key] == result[key] @pytest.mark.tier2 @pytest.mark.parametrize('options', **parametrized(invalid_addr_pools())) -def test_negative_update_address_pool(options): +def test_negative_update_address_pool(options, module_target_sat): """Update subnet with invalid address pool :parametrized: yes @@ -229,15 +227,15 @@ def test_negative_update_address_pool(options): :CaseImportance: Medium """ - subnet = make_subnet() + subnet = module_target_sat.cli_factory.make_subnet() opts = {'id': subnet['id']} # generate pool range from network address for key, val in options.items(): opts[key] = re.sub(r'\d+$', str(val), subnet['network-addr']) with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): - Subnet.update(opts) + module_target_sat.cli.Subnet.update(opts) # check - subnet is not updated - result = Subnet.info({'id': subnet['id']}) + result = module_target_sat.cli.Subnet.info({'id': subnet['id']}) for key in ['start-of-ip-range', 'end-of-ip-range']: assert result[key] == subnet[key] diff --git a/tests/foreman/cli/test_subscription.py b/tests/foreman/cli/test_subscription.py index d6feeb1381d..f2491479e40 100644 --- a/tests/foreman/cli/test_subscription.py +++ b/tests/foreman/cli/test_subscription.py @@ -20,23 +20,20 @@ from nailgun import entities import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import make_activation_key, make_product, make_repository -from robottelo.cli.host import Host -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.subscription import Subscription from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.run_in_one_thread] @pytest.fixture(scope='module') -def golden_ticket_host_setup(request, module_sca_manifest_org): - new_product = make_product({'organization-id': module_sca_manifest_org.id}) - new_repo = make_repository({'product-id': new_product['id']}) - Repository.synchronize({'id': new_repo['id']}) - new_ak = make_activation_key( +def golden_ticket_host_setup(request, module_sca_manifest_org, module_target_sat): + new_product = module_target_sat.cli_factory.make_product( + {'organization-id': module_sca_manifest_org.id} + ) + new_repo = module_target_sat.cli_factory.make_repository({'product-id': new_product['id']}) + module_target_sat.cli.Repository.synchronize({'id': new_repo['id']}) + new_ak = module_target_sat.cli_factory.make_activation_key( { 'lifecycle-environment': 'Library', 'content-view': 'Default Organization View', @@ -48,7 +45,7 @@ def golden_ticket_host_setup(request, module_sca_manifest_org): @pytest.mark.tier1 -def test_positive_manifest_upload(function_entitlement_manifest_org): +def test_positive_manifest_upload(function_entitlement_manifest_org, module_target_sat): """upload manifest :id: e5a0e4f8-fed9-4896-87a0-ac33f6baa227 @@ -58,12 +55,14 @@ def 
test_positive_manifest_upload(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_manifest_delete(function_entitlement_manifest_org): +def test_positive_manifest_delete(function_entitlement_manifest_org, module_target_sat): """Delete uploaded manifest :id: 01539c07-00d5-47e2-95eb-c0fd4f39090f @@ -72,14 +71,20 @@ def test_positive_manifest_delete(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - Subscription.delete_manifest({'organization-id': function_entitlement_manifest_org.id}) - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.Subscription.delete_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): +def test_positive_enable_manifest_reposet(function_entitlement_manifest_org, module_target_sat): """enable repository set :id: cc0f8f40-5ea6-4fa7-8154-acdc2cb56b45 @@ -91,8 +96,10 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - RepositorySet.enable( + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.RepositorySet.enable( { 'basearch': 'x86_64', 'name': REPOSET['rhva6'], @@ -101,7 +108,7 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): 'releasever': '6Server', } ) - Repository.synchronize( + module_target_sat.cli.Repository.synchronize( { 'name': REPOS['rhva6']['name'], 'organization-id': function_entitlement_manifest_org.id, @@ -111,7 +118,7 @@ def test_positive_enable_manifest_reposet(function_entitlement_manifest_org): @pytest.mark.tier3 -def test_positive_manifest_history(function_entitlement_manifest_org): +def test_positive_manifest_history(function_entitlement_manifest_org, module_target_sat): """upload manifest and check history :id: 000ab0a0-ec1b-497a-84ff-3969a965b52c @@ -121,14 +128,14 @@ def test_positive_manifest_history(function_entitlement_manifest_org): :CaseImportance: Medium """ org = function_entitlement_manifest_org - Subscription.list({'organization-id': org.id}, per_page=None) - history = Subscription.manifest_history({'organization-id': org.id}) + module_target_sat.cli.Subscription.list({'organization-id': org.id}, per_page=None) + history = module_target_sat.cli.Subscription.manifest_history({'organization-id': org.id}) assert f'{org.name} file imported successfully.' 
in ''.join(history) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_manifest_refresh(function_entitlement_manifest_org): +def test_positive_manifest_refresh(function_entitlement_manifest_org, module_target_sat): """upload manifest and refresh :id: 579bbbf7-11cf-4d78-a3b1-16d73bd4ca57 @@ -137,13 +144,19 @@ def test_positive_manifest_refresh(function_entitlement_manifest_org): :CaseImportance: Critical """ - Subscription.list({'organization-id': function_entitlement_manifest_org.id}, per_page=False) - Subscription.refresh_manifest({'organization-id': function_entitlement_manifest_org.id}) - Subscription.delete_manifest({'organization-id': function_entitlement_manifest_org.id}) + module_target_sat.cli.Subscription.list( + {'organization-id': function_entitlement_manifest_org.id}, per_page=False + ) + module_target_sat.cli.Subscription.refresh_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) + module_target_sat.cli.Subscription.delete_manifest( + {'organization-id': function_entitlement_manifest_org.id} + ) @pytest.mark.tier2 -def test_positive_subscription_list(function_entitlement_manifest_org): +def test_positive_subscription_list(function_entitlement_manifest_org, module_target_sat): """Verify that subscription list contains start and end date :id: 4861bcbc-785a-436d-98ce-14cfef7d6907 @@ -156,7 +169,7 @@ def test_positive_subscription_list(function_entitlement_manifest_org): :CaseImportance: Medium """ - subscription_list = Subscription.list( + subscription_list = module_target_sat.cli.Subscription.list( {'organization-id': function_entitlement_manifest_org.id}, per_page=False ) for column in ['start-date', 'end-date']: @@ -189,14 +202,14 @@ def test_positive_delete_manifest_as_another_user(target_sat, function_entitleme ).create() # use the first admin to upload a manifest target_sat.put(f'{function_entitlement_manifest.path}', f'{function_entitlement_manifest.name}') - Subscription.with_user(username=user1.login, password=user1_password).upload( + target_sat.cli.Subscription.with_user(username=user1.login, password=user1_password).upload( {'file': f'{function_entitlement_manifest.name}', 'organization-id': f'{org.id}'} ) # try to search and delete the manifest with another admin - Subscription.with_user(username=user2.login, password=user2_password).delete_manifest( - {'organization-id': org.id} - ) - assert len(Subscription.list({'organization-id': org.id})) == 0 + target_sat.cli.Subscription.with_user( + username=user2.login, password=user2_password + ).delete_manifest({'organization-id': org.id}) + assert len(target_sat.cli.Subscription.list({'organization-id': org.id})) == 0 @pytest.mark.tier2 @@ -266,8 +279,8 @@ def test_positive_auto_attach_disabled_golden_ticket( rhel7_contenthost_class.install_katello_ca(target_sat) rhel7_contenthost_class.register_contenthost(module_org.label, golden_ticket_host_setup['name']) assert rhel7_contenthost_class.subscribed - host = Host.list({'search': rhel7_contenthost_class.hostname}) + host = target_sat.cli.Host.list({'search': rhel7_contenthost_class.hostname}) host_id = host[0]['id'] with pytest.raises(CLIReturnCodeError) as context: - Host.subscription_auto_attach({'host-id': host_id}) + target_sat.cli.Host.subscription_auto_attach({'host-id': host_id}) assert "This host's organization is in Simple Content Access mode" in str(context.value) diff --git a/tests/foreman/cli/test_syncplan.py b/tests/foreman/cli/test_syncplan.py index c22dcbe198e..322689a3a84 100644 --- a/tests/foreman/cli/test_syncplan.py +++ 
b/tests/foreman/cli/test_syncplan.py @@ -23,18 +23,8 @@ from nailgun import entities import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - CLIFactoryError, - make_product, - make_repository, - make_sync_plan, -) -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.syncplan import SyncPlan from robottelo.constants import PRDS, REPOS, REPOSET +from robottelo.exceptions import CLIFactoryError, CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.datafactory import ( filtered_datapoint, @@ -113,7 +103,7 @@ def validate_task_status(sat, repo_id, org_id, max_tries=10): ) -def validate_repo_content(repo, content_types, after_sync=True): +def validate_repo_content(sat, repo, content_types, after_sync=True): """Check whether corresponding content is present in repository before or after synchronization is performed @@ -123,7 +113,7 @@ def validate_repo_content(repo, content_types, after_sync=True): :param bool after_sync: Specify whether you perform validation before synchronization procedure is happened or after """ - repo = Repository.info({'id': repo['id']}) + repo = sat.cli.Repository.info({'id': repo['id']}) for content in content_types: count = int(repo['content-counts'][content]) assert count > 0 if after_sync else count == 0 @@ -131,7 +121,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Check if syncplan can be created with random names :id: dc0a86f7-4219-427e-92fd-29352dbdbfce @@ -142,14 +132,16 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - sync_plan = make_sync_plan({'enabled': 'false', 'name': name, 'organization-id': module_org.id}) - result = SyncPlan.info({'id': sync_plan['id']}) + sync_plan = module_target_sat.cli_factory.make_sync_plan( + {'enabled': 'false', 'name': name, 'organization-id': module_org.id} + ) + result = module_target_sat.cli.SyncPlan.info({'id': sync_plan['id']}) assert result['name'] == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, desc): +def test_positive_create_with_description(module_org, desc, module_target_sat): """Check if syncplan can be created with random description :id: a1bbe81b-60f5-4a19-b400-a02a23fa1dfa @@ -160,16 +152,16 @@ def test_positive_create_with_description(module_org, desc): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = module_target_sat.cli_factory.make_sync_plan( {'enabled': 'false', 'description': desc, 'organization-id': module_org.id} ) - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['description'] == desc @pytest.mark.parametrize('test_data', **parametrized(valid_name_interval_create_tests())) @pytest.mark.tier1 -def test_positive_create_with_interval(module_org, test_data): +def test_positive_create_with_interval(module_org, test_data, module_target_sat): """Check if syncplan can be created with varied intervals :id: 32eb0c1d-0c9a-4fb5-a185-68d0d705fbce @@ -180,7 +172,7 @@ def test_positive_create_with_interval(module_org, test_data): 
:CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = module_target_sat.cli_factory.make_sync_plan( { 'enabled': 'false', 'interval': test_data['interval'], @@ -188,14 +180,14 @@ def test_positive_create_with_interval(module_org, test_data): 'organization-id': module_org.id, } ) - result = SyncPlan.info({'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['name'] == test_data['name'] assert result['interval'] == test_data['interval'] @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_name(module_org, name): +def test_negative_create_with_name(module_org, name, module_target_sat): """Check if syncplan can be created with random invalid names :id: 4c1aee35-271e-4ed8-9369-d2abfea8cfd9 @@ -207,12 +199,14 @@ def test_negative_create_with_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(CLIFactoryError, match='Could not create the sync plan:'): - make_sync_plan({'enabled': 'false', 'name': name, 'organization-id': module_org.id}) + module_target_sat.cli_factory.make_sync_plan( + {'enabled': 'false', 'name': name, 'organization-id': module_org.id} + ) @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_org, new_desc): +def test_positive_update_description(module_org, new_desc, module_target_sat): """Check if syncplan description can be updated :id: 00a279cd-1f49-4ebb-a59a-6f0b4e4cb83c @@ -221,9 +215,11 @@ def test_positive_update_description(module_org, new_desc): :expectedresults: Sync plan is created and description is updated """ - new_sync_plan = make_sync_plan({'enabled': 'false', 'organization-id': module_org.id}) - SyncPlan.update({'description': new_desc, 'id': new_sync_plan['id']}) - result = SyncPlan.info({'id': new_sync_plan['id']}) + new_sync_plan = module_target_sat.cli_factory.make_sync_plan( + {'enabled': 'false', 'organization-id': module_org.id} + ) + module_target_sat.cli.SyncPlan.update({'description': new_desc, 'id': new_sync_plan['id']}) + result = module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['description'] == new_desc @@ -240,7 +236,7 @@ def test_positive_update_interval(module_org, test_data, request, target_sat): :CaseImportance: Critical """ - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'false', 'interval': test_data['interval'], @@ -250,8 +246,10 @@ def test_positive_update_interval(module_org, test_data, request, target_sat): ) sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - SyncPlan.update({'id': new_sync_plan['id'], 'interval': test_data['new-interval']}) - result = SyncPlan.info({'id': new_sync_plan['id']}) + target_sat.cli.SyncPlan.update( + {'id': new_sync_plan['id'], 'interval': test_data['new-interval']} + ) + result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']}) assert result['interval'] == test_data['new-interval'] @@ -271,7 +269,7 @@ def test_positive_update_sync_date(module_org, request, target_sat): # Set the sync date to today/right now today = datetime.now() sync_plan_name = gen_string('alphanumeric') - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'name': sync_plan_name, 'sync-date': today.strftime(SYNC_DATE_FMT), @@ 
-285,9 +283,11 @@ def test_positive_update_sync_date(module_org, request, target_sat):
     # Set sync date 5 days in the future
     future_date = today + timedelta(days=5)
     # Update sync interval
-    SyncPlan.update({'id': new_sync_plan['id'], 'sync-date': future_date.strftime(SYNC_DATE_FMT)})
+    target_sat.cli.SyncPlan.update(
+        {'id': new_sync_plan['id'], 'sync-date': future_date.strftime(SYNC_DATE_FMT)}
+    )
     # Fetch it
-    result = SyncPlan.info({'id': new_sync_plan['id']})
+    result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']})
     assert result['start-date'] != new_sync_plan['start-date']
     assert datetime.strptime(result['start-date'], '%Y/%m/%d %H:%M:%S') > datetime.strptime(
         new_sync_plan['start-date'], '%Y/%m/%d %H:%M:%S'
@@ -297,7 +297,7 @@ def test_positive_update_sync_date(module_org, request, target_sat):
 @pytest.mark.parametrize('name', **parametrized(valid_data_list()))
 @pytest.mark.tier1
 @pytest.mark.upgrade
-def test_positive_delete_by_id(module_org, name):
+def test_positive_delete_by_id(module_org, module_target_sat, name):
     """Check if syncplan can be created and deleted

     :id: b5d97c6b-aead-422b-8d9f-4a192bbe4a3b
@@ -308,10 +308,12 @@ def test_positive_delete_by_id(module_org, name):

     :CaseImportance: Critical
     """
-    new_sync_plan = make_sync_plan({'name': name, 'organization-id': module_org.id})
-    SyncPlan.delete({'id': new_sync_plan['id']})
+    new_sync_plan = module_target_sat.cli_factory.make_sync_plan(
+        {'name': name, 'organization-id': module_org.id}
+    )
+    module_target_sat.cli.SyncPlan.delete({'id': new_sync_plan['id']})
     with pytest.raises(CLIReturnCodeError):
-        SyncPlan.info({'id': new_sync_plan['id']})
+        module_target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']})


 @pytest.mark.tier1
@@ -324,16 +326,16 @@ def test_positive_info_enabled_field_is_displayed(module_org, request, target_sa

     :CaseImportance: Critical
     """
-    new_sync_plan = make_sync_plan({'organization-id': module_org.id})
+    new_sync_plan = target_sat.cli_factory.make_sync_plan({'organization-id': module_org.id})
     sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read()
     request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan))
-    result = SyncPlan.info({'id': new_sync_plan['id']})
+    result = target_sat.cli.SyncPlan.info({'id': new_sync_plan['id']})
     assert result.get('enabled') is not None


 @pytest.mark.tier2
 @pytest.mark.upgrade
-def test_positive_info_with_assigned_product(module_org):
+def test_positive_info_with_assigned_product(module_org, module_target_sat):
     """Verify that sync plan info command returns list of products which
     are assigned to that sync plan

@@ -352,7 +354,7 @@ def test_positive_info_with_assigned_product(module_org):
     """
     prod1 = gen_string('alpha')
     prod2 = gen_string('alpha')
-    sync_plan = make_sync_plan(
+    sync_plan = module_target_sat.cli_factory.make_sync_plan(
         {
             'enabled': 'false',
             'organization-id': module_org.id,
@@ -360,9 +362,13 @@ def test_positive_info_with_assigned_product(module_org):
         }
     )
     for prod_name in [prod1, prod2]:
-        product = make_product({'organization-id': module_org.id, 'name': prod_name})
-        Product.set_sync_plan({'id': product['id'], 'sync-plan-id': sync_plan['id']})
-    updated_plan = SyncPlan.info({'id': sync_plan['id']})
+        product = module_target_sat.cli_factory.make_product(
+            {'organization-id': module_org.id, 'name': prod_name}
+        )
+        module_target_sat.cli.Product.set_sync_plan(
+            {'id': product['id'], 'sync-plan-id': sync_plan['id']}
+        )
+    updated_plan = module_target_sat.cli.SyncPlan.info({'id': sync_plan['id']})
     assert
len(updated_plan['products']) == 2 assert {prod['name'] for prod in updated_plan['products']} == {prod1, prod2} @@ -381,7 +387,7 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, :CaseLevel: System """ - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'organization-id': module_org.id, @@ -390,9 +396,9 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, ) sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=2) @@ -414,9 +420,9 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) - new_sync_plan = make_sync_plan( + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'interval': 'hourly', @@ -429,7 +435,7 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -439,7 +445,7 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -448,7 +454,7 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier4 @@ -468,12 +474,12 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques cron_multiple = 5 # sync event is on every multiple of this value, starting from 00 mins delay = 
(cron_multiple) * 60 # delay for sync date in seconds guardtime = 180 # do not start test less than 3 mins before the next sync event - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'organization-id': module_org.id, @@ -486,9 +492,9 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Verify product is not synced and doesn't have any content - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -498,7 +504,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait the rest of expected time logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -507,7 +513,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier4 @@ -527,14 +533,18 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque cron_multiple = 5 # sync event is on every multiple of this value, starting from 00 mins delay = (cron_multiple) * 60 # delay for sync date in seconds guardtime = 210 # do not start test less than 3.5 mins before the next sync event - products = [make_product({'organization-id': module_org.id}) for _ in range(2)] + products = [ + target_sat.cli_factory.make_product({'organization-id': module_org.id}) for _ in range(2) + ] repos = [ - make_repository({'product-id': product['id']}) for product in products for _ in range(2) + target_sat.cli_factory.make_repository({'product-id': product['id']}) + for product in products + for _ in range(2) ] # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'organization-id': 
module_org.id, @@ -556,7 +566,9 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) # Associate sync plan with products for product in products: - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan( + {'id': product['id'], 'sync-plan-id': new_sync_plan['id']} + ) # Wait fifth of expected time logger.info( f"Waiting {(delay / 5)} seconds to check products {products[0]['name']}" @@ -576,7 +588,7 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque # Verify products were synced successfully for repo in repos: validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.run_in_one_thread @@ -600,7 +612,7 @@ def test_positive_synchronize_rh_product_past_sync_date( interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 org = function_entitlement_manifest_org - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], 'organization-id': org.id, @@ -609,11 +621,11 @@ def test_positive_synchronize_rh_product_past_sync_date( 'basearch': 'x86_64', } ) - product = Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) - repo = Repository.info( + product = target_sat.cli.Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) + repo = target_sat.cli.Repository.info( {'name': REPOS['rhva6']['name'], 'product': product['name'], 'organization-id': org.id} ) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'interval': 'hourly', @@ -626,7 +638,7 @@ def test_positive_synchronize_rh_product_past_sync_date( sync_plan = entities.SyncPlan(organization=org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -636,7 +648,7 @@ def test_positive_synchronize_rh_product_past_sync_date( # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait the rest of expected time logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -645,7 +657,7 @@ def test_positive_synchronize_rh_product_past_sync_date( sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], org.id) - validate_repo_content(repo, ['errata', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'packages']) @pytest.mark.run_in_one_thread @@ -669,7 +681,7 @@ def test_positive_synchronize_rh_product_future_sync_date( delay = (cron_multiple) * 60 # delay for sync date in seconds guardtime = 180 # do not start test less than 2 mins before the next sync event org = function_entitlement_manifest_org - RepositorySet.enable( + 
target_sat.cli.RepositorySet.enable( { 'name': REPOSET['rhva6'], 'organization-id': org.id, @@ -678,14 +690,14 @@ def test_positive_synchronize_rh_product_future_sync_date( 'basearch': 'x86_64', } ) - product = Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) - repo = Repository.info( + product = target_sat.cli.Product.info({'name': PRDS['rhel'], 'organization-id': org.id}) + repo = target_sat.cli.Repository.info( {'name': REPOS['rhva6']['name'], 'product': product['name'], 'organization-id': org.id} ) # if < 3 mins before the target event rather wait 3 mins for the next test window if int(datetime.utcnow().strftime('%M')) % (cron_multiple) > int(guardtime / 60): sleep(guardtime) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'organization-id': org.id, @@ -700,9 +712,9 @@ def test_positive_synchronize_rh_product_future_sync_date( # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait fifth of expected time logger.info( f"Waiting {(delay / 5)} seconds to check product {product['name']}" @@ -738,10 +750,10 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques :CaseLevel: System """ delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) start_date = datetime.utcnow() - timedelta(days=1) + timedelta(seconds=delay) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'interval': 'daily', @@ -752,7 +764,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -762,7 +774,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -771,7 +783,7 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - 
validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) @pytest.mark.tier3 @@ -789,10 +801,10 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque :CaseLevel: System """ delay = 2 * 60 - product = make_product({'organization-id': module_org.id}) - repo = make_repository({'product-id': product['id']}) + product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) + repo = target_sat.cli_factory.make_repository({'product-id': product['id']}) start_date = datetime.utcnow() - timedelta(weeks=1) + timedelta(seconds=delay) - new_sync_plan = make_sync_plan( + new_sync_plan = target_sat.cli_factory.make_sync_plan( { 'enabled': 'true', 'interval': 'weekly', @@ -803,7 +815,7 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque sync_plan = entities.SyncPlan(organization=module_org.id, id=new_sync_plan['id']).read() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) # Associate sync plan with product - Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) + target_sat.cli.Product.set_sync_plan({'id': product['id'], 'sync-plan-id': new_sync_plan['id']}) # Wait quarter of expected time logger.info( f"Waiting {(delay / 4)} seconds to check product {product['name']}" @@ -813,7 +825,7 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque # Verify product has not been synced yet with pytest.raises(AssertionError): validate_task_status(target_sat, repo['id'], module_org.id, max_tries=1) - validate_repo_content(repo, ['errata', 'packages'], after_sync=False) + validate_repo_content(target_sat, repo, ['errata', 'packages'], after_sync=False) # Wait until the first recurrence logger.info( f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}" @@ -822,4 +834,4 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque sleep(delay * 3 / 4) # Verify product was synced successfully validate_task_status(target_sat, repo['id'], module_org.id) - validate_repo_content(repo, ['errata', 'package-groups', 'packages']) + validate_repo_content(target_sat, repo, ['errata', 'package-groups', 'packages']) diff --git a/tests/foreman/cli/test_templatesync.py b/tests/foreman/cli/test_templatesync.py index c1fa8ab41dc..2facc9139de 100644 --- a/tests/foreman/cli/test_templatesync.py +++ b/tests/foreman/cli/test_templatesync.py @@ -21,8 +21,6 @@ import pytest import requests -from robottelo.cli.template import Template -from robottelo.cli.template_sync import TemplateSync from robottelo.config import settings from robottelo.constants import ( FOREMAN_TEMPLATE_IMPORT_URL, @@ -83,7 +81,7 @@ def test_positive_import_force_locked_template( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - TemplateSync.imports( + target_sat.cli.TemplateSync.imports( {'repo': dir_path, 'prefix': prefix, 'organization-ids': module_org.id, 'lock': 'true'} ) ptemplate = entities.ProvisioningTemplate().search( @@ -93,11 +91,13 @@ def test_positive_import_force_locked_template( assert ptemplate[0].read().locked update_txt = 'updated a little' target_sat.execute(f"echo {update_txt} >> {dir_path}/example_template.erb") - TemplateSync.imports( + target_sat.cli.TemplateSync.imports( {'repo': dir_path, 'prefix': prefix, 'organization-id': module_org.id} ) - assert update_txt not in Template.dump({'name': f'{prefix}example 
template'}) - TemplateSync.imports( + assert update_txt not in target_sat.cli.Template.dump( + {'name': f'{prefix}example template'} + ) + target_sat.cli.TemplateSync.imports( { 'repo': dir_path, 'prefix': prefix, @@ -105,7 +105,7 @@ def test_positive_import_force_locked_template( 'force': 'true', } ) - assert update_txt in Template.dump({'name': f'{prefix}example template'}) + assert update_txt in target_sat.cli.Template.dump({'name': f'{prefix}example template'}) else: pytest.fail('The template is not imported for force test') @@ -126,7 +126,9 @@ def test_positive_import_force_locked_template( indirect=True, ids=['non_empty_repo'], ) - def test_positive_update_templates_in_git(self, module_org, git_repository, git_branch, url): + def test_positive_update_templates_in_git( + self, module_org, git_repository, git_branch, url, module_target_sat + ): """Assure only templates with a given filter are pushed to git repository and existing template file is updated. @@ -159,7 +161,7 @@ def test_positive_update_templates_in_git(self, module_org, git_repository, git_ assert res.status_code == 201 # export template to git url = f'{url}/{git.username}/{git_repository["name"]}' - output = TemplateSync.exports( + output = module_target_sat.cli.TemplateSync.exports( { 'repo': url, 'branch': git_branch, @@ -192,7 +194,7 @@ def test_positive_update_templates_in_git(self, module_org, git_repository, git_ ids=['non_empty_repo', 'empty_repo'], ) def test_positive_export_filtered_templates_to_git( - self, module_org, git_repository, git_branch, url + self, module_org, git_repository, git_branch, url, module_target_sat ): """Assure only templates with a given filter regex are pushed to git repository. @@ -213,7 +215,7 @@ def test_positive_export_filtered_templates_to_git( """ dirname = 'export' url = f'{url}/{git.username}/{git_repository["name"]}' - output = TemplateSync.exports( + output = module_target_sat.cli.TemplateSync.exports( { 'repo': url, 'branch': git_branch, @@ -247,7 +249,7 @@ def test_positive_export_filtered_templates_to_temp_dir(self, module_org, target :CaseImportance: Medium """ dir_path = '/tmp' - output = TemplateSync.exports( + output = target_sat.cli.TemplateSync.exports( {'repo': dir_path, 'organization-id': module_org.id, 'filter': 'ansible'} ).split('\n') exported_count = [row == 'Exported: true' for row in output].count(True) diff --git a/tests/foreman/cli/test_user.py b/tests/foreman/cli/test_user.py index 9f3537b139a..6edc7969031 100644 --- a/tests/foreman/cli/test_user.py +++ b/tests/foreman/cli/test_user.py @@ -30,19 +30,9 @@ from nailgun import entities import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - make_filter, - make_location, - make_org, - make_role, - make_user, -) -from robottelo.cli.filter import Filter -from robottelo.cli.org import Org -from robottelo.cli.user import User from robottelo.config import settings from robottelo.constants import LOCALES +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils import gen_ssh_keypairs from robottelo.utils.datafactory import ( parametrized, @@ -56,7 +46,7 @@ class TestUser: """Implements Users tests in CLI""" @pytest.fixture(scope='module') - def module_roles(self): + def module_roles(self, module_target_sat): """ Initializes class attribute ``dct_roles`` with several random roles saved on sat. 
roles is a dict so keys are role's id respective value is @@ -69,14 +59,14 @@ def roles_helper(): tests """ for role_name in valid_usernames_list() + include_list: - yield make_role({'name': role_name}) + yield module_target_sat.cli_factory.make_role({'name': role_name}) stubbed_roles = {role['id']: role for role in roles_helper()} yield stubbed_roles @pytest.mark.parametrize('email', **parametrized(valid_emails_list())) @pytest.mark.tier2 - def test_positive_CRUD(self, email): + def test_positive_CRUD(self, email, module_target_sat): """Create User with various parameters, updating and deleting :id: 2d430243-8512-46ee-8d21-7ccf0c7af807 @@ -99,7 +89,7 @@ def test_positive_CRUD(self, email): 'mail': mail.replace('"', r'\"').replace('`', r'\`'), 'description': random.choice(list(valid_data_list().values())), } - user = make_user(user_params) + user = module_target_sat.cli_factory.make_user(user_params) user['firstname'], user['lastname'] = user['name'].split() user_params.pop('mail') user_params['email'] = mail @@ -107,14 +97,18 @@ def test_positive_CRUD(self, email): assert user_params[key] == user[key], f'values for key "{key}" do not match' # list by firstname and lastname - result = User.list({'search': 'firstname = {}'.format(user_params['firstname'])}) + result = module_target_sat.cli.User.list( + {'search': 'firstname = {}'.format(user_params['firstname'])} + ) # make sure user is in list result assert {user['id'], user['login'], user['name']} == { result[0]['id'], result[0]['login'], result[0]['name'], } - result = User.list({'search': 'lastname = {}'.format(user_params['lastname'])}) + result = module_target_sat.cli.User.list( + {'search': 'lastname = {}'.format(user_params['lastname'])} + ) # make sure user is in list result assert {user['id'], user['login'], user['name']} == { result[0]['id'], @@ -130,21 +124,21 @@ def test_positive_CRUD(self, email): 'description': random.choice(list(valid_data_list().values())), } user_params.update({'id': user['id']}) - User.update(user_params) - user = User.info({'login': user['login']}) + module_target_sat.cli.User.update(user_params) + user = module_target_sat.cli.User.info({'login': user['login']}) user['firstname'], user['lastname'] = user['name'].split() user_params.pop('mail') user_params['email'] = new_mail for key in user_params: assert user_params[key] == user[key], f'values for key "{key}" do not match' # delete - User.delete({'login': user['login']}) + module_target_sat.cli.User.delete({'login': user['login']}) with pytest.raises(CLIReturnCodeError): - User.info({'login': user['login']}) + module_target_sat.cli.User.info({'login': user['login']}) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_CRUD_admin(self): + def test_positive_CRUD_admin(self, target_sat): """Create an Admin user :id: 0d0384ad-d85a-492e-8630-7f48912a4fd5 @@ -153,23 +147,23 @@ def test_positive_CRUD_admin(self): :CaseImportance: Critical """ - user = make_user({'admin': '1'}) + user = target_sat.cli_factory.make_user({'admin': '1'}) assert user['admin'] == 'yes' # update to non admin by id - User.update({'id': user['id'], 'admin': '0'}) - user = User.info({'id': user['id']}) + target_sat.cli.User.update({'id': user['id'], 'admin': '0'}) + user = target_sat.cli.User.info({'id': user['id']}) assert user['admin'] == 'no' # update back to admin by name - User.update({'login': user['login'], 'admin': '1'}) - user = User.info({'login': user['login']}) + target_sat.cli.User.update({'login': user['login'], 'admin': '1'}) + user = 
target_sat.cli.User.info({'login': user['login']}) assert user['admin'] == 'yes' # delete user - User.delete({'login': user['login']}) + target_sat.cli.User.delete({'login': user['login']}) with pytest.raises(CLIReturnCodeError): - User.info({'id': user['id']}) + target_sat.cli.User.info({'id': user['id']}) @pytest.mark.tier1 - def test_positive_create_with_default_loc(self): + def test_positive_create_with_default_loc(self, target_sat): """Check if user with default location can be created :id: efe7256d-8c8f-444c-8d59-43500e1319c3 @@ -178,13 +172,15 @@ def test_positive_create_with_default_loc(self): :CaseImportance: Critical """ - location = make_location() - user = make_user({'default-location-id': location['id'], 'location-ids': location['id']}) + location = target_sat.cli_factory.make_location() + user = target_sat.cli_factory.make_user( + {'default-location-id': location['id'], 'location-ids': location['id']} + ) assert location['name'] in user['locations'] assert location['name'] == user['default-location'] @pytest.mark.tier1 - def test_positive_create_with_defaut_org(self): + def test_positive_create_with_defaut_org(self, module_target_sat): """Check if user with default organization can be created :id: cc692b6f-2519-429b-8ecb-c4bb51ed3544 @@ -194,13 +190,15 @@ def test_positive_create_with_defaut_org(self): :CaseImportance: Critical """ - org = make_org() - user = make_user({'default-organization-id': org['id'], 'organization-ids': org['id']}) + org = module_target_sat.cli_factory.make_org() + user = module_target_sat.cli_factory.make_user( + {'default-organization-id': org['id'], 'organization-ids': org['id']} + ) assert org['name'] in user['organizations'] assert org['name'] == user['default-organization'] @pytest.mark.tier2 - def test_positive_create_with_orgs_and_update(self): + def test_positive_create_with_orgs_and_update(self, module_target_sat): """Create User associated to multiple Organizations, update them :id: f537296c-a8a8-45ef-8996-c1d32b8f64de @@ -210,19 +208,23 @@ def test_positive_create_with_orgs_and_update(self): :CaseLevel: Integration """ orgs_amount = 2 - orgs = [make_org() for _ in range(orgs_amount)] - user = make_user({'organization-ids': [org['id'] for org in orgs]}) + orgs = [module_target_sat.cli_factory.make_org() for _ in range(orgs_amount)] + user = module_target_sat.cli_factory.make_user( + {'organization-ids': [org['id'] for org in orgs]} + ) assert len(user['organizations']) == orgs_amount for org in orgs: assert org['name'] in user['organizations'] - orgs = [make_org() for _ in range(orgs_amount)] - User.update({'id': user['id'], 'organization-ids': [org['id'] for org in orgs]}) - user = User.info({'id': user['id']}) + orgs = [module_target_sat.cli_factory.make_org() for _ in range(orgs_amount)] + module_target_sat.cli.User.update( + {'id': user['id'], 'organization-ids': [org['id'] for org in orgs]} + ) + user = module_target_sat.cli.User.info({'id': user['id']}) for org in orgs: assert org['name'] in user['organizations'] @pytest.mark.tier1 - def test_negative_delete_internal_admin(self): + def test_negative_delete_internal_admin(self, module_target_sat): """Attempt to delete internal admin user :id: 4fc92958-9e75-4bd2-bcbe-32f906e432f5 @@ -232,11 +234,11 @@ def test_negative_delete_internal_admin(self): :CaseImportance: Critical """ with pytest.raises(CLIReturnCodeError): - User.delete({'login': settings.server.admin_username}) - assert User.info({'login': settings.server.admin_username}) + module_target_sat.cli.User.delete({'login': 
settings.server.admin_username}) + assert module_target_sat.cli.User.info({'login': settings.server.admin_username}) @pytest.mark.tier2 - def test_positive_last_login_for_new_user(self): + def test_positive_last_login_for_new_user(self, module_target_sat): """Create new user with admin role and check last login updated for that user :id: 967282d3-92d0-42ce-9ef3-e542d2883408 @@ -253,17 +255,19 @@ def test_positive_last_login_for_new_user(self): password = gen_string('alpha') org_name = gen_string('alpha') - make_user({'login': login, 'password': password}) - User.add_role({'login': login, 'role': 'System admin'}) - result_before_login = User.list({'search': f'login = {login}'}) + module_target_sat.cli_factory.make_user({'login': login, 'password': password}) + module_target_sat.cli.User.add_role({'login': login, 'role': 'System admin'}) + result_before_login = module_target_sat.cli.User.list({'search': f'login = {login}'}) # this is because satellite uses the UTC timezone before_login_time = datetime.datetime.utcnow() assert result_before_login[0]['login'] == login assert result_before_login[0]['last-login'] == "" - Org.with_user(username=login, password=password).create({'name': org_name}) - result_after_login = User.list({'search': f'login = {login}'}) + module_target_sat.cli.Org.with_user(username=login, password=password).create( + {'name': org_name} + ) + result_after_login = module_target_sat.cli.User.list({'search': f'login = {login}'}) # checking user last login should not be empty assert result_after_login[0]['last-login'] != "" @@ -273,7 +277,7 @@ def test_positive_last_login_for_new_user(self): assert after_login_time > before_login_time @pytest.mark.tier1 - def test_positive_update_all_locales(self): + def test_positive_update_all_locales(self, module_target_sat): """Update Language in My Account :id: f0993495-5117-461d-a116-44867b820139 @@ -286,14 +290,14 @@ def test_positive_update_all_locales(self): :CaseImportance: Critical """ - user = make_user() + user = module_target_sat.cli_factory.make_user() for locale in LOCALES: - User.update({'id': user['id'], 'locale': locale}) - assert locale == User.info({'id': user['id']})['locale'] + module_target_sat.cli.User.update({'id': user['id'], 'locale': locale}) + assert locale == module_target_sat.cli.User.info({'id': user['id']})['locale'] @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_add_and_delete_roles(self, module_roles): + def test_positive_add_and_delete_roles(self, module_roles, module_target_sat): """Add multiple roles to User, then delete them For now add-role user sub command does not allow multiple role ids @@ -309,15 +313,15 @@ def test_positive_add_and_delete_roles(self, module_roles): :CaseLevel: Integration """ - user = make_user() + user = module_target_sat.cli_factory.make_user() original_role_names = set(user['roles']) expected_role_names = set(original_role_names) for role_id, role in module_roles.items(): - User.add_role({'login': user['login'], 'role-id': role_id}) + module_target_sat.cli.User.add_role({'login': user['login'], 'role-id': role_id}) expected_role_names.add(role['name']) - user_roles = User.info({'id': user['id']})['roles'] + user_roles = module_target_sat.cli.User.info({'id': user['id']})['roles'] assert len(expected_role_names) == len(user_roles) for role in expected_role_names: assert role in user_roles @@ -325,11 +329,11 @@ def test_positive_add_and_delete_roles(self, module_roles): roles_to_remove = expected_role_names - original_role_names for role_name in roles_to_remove: 
user_credentials = {'login': user['login'], 'role': role_name} - User.remove_role(user_credentials) - user = User.info({'id': user['id']}) + module_target_sat.cli.User.remove_role(user_credentials) + user = module_target_sat.cli.User.info({'id': user['id']}) assert role_name not in user['roles'] - user_roles = User.info({'id': user['id']})['roles'] + user_roles = module_target_sat.cli.User.info({'id': user['id']})['roles'] assert len(original_role_names) == len(user_roles) for role in original_role_names: assert role in user_roles @@ -346,7 +350,7 @@ def module_user(self): return entities.User().create() @pytest.mark.tier1 - def test_positive_CRD_ssh_key(self, module_user): + def test_positive_CRD_ssh_key(self, module_user, module_target_sat): """SSH Key can be added to a User, listed and deletd :id: 57304fca-8e0d-454a-be31-34423345c8b2 @@ -357,13 +361,19 @@ def test_positive_CRD_ssh_key(self, module_user): :CaseImportance: Critical """ ssh_name = gen_string('alpha') - User.ssh_keys_add({'user': module_user.login, 'key': self.ssh_key, 'name': ssh_name}) - result = User.ssh_keys_list({'user-id': module_user.id}) + module_target_sat.cli.User.ssh_keys_add( + {'user': module_user.login, 'key': self.ssh_key, 'name': ssh_name} + ) + result = module_target_sat.cli.User.ssh_keys_list({'user-id': module_user.id}) assert ssh_name in [i['name'] for i in result] - result = User.ssh_keys_info({'user-id': module_user.id, 'name': ssh_name}) + result = module_target_sat.cli.User.ssh_keys_info( + {'user-id': module_user.id, 'name': ssh_name} + ) assert self.ssh_key in result[0]['public-key'] - result = User.ssh_keys_delete({'user-id': module_user.id, 'name': ssh_name}) - result = User.ssh_keys_list({'user-id': module_user.id}) + result = module_target_sat.cli.User.ssh_keys_delete( + {'user-id': module_user.id, 'name': ssh_name} + ) + result = module_target_sat.cli.User.ssh_keys_list({'user-id': module_user.id}) assert ssh_name not in [i['name'] for i in result] @pytest.mark.tier1 @@ -380,10 +390,12 @@ def test_positive_create_ssh_key_super_admin_from_file(self, target_sat): ssh_name = gen_string('alpha') result = target_sat.execute(f"echo '{self.ssh_key}' > test_key.pub") assert result.status == 0, 'key file not created' - User.ssh_keys_add({'user': 'admin', 'key-file': 'test_key.pub', 'name': ssh_name}) - result = User.ssh_keys_list({'user': 'admin'}) + target_sat.cli.User.ssh_keys_add( + {'user': 'admin', 'key-file': 'test_key.pub', 'name': ssh_name} + ) + result = target_sat.cli.User.ssh_keys_list({'user': 'admin'}) assert ssh_name in [i['name'] for i in result] - result = User.ssh_keys_info({'user': 'admin', 'name': ssh_name}) + result = target_sat.cli.User.ssh_keys_info({'user': 'admin', 'name': ssh_name}) assert self.ssh_key == result[0]['public-key'] @@ -409,9 +421,9 @@ def test_personal_access_token_admin_user(self, target_sat): :CaseImportance: High """ - user = make_user({'admin': '1'}) + user = target_sat.cli_factory.make_user({'admin': '1'}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -419,7 +431,9 @@ def test_personal_access_token_admin_user(self, target_sat): command_output = target_sat.execute(curl_command) assert user['login'] in command_output.stdout assert user['email'] in command_output.stdout - User.access_token(action="revoke", options={'name': token_name, 'user-id': user['id']}) + 
target_sat.cli.User.access_token( + action="revoke", options={'name': token_name, 'user-id': user['id']} + ) command_output = target_sat.execute(curl_command) assert f'Unable to authenticate user {user["login"]}' in command_output.stdout @@ -445,10 +459,10 @@ def test_positive_personal_access_token_user_with_role(self, target_sat): :CaseImportance: High """ - user = make_user() - User.add_role({'login': user['login'], 'role': 'Viewer'}) + user = target_sat.cli_factory.make_user() + target_sat.cli.User.add_role({'login': user['login'], 'role': 'Viewer'}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -479,13 +493,13 @@ def test_expired_personal_access_token(self, target_sat): :CaseImportance: Medium """ - user = make_user() - User.add_role({'login': user['login'], 'role': 'Viewer'}) + user = target_sat.cli_factory.make_user() + target_sat.cli.User.add_role({'login': user['login'], 'role': 'Viewer'}) token_name = gen_alphanumeric() datetime_now = datetime.datetime.utcnow() datetime_expire = datetime_now + datetime.timedelta(seconds=20) datetime_expire = datetime_expire.strftime("%Y-%m-%d %H:%M:%S") - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id'], 'expires-at': datetime_expire}, ) @@ -521,20 +535,20 @@ def test_custom_personal_access_token_role(self, target_sat): :BZ: 1974685, 1996048 """ - role = make_role() + role = target_sat.cli_factory.make_role() permissions = [ permission['name'] - for permission in Filter.available_permissions( + for permission in target_sat.cli.Filter.available_permissions( {'search': 'resource_type=PersonalAccessToken'} ) ] permissions = ','.join(permissions) - make_filter({'role-id': role['id'], 'permissions': permissions}) - make_filter({'role-id': role['id'], 'permissions': 'view_users'}) - user = make_user() - User.add_role({'login': user['login'], 'role': role['name']}) + target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': permissions}) + target_sat.cli_factory.make_filter({'role-id': role['id'], 'permissions': 'view_users'}) + user = target_sat.cli_factory.make_user() + target_sat.cli.User.add_role({'login': user['login'], 'role': role['name']}) token_name = gen_alphanumeric() - result = User.access_token( + result = target_sat.cli.User.access_token( action="create", options={'name': token_name, 'user-id': user['id']} ) token_value = result[0]['message'].split(':')[-1] @@ -543,7 +557,9 @@ def test_custom_personal_access_token_role(self, target_sat): ) assert user['login'] in command_output.stdout assert user['email'] in command_output.stdout - User.access_token(action="revoke", options={'name': token_name, 'user-id': user['id']}) + target_sat.cli.User.access_token( + action="revoke", options={'name': token_name, 'user-id': user['id']} + ) command_output = target_sat.execute( f'curl -k -u {user["login"]}:{token_value} {target_sat.url}/api/v2/users' ) diff --git a/tests/foreman/cli/test_usergroup.py b/tests/foreman/cli/test_usergroup.py index 57e3c8b9b17..b4852a95588 100644 --- a/tests/foreman/cli/test_usergroup.py +++ b/tests/foreman/cli/test_usergroup.py @@ -20,29 +20,19 @@ import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.factory import ( - make_role, - make_user, - make_usergroup, - make_usergroup_external, -) -from 
robottelo.cli.ldapauthsource import LDAPAuthSource -from robottelo.cli.task import Task -from robottelo.cli.user import User -from robottelo.cli.usergroup import UserGroup, UserGroupExternal +from robottelo.exceptions import CLIReturnCodeError from robottelo.utils.datafactory import valid_usernames_list @pytest.fixture(scope='function') -def function_user_group(): +def function_user_group(target_sat): """Create new usergroup per each test""" - user_group = make_usergroup() + user_group = target_sat.cli_factory.make_usergroup() yield user_group @pytest.mark.tier1 -def test_positive_CRUD(): +def test_positive_CRUD(module_target_sat): """Create new user group with valid elements that attached group. List the user group, update and delete it. @@ -53,14 +43,14 @@ def test_positive_CRUD(): :CaseImportance: Critical """ - user = make_user() + user = module_target_sat.cli_factory.make_user() ug_name = random.choice(valid_usernames_list()) role_name = random.choice(valid_usernames_list()) - role = make_role({'name': role_name}) - sub_user_group = make_usergroup() + role = module_target_sat.cli_factory.make_role({'name': role_name}) + sub_user_group = module_target_sat.cli_factory.make_usergroup() # Create - user_group = make_usergroup( + user_group = module_target_sat.cli_factory.make_usergroup( { 'user-ids': user['id'], 'name': ug_name, @@ -76,24 +66,26 @@ def test_positive_CRUD(): assert user_group['user-groups'][0]['usergroup'] == sub_user_group['name'] # List - result_list = UserGroup.list({'search': 'name={}'.format(user_group['name'])}) + result_list = module_target_sat.cli.UserGroup.list( + {'search': 'name={}'.format(user_group['name'])} + ) assert len(result_list) > 0 - assert UserGroup.exists(search=('name', user_group['name'])) + assert module_target_sat.cli.UserGroup.exists(search=('name', user_group['name'])) # Update new_name = random.choice(valid_usernames_list()) - UserGroup.update({'id': user_group['id'], 'new-name': new_name}) - user_group = UserGroup.info({'id': user_group['id']}) + module_target_sat.cli.UserGroup.update({'id': user_group['id'], 'new-name': new_name}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert user_group['name'] == new_name # Delete - UserGroup.delete({'name': user_group['name']}) + module_target_sat.cli.UserGroup.delete({'name': user_group['name']}) with pytest.raises(CLIReturnCodeError): - UserGroup.info({'name': user_group['name']}) + module_target_sat.cli.UserGroup.info({'name': user_group['name']}) @pytest.mark.tier1 -def test_positive_create_with_multiple_elements(): +def test_positive_create_with_multiple_elements(module_target_sat): """Create new user group using multiple users, roles and user groups attached to that group. 
@@ -105,17 +97,19 @@ def test_positive_create_with_multiple_elements(): :CaseImportance: Critical """ count = 2 - users = [make_user()['login'] for _ in range(count)] - roles = [make_role()['name'] for _ in range(count)] - sub_user_groups = [make_usergroup()['name'] for _ in range(count)] - user_group = make_usergroup({'users': users, 'roles': roles, 'user-groups': sub_user_groups}) + users = [module_target_sat.cli_factory.make_user()['login'] for _ in range(count)] + roles = [module_target_sat.cli_factory.make_role()['name'] for _ in range(count)] + sub_user_groups = [module_target_sat.cli_factory.make_usergroup()['name'] for _ in range(count)] + user_group = module_target_sat.cli_factory.make_usergroup( + {'users': users, 'roles': roles, 'user-groups': sub_user_groups} + ) assert sorted(users) == sorted(user_group['users']) assert sorted(roles) == sorted(user_group['roles']) assert sorted(sub_user_groups) == sorted(ug['usergroup'] for ug in user_group['user-groups']) @pytest.mark.tier2 -def test_positive_add_and_remove_elements(): +def test_positive_add_and_remove_elements(module_target_sat): """Create new user group. Add and remove several element from the group. :id: a4ce8724-d3c8-4c00-9421-aaa40394134d @@ -127,17 +121,19 @@ def test_positive_add_and_remove_elements(): :CaseLevel: Integration """ - role = make_role() - user_group = make_usergroup() - user = make_user() - sub_user_group = make_usergroup() + role = module_target_sat.cli_factory.make_role() + user_group = module_target_sat.cli_factory.make_usergroup() + user = module_target_sat.cli_factory.make_user() + sub_user_group = module_target_sat.cli_factory.make_usergroup() # Add elements by id - UserGroup.add_role({'id': user_group['id'], 'role-id': role['id']}) - UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) - UserGroup.add_user_group({'id': user_group['id'], 'user-group-id': sub_user_group['id']}) + module_target_sat.cli.UserGroup.add_role({'id': user_group['id'], 'role-id': role['id']}) + module_target_sat.cli.UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) + module_target_sat.cli.UserGroup.add_user_group( + {'id': user_group['id'], 'user-group-id': sub_user_group['id']} + ) - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['roles']) == 1 assert user_group['roles'][0] == role['name'] assert len(user_group['users']) == 1 @@ -146,11 +142,13 @@ def test_positive_add_and_remove_elements(): assert user_group['user-groups'][0]['usergroup'] == sub_user_group['name'] # Remove elements by name - UserGroup.remove_role({'id': user_group['id'], 'role': role['name']}) - UserGroup.remove_user({'id': user_group['id'], 'user': user['login']}) - UserGroup.remove_user_group({'id': user_group['id'], 'user-group': sub_user_group['name']}) + module_target_sat.cli.UserGroup.remove_role({'id': user_group['id'], 'role': role['name']}) + module_target_sat.cli.UserGroup.remove_user({'id': user_group['id'], 'user': user['login']}) + module_target_sat.cli.UserGroup.remove_user_group( + {'id': user_group['id'], 'user-group': sub_user_group['name']} + ) - user_group = UserGroup.info({'id': user_group['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert len(user_group['roles']) == 0 assert len(user_group['users']) == 0 assert len(user_group['user-groups']) == 0 @@ -158,7 +156,7 @@ def test_positive_add_and_remove_elements(): @pytest.mark.tier2 @pytest.mark.upgrade -def 
test_positive_remove_user_assigned_to_usergroup(): +def test_positive_remove_user_assigned_to_usergroup(module_target_sat): """Create new user and assign it to user group. Then remove that user. :id: 2a2623ce-4723-4402-aae7-8675473fd8bd @@ -171,17 +169,17 @@ def test_positive_remove_user_assigned_to_usergroup(): :BZ: 1667704 """ - user = make_user() - user_group = make_usergroup() - UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) - User.delete({'id': user['id']}) - user_group = UserGroup.info({'id': user_group['id']}) + user = module_target_sat.cli_factory.make_user() + user_group = module_target_sat.cli_factory.make_usergroup() + module_target_sat.cli.UserGroup.add_user({'id': user_group['id'], 'user-id': user['id']}) + module_target_sat.cli.User.delete({'id': user['id']}) + user_group = module_target_sat.cli.UserGroup.info({'id': user_group['id']}) assert user['login'] not in user_group['users'] @pytest.mark.tier2 @pytest.mark.parametrize("ldap_auth_source", ["AD"], indirect=True) -def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): +def test_positive_automate_bz1426957(ldap_auth_source, function_user_group, target_sat): """Verify role is properly reflected on AD user. :id: 1c1209a6-5bb8-489c-a151-bb2fce4dbbfc @@ -196,7 +194,7 @@ def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): :BZ: 1426957, 1667704 """ - ext_user_group = make_usergroup_external( + ext_user_group = target_sat.cli_factory.make_usergroup_external( { 'auth-source-id': ldap_auth_source[1].id, 'user-group-id': function_user_group['id'], @@ -204,23 +202,26 @@ def test_positive_automate_bz1426957(ldap_auth_source, function_user_group): } ) assert ext_user_group['auth-source'] == ldap_auth_source[1].name - role = make_role() - UserGroup.add_role({'id': function_user_group['id'], 'role-id': role['id']}) - Task.with_user( + role = target_sat.cli_factory.make_role() + target_sat.cli.UserGroup.add_role({'id': function_user_group['id'], 'role-id': role['id']}) + target_sat.cli.Task.with_user( username=ldap_auth_source[0]['ldap_user_name'], password=ldap_auth_source[0]['ldap_user_passwd'], ).list() - UserGroupExternal.refresh({'user-group-id': function_user_group['id'], 'name': 'foobargroup'}) + target_sat.cli.UserGroupExternal.refresh( + {'user-group-id': function_user_group['id'], 'name': 'foobargroup'} + ) assert ( - role['name'] in User.info({'login': ldap_auth_source[0]['ldap_user_name']})['user-groups'] + role['name'] + in target_sat.cli.User.info({'login': ldap_auth_source[0]['ldap_user_name']})['user-groups'] ) - User.delete({'login': ldap_auth_source[0]['ldap_user_name']}) - LDAPAuthSource.delete({'id': ldap_auth_source[1].id}) + target_sat.cli.User.delete({'login': ldap_auth_source[0]['ldap_user_name']}) + target_sat.cli.LDAPAuthSource.delete({'id': ldap_auth_source[1].id}) @pytest.mark.tier2 @pytest.mark.parametrize("ldap_auth_source", ["AD"], indirect=True) -def test_negative_automate_bz1437578(ldap_auth_source, function_user_group): +def test_negative_automate_bz1437578(ldap_auth_source, function_user_group, module_target_sat): """Verify error message on usergroup create with 'Domain Users' on AD user. 
:id: d4caf33e-b9eb-4281-9e04-fbe1d5b035dc @@ -234,7 +235,7 @@ def test_negative_automate_bz1437578(ldap_auth_source, function_user_group): :BZ: 1437578 """ with pytest.raises(CLIReturnCodeError): - result = UserGroupExternal.create( + result = module_target_sat.cli.UserGroupExternal.create( { 'auth-source-id': ldap_auth_source[1].id, 'user-group-id': function_user_group['id'], diff --git a/tests/foreman/cli/test_vm_install_products_package.py b/tests/foreman/cli/test_vm_install_products_package.py index ad17cdaf5f0..e71a9e8c4e9 100644 --- a/tests/foreman/cli/test_vm_install_products_package.py +++ b/tests/foreman/cli/test_vm_install_products_package.py @@ -19,7 +19,6 @@ from broker import Broker import pytest -from robottelo.cli.factory import make_lifecycle_environment from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, @@ -31,8 +30,10 @@ @pytest.fixture -def lce(function_entitlement_manifest_org): - return make_lifecycle_environment({'organization-id': function_entitlement_manifest_org.id}) +def lce(function_entitlement_manifest_org, target_sat): + return target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': function_entitlement_manifest_org.id} + ) @pytest.mark.tier4 diff --git a/tests/foreman/cli/test_webhook.py b/tests/foreman/cli/test_webhook.py index 3ca83dbe70f..adba480637f 100644 --- a/tests/foreman/cli/test_webhook.py +++ b/tests/foreman/cli/test_webhook.py @@ -23,13 +23,12 @@ from fauxfactory import gen_alphanumeric import pytest -from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.webhook import Webhook from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS +from robottelo.exceptions import CLIReturnCodeError @pytest.fixture(scope='function') -def webhook_factory(request, class_org, class_location): +def webhook_factory(request, class_org, class_location, class_target_sat): def _create_webhook(org, loc, options=None): """Function for creating a new Webhook @@ -49,7 +48,7 @@ def _create_webhook(org, loc, options=None): if options.get('target-url') is None: options['target-url'] = 'http://localhost/some-path' - return Box(Webhook.create(options)) + return Box(class_target_sat.cli.Webhook.create(options)) return partial(_create_webhook, org=class_org, loc=class_location) @@ -63,7 +62,7 @@ def assert_created(options, hook): class TestWebhook: @pytest.mark.tier3 @pytest.mark.e2e - def test_positive_end_to_end(self, webhook_factory): + def test_positive_end_to_end(self, webhook_factory, class_target_sat): """Test creation, list, update and removal of webhook :id: d893d176-cbe9-421b-8631-7c7a1a462ea5 @@ -81,22 +80,28 @@ def test_positive_end_to_end(self, webhook_factory): assert webhook_options['event'] == webhook_item['event'].rsplit('.', 2)[0] # Find webhook by name - webhook_search = Webhook.info({'name': webhook_options['name']}) + webhook_search = class_target_sat.cli.Webhook.info({'name': webhook_options['name']}) # A non empty dict has been returned assert webhook_search # Test that webhook gets updated different_url = 'http://localhost/different-path' - Webhook.update({'name': webhook_options['name'], 'target-url': different_url}) - webhook_search_after_update = Webhook.info({'name': webhook_options['name']}) + class_target_sat.cli.Webhook.update( + {'name': webhook_options['name'], 'target-url': different_url} + ) + webhook_search_after_update = class_target_sat.cli.Webhook.info( + {'name': webhook_options['name']} + ) assert webhook_search_after_update['target-url'] == different_url # 
Test that webhook is deleted - Webhook.delete({'name': webhook_options['name']}) - webhook_deleted_search = Webhook.list({'search': webhook_options['name']}) + class_target_sat.cli.Webhook.delete({'name': webhook_options['name']}) + webhook_deleted_search = class_target_sat.cli.Webhook.list( + {'search': webhook_options['name']} + ) assert len(webhook_deleted_search) == 0 - def test_webhook_disabled_enabled(self, webhook_factory): + def test_webhook_disabled_enabled(self, webhook_factory, class_target_sat): """Test disable/enable the webhook :id: 4fef4320-0655-440d-90e7-150ffcdcd043 @@ -106,17 +111,17 @@ def test_webhook_disabled_enabled(self, webhook_factory): hook = webhook_factory() # The new webhook is enabled by default on creation - assert Webhook.info({'name': hook.name})['enabled'] == 'yes' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'yes' - Webhook.update({'name': hook.name, 'enabled': 'no'}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'enabled': 'no'}) # The webhook should be disabled now - assert Webhook.info({'name': hook.name})['enabled'] == 'no' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'no' - Webhook.update({'name': hook.name, 'enabled': 'yes'}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'enabled': 'yes'}) # The webhook should be enabled again - assert Webhook.info({'name': hook.name})['enabled'] == 'yes' + assert class_target_sat.cli.Webhook.info({'name': hook.name})['enabled'] == 'yes' - def test_negative_update_invalid_url(self, webhook_factory): + def test_negative_update_invalid_url(self, webhook_factory, class_target_sat): """Test webhook negative update - invalid target URL fails :id: 7a6c87f5-0e6c-4a55-b495-b1bfb24607bd @@ -129,4 +134,4 @@ def test_negative_update_invalid_url(self, webhook_factory): invalid_url = '$%^##@***' with pytest.raises(CLIReturnCodeError): - Webhook.update({'name': hook.name, 'target-url': invalid_url}) + class_target_sat.cli.Webhook.update({'name': hook.name, 'target-url': invalid_url}) diff --git a/tests/foreman/destructive/test_ldap_authentication.py b/tests/foreman/destructive/test_ldap_authentication.py index 39f5a6b4f65..450460e7d91 100644 --- a/tests/foreman/destructive/test_ldap_authentication.py +++ b/tests/foreman/destructive/test_ldap_authentication.py @@ -23,9 +23,9 @@ import pyotp import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings from robottelo.constants import CERT_PATH, HAMMER_CONFIG, HAMMER_SESSIONS, LDAP_ATTR +from robottelo.exceptions import CLIReturnCodeError from robottelo.logging import logger from robottelo.utils.datafactory import gen_string diff --git a/tests/foreman/destructive/test_ldapauthsource.py b/tests/foreman/destructive/test_ldapauthsource.py index fdbc2da11ba..4a87090acaa 100644 --- a/tests/foreman/destructive/test_ldapauthsource.py +++ b/tests/foreman/destructive/test_ldapauthsource.py @@ -20,9 +20,9 @@ import pytest -from robottelo.cli.base import CLIReturnCodeError from robottelo.config import settings from robottelo.constants import HAMMER_CONFIG +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.destructive] diff --git a/tests/foreman/destructive/test_realm.py b/tests/foreman/destructive/test_realm.py index de5a9b1ebc3..9b58910f74d 100644 --- a/tests/foreman/destructive/test_realm.py +++ b/tests/foreman/destructive/test_realm.py @@ -21,7 +21,7 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.base import 
CLIReturnCodeError +from robottelo.exceptions import CLIReturnCodeError pytestmark = [pytest.mark.run_in_one_thread, pytest.mark.destructive] diff --git a/tests/foreman/endtoend/test_cli_endtoend.py b/tests/foreman/endtoend/test_cli_endtoend.py index 0001e482043..cfde294a5dc 100644 --- a/tests/foreman/endtoend/test_cli_endtoend.py +++ b/tests/foreman/endtoend/test_cli_endtoend.py @@ -20,22 +20,6 @@ import pytest from robottelo import constants -from robottelo.cli.activationkey import ActivationKey -from robottelo.cli.computeresource import ComputeResource -from robottelo.cli.contentview import ContentView -from robottelo.cli.domain import Domain -from robottelo.cli.factory import make_user -from robottelo.cli.host import Host -from robottelo.cli.hostgroup import HostGroup -from robottelo.cli.lifecycleenvironment import LifecycleEnvironment -from robottelo.cli.location import Location -from robottelo.cli.org import Org -from robottelo.cli.product import Product -from robottelo.cli.repository import Repository -from robottelo.cli.repository_set import RepositorySet -from robottelo.cli.subnet import Subnet -from robottelo.cli.subscription import Subscription -from robottelo.cli.user import User from robottelo.config import setting_is_set, settings from robottelo.constants.repos import CUSTOM_RPM_REPO @@ -47,40 +31,40 @@ def fake_manifest_is_set(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_default_org(): +def test_positive_cli_find_default_org(module_target_sat): """Check if 'Default Organization' is present :id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a :expectedresults: 'Default Organization' is found """ - result = Org.info({'name': constants.DEFAULT_ORG}) + result = module_target_sat.cli.Org.info({'name': constants.DEFAULT_ORG}) assert result['name'] == constants.DEFAULT_ORG @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_default_loc(): +def test_positive_cli_find_default_loc(module_target_sat): """Check if 'Default Location' is present :id: 11cf0d06-78ff-47e8-9d50-407a2ea31988 :expectedresults: 'Default Location' is found """ - result = Location.info({'name': constants.DEFAULT_LOC}) + result = module_target_sat.cli.Location.info({'name': constants.DEFAULT_LOC}) assert result['name'] == constants.DEFAULT_LOC @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_cli_find_admin_user(): +def test_positive_cli_find_admin_user(module_target_sat): """Check if Admin User is present :id: f6755189-05a6-4d2f-a3b8-98be0cfacaee :expectedresults: Admin User is found and has Admin role """ - result = User.info({'login': 'admin'}) + result = module_target_sat.cli.User.info({'login': 'admin'}) assert result['login'] == 'admin' assert result['admin'] == 'yes' @@ -126,34 +110,36 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel """ # step 1: Create a new user with admin permissions password = gen_alphanumeric() - user = make_user({'admin': 'true', 'password': password}) + user = target_sat.cli_factory.make_user({'admin': 'true', 'password': password}) user['password'] = password # step 2.1: Create a new organization - org = _create(user, Org, {'name': gen_alphanumeric()}) + org = _create(user, target_sat.cli.Org, {'name': gen_alphanumeric()}) target_sat.cli.SimpleContentAccess.disable({'organization-id': org['id']}) # step 2.2: Clone and upload manifest target_sat.put(f'{function_entitlement_manifest.path}', f'{function_entitlement_manifest.name}') - Subscription.upload( + target_sat.cli.Subscription.upload( {'file': 
f'{function_entitlement_manifest.name}', 'organization-id': org['id']} ) # step 2.3: Create a new lifecycle environment lifecycle_environment = _create( user, - LifecycleEnvironment, + target_sat.cli.LifecycleEnvironment, {'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'}, ) # step 2.4: Create a custom product - product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']}) + product = _create( + user, target_sat.cli.Product, {'name': gen_alphanumeric(), 'organization-id': org['id']} + ) repositories = [] # step 2.5: Create custom YUM repository custom_repo = _create( user, - Repository, + target_sat.cli.Repository, { 'content-type': 'yum', 'name': gen_alphanumeric(), @@ -165,7 +151,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel repositories.append(custom_repo) # step 2.6: Enable a Red Hat repository - RepositorySet.enable( + target_sat.cli.RepositorySet.enable( { 'basearch': 'x86_64', 'name': constants.REPOSET['rhst7'], @@ -174,7 +160,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel 'releasever': None, } ) - rhel_repo = Repository.info( + rhel_repo = target_sat.cli.Repository.info( { 'name': constants.REPOS['rhst7']['name'], 'organization-id': org['id'], @@ -185,16 +171,18 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel # step 2.7: Synchronize these two repositories for repo in repositories: - Repository.with_user(user['login'], user['password']).synchronize({'id': repo['id']}) + target_sat.cli.Repository.with_user(user['login'], user['password']).synchronize( + {'id': repo['id']} + ) # step 2.8: Create content view content_view = _create( - user, ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']} + user, target_sat.cli.ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']} ) # step 2.9: Associate the YUM and Red Hat repositories to new content view for repo in repositories: - ContentView.add_repository( + target_sat.cli.ContentView.add_repository( { 'id': content_view['id'], 'organization-id': org['id'], @@ -203,26 +191,28 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel ) # step 2.10: Publish content view - ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']}) + target_sat.cli.ContentView.with_user(user['login'], user['password']).publish( + {'id': content_view['id']} + ) # step 2.11: Promote content view to the lifecycle environment - content_view = ContentView.with_user(user['login'], user['password']).info( + content_view = target_sat.cli.ContentView.with_user(user['login'], user['password']).info( {'id': content_view['id']} ) assert len(content_view['versions']) == 1 - cv_version = ContentView.with_user(user['login'], user['password']).version_info( + cv_version = target_sat.cli.ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 1 - ContentView.with_user(user['login'], user['password']).version_promote( + target_sat.cli.ContentView.with_user(user['login'], user['password']).version_promote( {'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']} ) # check that content view exists in lifecycle - content_view = ContentView.with_user(user['login'], user['password']).info( + content_view = target_sat.cli.ContentView.with_user(user['login'], user['password']).info( {'id': 
content_view['id']} ) assert len(content_view['versions']) == 1 - cv_version = ContentView.with_user(user['login'], user['password']).version_info( + cv_version = target_sat.cli.ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 2 @@ -231,7 +221,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel # step 2.12: Create a new activation key activation_key = _create( user, - ActivationKey, + target_sat.cli.ActivationKey, { 'content-view-id': content_view['id'], 'lifecycle-environment-id': lifecycle_environment['id'], @@ -241,12 +231,14 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel ) # step 2.13: Add the products to the activation key - subscription_list = Subscription.with_user(user['login'], user['password']).list( + subscription_list = target_sat.cli.Subscription.with_user(user['login'], user['password']).list( {'organization-id': org['id']}, per_page=False ) for subscription in subscription_list: if subscription['name'] == constants.DEFAULT_SUBSCRIPTION_NAME: - ActivationKey.with_user(user['login'], user['password']).add_subscription( + target_sat.cli.ActivationKey.with_user( + user['login'], user['password'] + ).add_subscription( { 'id': activation_key['id'], 'quantity': 1, @@ -255,7 +247,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel ) # step 2.13.1: Enable product content - ActivationKey.with_user(user['login'], user['password']).content_override( + target_sat.cli.ActivationKey.with_user(user['login'], user['password']).content_override( { 'content-label': constants.REPOS['rhst7']['id'], 'id': activation_key['id'], @@ -267,7 +259,9 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel # BONUS: Create a content host and associate it with promoted # content view and last lifecycle where it exists content_host_name = gen_alphanumeric() - content_host = Host.with_user(user['login'], user['password']).subscription_register( + content_host = target_sat.cli.Host.with_user( + user['login'], user['password'] + ).subscription_register( { 'content-view-id': content_view['id'], 'lifecycle-environment-id': lifecycle_environment['id'], @@ -276,7 +270,9 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel } ) - content_host = Host.with_user(user['login'], user['password']).info({'id': content_host['id']}) + content_host = target_sat.cli.Host.with_user(user['login'], user['password']).info( + {'id': content_host['id']} + ) # check that content view matches what we passed assert content_host['content-information']['content-view']['name'] == content_view['name'] @@ -289,7 +285,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel # step 2.14: Create a new libvirt compute resource _create( user, - ComputeResource, + target_sat.cli.ComputeResource, { 'name': gen_alphanumeric(), 'provider': 'Libvirt', @@ -300,7 +296,7 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel # step 2.15: Create a new subnet subnet = _create( user, - Subnet, + target_sat.cli.Subnet, { 'name': gen_alphanumeric(), 'network': gen_ipaddr(ip3=True), @@ -309,15 +305,15 @@ def test_positive_cli_end_to_end(function_entitlement_manifest, target_sat, rhel ) # step 2.16: Create a new domain - domain = _create(user, Domain, {'name': gen_alphanumeric()}) + domain = _create(user, target_sat.cli.Domain, 
{'name': gen_alphanumeric()})
 
     # step 2.17: Create a new hostgroup and associate previous entities to it
     host_group = _create(
         user,
-        HostGroup,
+        target_sat.cli.HostGroup,
         {'domain-id': domain['id'], 'name': gen_alphanumeric(), 'subnet-id': subnet['id']},
     )
-    HostGroup.with_user(user['login'], user['password']).update(
+    target_sat.cli.HostGroup.with_user(user['login'], user['password']).update(
         {
             'id': host_group['id'],
             'organization-ids': org['id'],
diff --git a/tests/foreman/longrun/test_oscap.py b/tests/foreman/longrun/test_oscap.py
index 3d292092c86..00e8ad0bfe3 100644
--- a/tests/foreman/longrun/test_oscap.py
+++ b/tests/foreman/longrun/test_oscap.py
@@ -21,13 +21,6 @@
 from nailgun import entities
 import pytest
 
-from robottelo.cli.ansible import Ansible
-from robottelo.cli.arfreport import Arfreport
-from robottelo.cli.factory import make_hostgroup, make_scap_policy
-from robottelo.cli.host import Host
-from robottelo.cli.job_invocation import JobInvocation
-from robottelo.cli.proxy import Proxy
-from robottelo.cli.scapcontent import Scapcontent
 from robottelo.config import settings
 from robottelo.constants import (
     OSCAP_DEFAULT_CONTENT,
@@ -48,7 +41,7 @@
 }
 
 
-def fetch_scap_and_profile_id(scap_name, scap_profile):
+def fetch_scap_and_profile_id(sat, scap_name, scap_profile):
     """Extracts the scap ID and scap profile id
 
     :param scap_name: Scap title
@@ -57,7 +50,7 @@ def fetch_scap_and_profile_id(scap_name, scap_profile):
 
     :returns: scap_id and scap_profile_id
     """
-    default_content = Scapcontent.info({'title': scap_name}, output_format='json')
+    default_content = sat.cli.Scapcontent.info({'title': scap_name}, output_format='json')
     scap_id = default_content['id']
     scap_profile_ids = [
         profile['id']
@@ -70,7 +63,7 @@
 @pytest.fixture(scope='module')
 def default_proxy(module_target_sat):
     """Returns default capsule/proxy id"""
-    proxy = Proxy.list({'search': module_target_sat.hostname})[0]
+    proxy = module_target_sat.cli.Proxy.list({'search': module_target_sat.hostname})[0]
     p_features = set(proxy.get('features').split(', '))
     if {'Ansible', 'Openscap'}.issubset(p_features):
         proxy_id = proxy.get('id')
@@ -173,7 +166,7 @@
     hgrp_name = gen_string('alpha')
     policy_name = gen_string('alpha')
     # Creates host_group for rhel7
-    make_hostgroup(
+    target_sat.cli_factory.make_hostgroup(
         {
             'content-source-id': default_proxy,
             'name': hgrp_name,
         }
     )
     # Creates oscap_policy.
- scap_id, scap_profile_id = fetch_scap_and_profile_id(content, profile) - Ansible.roles_import({'proxy-id': default_proxy}) - Ansible.variables_import({'proxy-id': default_proxy}) - role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') - make_scap_policy( + scap_id, scap_profile_id = fetch_scap_and_profile_id(target_sat, content, profile) + target_sat.cli.Ansible.roles_import({'proxy-id': default_proxy}) + target_sat.cli.Ansible.variables_import({'proxy-id': default_proxy}) + role_id = target_sat.cli.Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') + target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_id, 'hostgroups': hgrp_name, @@ -204,7 +197,7 @@ def test_positive_oscap_run_via_ansible( vm.create_custom_repos(**rhel_repo) else: vm.create_custom_repos(**{distro: rhel_repo}) - Host.update( + target_sat.cli.Host.update( { 'name': vm.hostname.lower(), 'lifecycle-environment': lifecycle_env.name, @@ -215,15 +208,17 @@ def test_positive_oscap_run_via_ansible( 'ansible-role-ids': role_id, } ) - job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') + job_id = target_sat.cli.Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') target_sat.wait_for_tasks( f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"' ) try: - result = JobInvocation.info({'id': job_id})['success'] + result = target_sat.cli.JobInvocation.info({'id': job_id})['success'] assert result == '1' except AssertionError: - output = ' '.join(JobInvocation.get_output({'id': job_id, 'host': vm.hostname})) + output = ' '.join( + target_sat.cli.JobInvocation.get_output({'id': job_id, 'host': vm.hostname}) + ) result = f'host output: {output}' raise AssertionError(result) result = vm.run('cat /etc/foreman_scap_client/config.yaml | grep profile') @@ -233,7 +228,7 @@ def test_positive_oscap_run_via_ansible( vm.execute_foreman_scap_client() # Assert whether oscap reports are uploaded to # Satellite6. - result = Arfreport.list({'search': f'host={vm.hostname.lower()}'}) + result = target_sat.cli.Arfreport.list({'search': f'host={vm.hostname.lower()}'}) assert result is not None @@ -270,7 +265,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( hgrp_name = gen_string('alpha') policy_name = gen_string('alpha') # Creates host_group for rhel7 - make_hostgroup( + target_sat.cli_factory.make_hostgroup( { 'content-source-id': default_proxy, 'name': hgrp_name, @@ -279,12 +274,12 @@ def test_positive_oscap_run_via_ansible_bz_1814988( ) # Creates oscap_policy. 
scap_id, scap_profile_id = fetch_scap_and_profile_id( - OSCAP_DEFAULT_CONTENT['rhel7_content'], OSCAP_PROFILE['dsrhel7'] + target_sat, OSCAP_DEFAULT_CONTENT['rhel7_content'], OSCAP_PROFILE['dsrhel7'] ) - Ansible.roles_import({'proxy-id': default_proxy}) - Ansible.variables_import({'proxy-id': default_proxy}) - role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') - make_scap_policy( + target_sat.cli.Ansible.roles_import({'proxy-id': default_proxy}) + target_sat.cli.Ansible.variables_import({'proxy-id': default_proxy}) + role_id = target_sat.cli.Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id') + target_sat.cli_factory.make_scap_policy( { 'scap-content-id': scap_id, 'hostgroups': hgrp_name, @@ -307,7 +302,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( '--fetch-remote-resources --results-arf results.xml ' '/usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml', ) - Host.update( + target_sat.cli.Host.update( { 'name': vm.hostname.lower(), 'lifecycle-environment': lifecycle_env.name, @@ -318,15 +313,17 @@ def test_positive_oscap_run_via_ansible_bz_1814988( 'ansible-role-ids': role_id, } ) - job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') + job_id = target_sat.cli.Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id') target_sat.wait_for_tasks( f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"' ) try: - result = JobInvocation.info({'id': job_id})['success'] + result = target_sat.cli.JobInvocation.info({'id': job_id})['success'] assert result == '1' except AssertionError: - output = ' '.join(JobInvocation.get_output({'id': job_id, 'host': vm.hostname})) + output = ' '.join( + target_sat.cli.JobInvocation.get_output({'id': job_id, 'host': vm.hostname}) + ) result = f'host output: {output}' raise AssertionError(result) result = vm.run('cat /etc/foreman_scap_client/config.yaml | grep profile') @@ -336,7 +333,7 @@ def test_positive_oscap_run_via_ansible_bz_1814988( vm.execute_foreman_scap_client() # Assert whether oscap reports are uploaded to # Satellite6. - result = Arfreport.list({'search': f'host={vm.hostname.lower()}'}) + result = target_sat.cli.Arfreport.list({'search': f'host={vm.hostname.lower()}'}) assert result is not None @@ -505,7 +502,7 @@ def test_positive_oscap_run_via_local_files( } ) # Creates oscap_policy. 
- scap_id, scap_profile_id = fetch_scap_and_profile_id(content, profile) + scap_id, scap_profile_id = fetch_scap_and_profile_id(module_target_sat, content, profile) with Broker( nick=distro, host_class=ContentHost, diff --git a/tests/foreman/ui/test_activationkey.py b/tests/foreman/ui/test_activationkey.py index 04546d9ba76..dd2496a8aff 100644 --- a/tests/foreman/ui/test_activationkey.py +++ b/tests/foreman/ui/test_activationkey.py @@ -25,7 +25,6 @@ import pytest from robottelo import constants -from robottelo.cli.factory import setup_org_for_a_custom_repo from robottelo.config import settings from robottelo.hosts import ContentHost from robottelo.utils.datafactory import parametrized, valid_data_list @@ -1051,7 +1050,7 @@ def test_positive_host_associations(session, target_sat): :CaseLevel: System """ org = entities.Organization().create() - org_entities = setup_org_for_a_custom_repo( + org_entities = target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_1.url, 'organization-id': org.id} ) ak1 = entities.ActivationKey(id=org_entities['activationkey-id']).read() @@ -1115,7 +1114,7 @@ def test_positive_service_level_subscription_with_custom_product( :CaseLevel: System """ org = function_entitlement_manifest_org - entities_ids = setup_org_for_a_custom_repo( + entities_ids = target_sat.cli_factory.setup_org_for_a_custom_repo( {'url': settings.repos.yum_1.url, 'organization-id': org.id} ) product = entities.Product(id=entities_ids['product-id']).read() diff --git a/tests/foreman/ui/test_contenthost.py b/tests/foreman/ui/test_contenthost.py index 97cdef261fd..198c3643262 100644 --- a/tests/foreman/ui/test_contenthost.py +++ b/tests/foreman/ui/test_contenthost.py @@ -25,7 +25,6 @@ from nailgun import entities import pytest -from robottelo.cli.factory import CLIFactoryError, make_fake_host, make_virt_who_config from robottelo.config import setting_is_set, settings from robottelo.constants import ( DEFAULT_SYSPURPOSE_ATTRIBUTES, @@ -41,6 +40,7 @@ VDC_SUBSCRIPTION_NAME, VIRT_WHO_HYPERVISOR_TYPES, ) +from robottelo.exceptions import CLIFactoryError from robottelo.utils.issue_handlers import is_open from robottelo.utils.virtwho import create_fake_hypervisor_content @@ -900,7 +900,7 @@ def test_positive_virt_who_hypervisor_subscription_status( # TODO move this to either hack around virt-who service or use an env-* compute resource provisioning_server = settings.libvirt.libvirt_hostname # Create a new virt-who config - virt_who_config = make_virt_who_config( + virt_who_config = target_sat.cli_factory.make_virt_who_config( { 'organization-id': org.id, 'hypervisor-type': VIRT_WHO_HYPERVISOR_TYPES['libvirt'], @@ -1723,7 +1723,7 @@ def test_syspurpose_mismatched(session, default_location, vm_module_streams): @pytest.mark.tier3 -def test_pagination_multiple_hosts_multiple_pages(session, module_host_template): +def test_pagination_multiple_hosts_multiple_pages(session, module_host_template, target_sat): """Create hosts to fill more than one page, sort on OS, check pagination. Search for hosts based on operating system and assert that more than one page @@ -1748,7 +1748,7 @@ def test_pagination_multiple_hosts_multiple_pages(session, module_host_template) # Create more than one page of fake hosts. Need two digits in name to ensure sort order. 
for count in range(host_num): host_name = f'test-{count + 1:0>2}' - make_fake_host( + target_sat.cli_factory.make_fake_host( { 'name': host_name, 'organization-id': module_host_template.organization.id, diff --git a/tests/foreman/ui/test_settings.py b/tests/foreman/ui/test_settings.py index ade4cd9afbe..34bbe0b8d6a 100644 --- a/tests/foreman/ui/test_settings.py +++ b/tests/foreman/ui/test_settings.py @@ -23,7 +23,6 @@ from nailgun import entities import pytest -from robottelo.cli.user import User from robottelo.config import settings from robottelo.utils.datafactory import filtered_datapoint, gen_string @@ -252,7 +251,7 @@ def test_positive_update_login_page_footer_text(session, setting_update): @pytest.mark.tier3 -def test_negative_settings_access_to_non_admin(): +def test_negative_settings_access_to_non_admin(module_target_sat): """Check non admin users can't access Administer -> Settings tab :id: 34bb9376-c5fe-431a-ac0d-ef030c0ab50e @@ -282,7 +281,7 @@ def test_negative_settings_access_to_non_admin(): 'from a Satellite administrator: view_settings Back' ) finally: - User.delete({'login': login}) + module_target_sat.cli.User.delete({'login': login}) @pytest.mark.stubbed diff --git a/tests/foreman/ui/test_subscription.py b/tests/foreman/ui/test_subscription.py index 2196eabb60b..21cbadb4d15 100644 --- a/tests/foreman/ui/test_subscription.py +++ b/tests/foreman/ui/test_subscription.py @@ -24,7 +24,6 @@ from nailgun import entities import pytest -from robottelo.cli.factory import make_virt_who_config from robottelo.config import settings from robottelo.constants import ( DEFAULT_SUBSCRIPTION_NAME, @@ -369,7 +368,7 @@ def test_positive_view_vdc_guest_subscription_products( rh_product_repository = target_sat.cli_factory.RHELAnsibleEngineRepository(cdn=True) product_name = rh_product_repository.data['product'] # Create a new virt-who config - virt_who_config = make_virt_who_config( + virt_who_config = target_sat.cli_factory.make_virt_who_config( { 'organization-id': org.id, 'hypervisor-type': VIRT_WHO_HYPERVISOR_TYPES['libvirt'], diff --git a/tests/foreman/virtwho/cli/test_esx.py b/tests/foreman/virtwho/cli/test_esx.py index fe2b1827bf6..0e0ced215b9 100644 --- a/tests/foreman/virtwho/cli/test_esx.py +++ b/tests/foreman/virtwho/cli/test_esx.py @@ -22,7 +22,6 @@ import pytest import requests -from robottelo.cli.user import User from robottelo.config import settings from robottelo.utils.virtwho import ( ETC_VIRTWHO_CONFIG, @@ -306,7 +305,7 @@ def test_positive_rhsm_option(self, default_org, form_data, virtwho_config, targ command = get_configure_command(virtwho_config['id'], default_org.name) deploy_configure_by_command(command, form_data['hypervisor-type'], org=default_org.label) rhsm_username = get_configure_option('rhsm_username', config_file) - assert not User.exists(search=('login', rhsm_username)) + assert not target_sat.cli.User.exists(search=('login', rhsm_username)) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert get_configure_option('rhsm_prefix', config_file) == '/rhsm' diff --git a/tests/foreman/virtwho/cli/test_esx_sca.py b/tests/foreman/virtwho/cli/test_esx_sca.py index df15cce8105..39a599418a1 100644 --- a/tests/foreman/virtwho/cli/test_esx_sca.py +++ b/tests/foreman/virtwho/cli/test_esx_sca.py @@ -20,7 +20,6 @@ import pytest import requests -from robottelo.cli.user import User from robottelo.config import settings from robottelo.utils.virtwho import ( ETC_VIRTWHO_CONFIG, @@ -399,7 +398,7 @@ def test_positive_rhsm_option( command, 
form_data['hypervisor-type'], org=module_sca_manifest_org.label ) rhsm_username = get_configure_option('rhsm_username', config_file) - assert not User.exists(search=('login', rhsm_username)) + assert not target_sat.cli.User.exists(search=('login', rhsm_username)) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert get_configure_option('rhsm_prefix', config_file) == '/rhsm' diff --git a/tests/robottelo/test_cli.py b/tests/robottelo/test_cli.py index 78b0f6f0cf8..94922f72068 100644 --- a/tests/robottelo/test_cli.py +++ b/tests/robottelo/test_cli.py @@ -4,8 +4,8 @@ import pytest -from robottelo.cli.base import ( - Base, +from robottelo.cli.base import Base +from robottelo.exceptions import ( CLIBaseError, CLIDataBaseError, CLIError, diff --git a/tests/upgrades/test_virtwho.py b/tests/upgrades/test_virtwho.py index 826e4e1b059..99606f2f062 100644 --- a/tests/upgrades/test_virtwho.py +++ b/tests/upgrades/test_virtwho.py @@ -19,9 +19,6 @@ from fauxfactory import gen_string import pytest -from robottelo.cli.host import Host -from robottelo.cli.subscription import Subscription -from robottelo.cli.virt_who_config import VirtWhoConfig from robottelo.config import settings from robottelo.utils.issue_handlers import is_open from robottelo.utils.virtwho import ( @@ -101,8 +98,10 @@ def test_pre_create_virt_who_configuration( (guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'), ] for hostname, sku in hosts: - host = Host.list({'search': hostname})[0] - subscriptions = Subscription.list({'organization-id': org.id, 'search': sku}) + host = target_sat.cli.Host.list({'search': hostname})[0] + subscriptions = target_sat.cli.Subscription.list( + {'organization-id': org.id, 'search': sku} + ) vdc_id = subscriptions[0]['id'] if 'type=STACK_DERIVED' in sku: for item in subscriptions: @@ -154,8 +153,13 @@ def test_post_crud_virt_who_configuration(self, form_data, pre_upgrade_data, tar if not is_open('BZ:1802395'): assert vhd.status == 'ok' # Verify virt-who status via CLI as we cannot check it via API now - vhd_cli = VirtWhoConfig.exists(search=('name', form_data['name'])) - assert VirtWhoConfig.info({'id': vhd_cli['id']})['general-information']['status'] == 'OK' + vhd_cli = target_sat.cli.VirtWhoConfig.exists(search=('name', form_data['name'])) + assert ( + target_sat.cli.VirtWhoConfig.info({'id': vhd_cli['id']})['general-information'][ + 'status' + ] + == 'OK' + ) # Vefify the connection of the guest on Content host hypervisor_name = pre_upgrade_data.get('hypervisor_name')