From 0ddb4b712f659b78791e5a6437dca4bf140e8ce6 Mon Sep 17 00:00:00 2001 From: dosas Date: Mon, 9 Oct 2023 11:21:19 +0200 Subject: [PATCH 01/96] Nailgun ssl cert verification (#12813) * Add config option to verify nailgun requests against ssl cert * Use ssl verification for all instances of ServerConfig * Use dynaconf validator --------- Co-authored-by: dosas --- conf/server.yaml.template | 4 +++ robottelo/config/__init__.py | 6 ++-- robottelo/config/validators.py | 1 + robottelo/hosts.py | 2 +- tests/foreman/api/test_role.py | 45 ++++++++++++++++++-------- tests/foreman/api/test_subscription.py | 5 +-- tests/foreman/api/test_user.py | 12 ++++--- 7 files changed, 52 insertions(+), 23 deletions(-) diff --git a/conf/server.yaml.template b/conf/server.yaml.template index 5c08431532b..7f876bccaba 100644 --- a/conf/server.yaml.template +++ b/conf/server.yaml.template @@ -48,6 +48,10 @@ SERVER: ADMIN_USERNAME: admin # Admin password when accessing API and UI ADMIN_PASSWORD: changeme + # Set to true to verify against the certificate given in REQUESTS_CA_BUNDLE + # Or specify path to certificate path or directory + # see: https://requests.readthedocs.io/en/latest/user/advanced/#ssl-cert-verification + VERIFY_CA: false SSH_CLIENT: # Specify port number for ssh client, Default: 22 diff --git a/robottelo/config/__init__.py b/robottelo/config/__init__.py index 5bd85ab5737..e078d2fcc63 100644 --- a/robottelo/config/__init__.py +++ b/robottelo/config/__init__.py @@ -110,7 +110,7 @@ def user_nailgun_config(username=None, password=None): """ creds = (username, password) - return ServerConfig(get_url(), creds, verify=False) + return ServerConfig(get_url(), creds, verify=settings.server.verify_ca) def setting_is_set(option): @@ -153,7 +153,9 @@ def configure_nailgun(): from nailgun.config import ServerConfig entity_mixins.CREATE_MISSING = True - entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(get_url(), get_credentials(), verify=False) + entity_mixins.DEFAULT_SERVER_CONFIG = 
ServerConfig( + get_url(), get_credentials(), verify=settings.server.verify_ca + ) gpgkey_init = entities.GPGKey.__init__ def patched_gpgkey_init(self, server_config=None, **kwargs): diff --git a/robottelo/config/validators.py b/robottelo/config/validators.py index b605a1cd229..383abfb7aed 100644 --- a/robottelo/config/validators.py +++ b/robottelo/config/validators.py @@ -29,6 +29,7 @@ Validator('server.port', default=443), Validator('server.ssh_username', default='root'), Validator('server.ssh_password', default=None), + Validator('server.verify_ca', default=False), ], content_host=[ Validator('content_host.default_rhel_version', must_exist=True), diff --git a/robottelo/hosts.py b/robottelo/hosts.py index 9b6923d7449..6ee09915587 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -1776,7 +1776,7 @@ class DecClass(cls): self.nailgun_cfg = ServerConfig( auth=(settings.server.admin_username, settings.server.admin_password), url=f'{self.url}', - verify=False, + verify=settings.server.verify_ca, ) # add each nailgun entity to self.api, injecting our server config for name, obj in entities.__dict__.items(): diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index d75ff03e84b..4b42408114d 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -26,6 +26,7 @@ from requests.exceptions import HTTPError from robottelo.cli.ldapauthsource import LDAPAuthSource +from robottelo.config import settings from robottelo.constants import LDAP_ATTR, LDAP_SERVER_TYPE from robottelo.utils.datafactory import gen_string, generate_strings_list, parametrized from robottelo.utils.issue_handlers import is_open @@ -154,7 +155,9 @@ def user_config(self, user, satellite): :param user: The nailgun.entities.User object of an user with passwd parameter """ - return ServerConfig(auth=(user.login, user.passwd), url=satellite.url, verify=False) + return ServerConfig( + auth=(user.login, user.passwd), url=satellite.url, 
verify=settings.server.verify_ca + ) @pytest.fixture def role_taxonomies(self): @@ -991,7 +994,9 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta location=[role_taxonomies['loc'].id], ).create() for login, password in ((userone_login, userone_pass), (usertwo_login, usertwo_pass)): - sc = ServerConfig(auth=(login, password), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(login, password), url=target_sat.url, verify=settings.server.verify_ca + ) try: entities.Domain(sc).search( query={ @@ -1120,7 +1125,9 @@ def test_negative_assign_taxonomies_by_org_admin( location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) # Getting the domain from user1 dom = entities.Domain(sc, id=dom.id).read() dom.organization = [filter_taxonomies['org']] @@ -1279,7 +1286,9 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) role_name = gen_string('alpha') with pytest.raises(HTTPError): entities.Role( @@ -1344,7 +1353,9 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) with pytest.raises(HTTPError): entities.User(sc, id=1).read() @@ -1389,7 +1400,9 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): 
location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc_user = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc_user = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') user = entities.User( @@ -1470,7 +1483,9 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): ) user.role = [org_admin] user = user.update(['role']) - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) name = gen_string('alphanumeric') location = entities.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() assert location.name == name @@ -1534,7 +1549,9 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s location=[role_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) with pytest.raises(HTTPError): entities.Organization(sc, name=gen_string('alpha')).create() if not is_open("BZ:1825698"): @@ -1578,7 +1595,9 @@ def test_positive_access_all_global_entities_by_org_admin( location=[role_taxonomies['loc'], filter_taxonomies['loc']], ).create() assert user_login == user.login - sc = ServerConfig(auth=(user_login, user_pass), url=target_sat.url, verify=False) + sc = ServerConfig( + auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca + ) try: for entity in [ entities.Architecture, @@ -1627,7 +1646,7 @@ def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, cre sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), 
url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() @@ -1670,7 +1689,7 @@ def test_negative_access_entities_from_ldap_user( sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() @@ -1734,7 +1753,7 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre sc = ServerConfig( auth=(user.login, password), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) # Accessing the Domain resource entities.Domain(sc, id=domain.id).read() @@ -1790,7 +1809,7 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta sc = ServerConfig( auth=(user.login, password), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) # Trying to access the Domain resource with pytest.raises(HTTPError): diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 8b1da648d81..377d43555dd 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -28,6 +28,7 @@ from requests.exceptions import HTTPError from robottelo.cli.subscription import Subscription +from robottelo.config import settings from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME, PRDS, REPOS, REPOSET pytestmark = [pytest.mark.run_in_one_thread] @@ -191,7 +192,7 @@ def test_positive_delete_manifest_as_another_user( sc1 = ServerConfig( auth=(user1.login, user1_password), url=target_sat.url, - verify=False, + verify=settings.server.verify_ca, ) user2_password = gen_string('alphanumeric') user2 = target_sat.api.User( @@ -203,7 +204,7 @@ def test_positive_delete_manifest_as_another_user( sc2 = ServerConfig( auth=(user2.login, user2_password), url=target_sat.url, - 
verify=False, + verify=settings.server.verify_ca, ) # use the first admin to upload a manifest with function_entitlement_manifest as manifest: diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index cab7b49d653..eff47fbba0f 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -418,7 +418,9 @@ def test_positive_table_preferences(self, module_target_sat): user = entities.User(role=existing_roles, password=password).create() name = "hosts" columns = ["power_status", "name", "comment"] - sc = ServerConfig(auth=(user.login, password), url=module_target_sat.url, verify=False) + sc = ServerConfig( + auth=(user.login, password), url=module_target_sat.url, verify=settings.server.verify_ca + ) entities.TablePreferences(sc, user=user, name=name, columns=columns).create() table_preferences = entities.TablePreferences(sc, user=user).search() assert len(table_preferences) == 1 @@ -726,7 +728,7 @@ def test_positive_ad_basic_no_roles(self, create_ldap): sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() @@ -775,7 +777,7 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_ sc = ServerConfig( auth=(create_ldap['ldap_user_name'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() @@ -857,7 +859,7 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): sc = ServerConfig( auth=(create_ldap['username'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() @@ -896,7 +898,7 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): sc = 
ServerConfig( auth=(create_ldap['username'], create_ldap['ldap_user_passwd']), url=create_ldap['sat_url'], - verify=False, + verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): entities.Architecture(sc).search() From 3d06becfb9794f682e0a836f915a90cdd54f5de6 Mon Sep 17 00:00:00 2001 From: Samuel Bible Date: Mon, 9 Oct 2023 13:03:10 -0500 Subject: [PATCH 02/96] Change read to read_legacy_ui (#12844) --- tests/foreman/ui/test_activationkey.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/foreman/ui/test_activationkey.py b/tests/foreman/ui/test_activationkey.py index 2f50eca2d10..d0664d86e27 100644 --- a/tests/foreman/ui/test_activationkey.py +++ b/tests/foreman/ui/test_activationkey.py @@ -78,7 +78,12 @@ def test_positive_end_to_end_crud(session, module_org): indirect=True, ) def test_positive_end_to_end_register( - session, function_entitlement_manifest_org, repos_collection, rhel7_contenthost, target_sat + session, + function_entitlement_manifest_org, + default_location, + repos_collection, + rhel7_contenthost, + target_sat, ): """Create activation key and use it during content host registering @@ -98,10 +103,13 @@ def test_positive_end_to_end_register( repos_collection.setup_content(org.id, lce.id, upload_manifest=False) ak_name = repos_collection.setup_content_data['activation_key']['name'] - repos_collection.setup_virtual_machine(rhel7_contenthost) + repos_collection.setup_virtual_machine(rhel7_contenthost, install_katello_agent=False) with session: session.organization.select(org.name) - chost = session.contenthost.read(rhel7_contenthost.hostname, widget_names='details') + session.location.select(default_location.name) + chost = session.contenthost.read_legacy_ui( + rhel7_contenthost.hostname, widget_names='details' + ) assert chost['details']['registered_by'] == f'Activation Key {ak_name}' ak_values = session.activationkey.read(ak_name, widget_names='content_hosts') assert 
len(ak_values['content_hosts']['table']) == 1 From fbf3855295380dcb966acca56ee0d5bdf54df36b Mon Sep 17 00:00:00 2001 From: David Moore <109112035+damoore044@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:08:38 -0400 Subject: [PATCH 03/96] Fixes for stream, 6.14.z; failure ErrataManagement in api:test_errata (#12706) Fix no repos being avaliable to host --- tests/foreman/api/test_errata.py | 41 +++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index 504cdf3d390..ba64ce1c9a0 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -17,6 +17,7 @@ :Upstream: No """ # For ease of use hc refers to host-collection throughout this document +from copy import copy from time import sleep from nailgun import entities @@ -158,11 +159,11 @@ def test_positive_install_in_hc(module_org, activation_key, custom_repo, target_ @pytest.mark.tier3 -@pytest.mark.rhel_ver_list([7, 8, 9]) +@pytest.mark.rhel_ver_match(r'^(?!6$)\d+$') @pytest.mark.no_containers @pytest.mark.e2e def test_positive_install_multiple_in_host( - module_org, activation_key, custom_repo, rhel_contenthost, target_sat + function_org, rhel_contenthost, target_sat, function_lce ): """For a host with multiple applicable errata install one and ensure the rest of errata is still available @@ -183,22 +184,44 @@ def test_positive_install_multiple_in_host( :CaseLevel: System """ + ak = target_sat.api.ActivationKey( + organization=function_org, + environment=function_lce, + ).create() + cv = target_sat.api.ContentView(organization=function_org).create() + # Associate custom repos with org, cv, lce, ak: + target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': settings.repos.yum_9.url, + 'organization-id': function_org.id, + 'content-view-id': cv.id, + 'lifecycle-environment-id': function_lce.id, + 'activationkey-id': ak.id, + } + ) + # Install katello-ca, register 
content-host, enable all repos: rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(module_org.label, activation_key.name) + rhel_contenthost.register_contenthost(function_org.name, ak.name, function_lce.name) assert rhel_contenthost.subscribed + # Installing outdated custom packages: for package in constants.FAKE_9_YUM_OUTDATED_PACKAGES: + rhel_contenthost.run(f'yum remove -y {str(package.split("-", 1)[0])}') assert rhel_contenthost.run(f'yum install -y {package}').status == 0 - applicable_errata_count = rhel_contenthost.applicable_errata_count - assert applicable_errata_count > 1 + assert rhel_contenthost.run(f'rpm -q {package}').status == 0 rhel_contenthost.add_rex_key(satellite=target_sat) - for errata in settings.repos.yum_9.errata[1:4]: + rhel_contenthost.run(r'subscription-manager repos --enable \*') + # Each errata will be installed sequentially, + # after each install, applicable-errata-count should drop by one. + for errata in constants.FAKE_9_YUM_SECURITY_ERRATUM: + pre_errata_count = copy(rhel_contenthost.applicable_errata_count) + assert pre_errata_count >= 1 task_id = target_sat.api.JobInvocation().run( data={ 'feature': 'katello_errata_install', 'inputs': {'errata': str(errata)}, 'targeting_type': 'static_query', 'search_query': f'name = {rhel_contenthost.hostname}', - 'organization_id': module_org.id, + 'organization_id': function_org.id, }, )['id'] target_sat.wait_for_tasks( @@ -206,8 +229,8 @@ def test_positive_install_multiple_in_host( search_rate=20, max_tries=15, ) - applicable_errata_count -= 1 - assert rhel_contenthost.applicable_errata_count == applicable_errata_count + sleep(10) + assert rhel_contenthost.applicable_errata_count == pre_errata_count - 1 @pytest.mark.tier3 From 2f3484e18a10873eb8bed975fd78bdc059fa4ca7 Mon Sep 17 00:00:00 2001 From: vsedmik <46570670+vsedmik@users.noreply.github.com> Date: Mon, 9 Oct 2023 23:58:03 +0200 Subject: [PATCH 04/96] Fix lce id option in host registration (#12849) 
--- robottelo/hosts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/robottelo/hosts.py b/robottelo/hosts.py index 6ee09915587..aa72002146f 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -781,7 +781,7 @@ def register( raise ValueError('Global registration method can be used with Satellite/Capsule only') if lifecycle_environment is not None: - options['lifecycle_environment_id'] = lifecycle_environment.id + options['lifecycle-environment-id'] = lifecycle_environment.id if operating_system is not None: options['operatingsystem-id'] = operating_system.id if hostgroup is not None: From a0841b591771a512f6086d9a8c181cda4d35f975 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Mon, 9 Oct 2023 18:00:49 -0400 Subject: [PATCH 05/96] Fixed test_positive_assign_http_proxy_to_products_repositories failure (#12850) final commit for test fixes --- tests/foreman/ui/test_http_proxy.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/foreman/ui/test_http_proxy.py b/tests/foreman/ui/test_http_proxy.py index 2f48559cac2..abe02f12002 100644 --- a/tests/foreman/ui/test_http_proxy.py +++ b/tests/foreman/ui/test_http_proxy.py @@ -110,6 +110,8 @@ def test_positive_assign_http_proxy_to_products_repositories( # Create repositories from UI. with target_sat.ui_session() as session: repo_a1_name = gen_string('alpha') + session.organization.select(org_name=module_org.name) + session.location.select(loc_name=module_location.name) session.repository.create( product_a.name, { From 27823641cda419510149097350ae23927b7310fd Mon Sep 17 00:00:00 2001 From: Pavel Novotny Date: Tue, 10 Oct 2023 00:04:05 +0200 Subject: [PATCH 06/96] Hosts: move from RHEL 7 to RHEL 8 for glob. registration (#12810) Move from RHEL 7 to RHEL 8 hosts for non-crucial global registration tests. Also fixed a forgotten assert in `test_global_registration_token_restriction`. 
--- tests/foreman/ui/test_host.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index 1d3b734e2ea..27ec9ca39cd 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -1313,7 +1313,7 @@ def test_global_registration_form_populate( def test_global_registration_with_capsule_host( session, capsule_configured, - rhel7_contenthost, + rhel8_contenthost, module_org, module_location, module_product, @@ -1342,7 +1342,7 @@ def test_global_registration_with_capsule_host( :CaseAutomation: Automated """ - client = rhel7_contenthost + client = rhel8_contenthost repo = target_sat.api.Repository( url=settings.repos.yum_1.url, content_type=REPO_TYPE['yum'], @@ -1412,7 +1412,7 @@ def test_global_registration_with_capsule_host( @pytest.mark.usefixtures('enable_capsule_for_registration') @pytest.mark.no_containers def test_global_registration_with_gpg_repo_and_default_package( - session, module_activation_key, default_os, default_smart_proxy, rhel7_contenthost + session, module_activation_key, default_os, default_smart_proxy, rhel8_contenthost ): """Host registration form produces a correct registration command and host is registered successfully with gpg repo enabled and have default package @@ -1435,7 +1435,7 @@ def test_global_registration_with_gpg_repo_and_default_package( :parametrized: yes """ - client = rhel7_contenthost + client = rhel8_contenthost repo_name = 'foreman_register' repo_url = settings.repos.gr_yum_repo.url repo_gpg_url = settings.repos.gr_yum_repo.gpg_url @@ -1455,7 +1455,15 @@ def test_global_registration_with_gpg_repo_and_default_package( # rhel repo required for insights client installation, # syncing it to the satellite would take too long - client.create_custom_repos(rhel7=settings.repos.rhel7_os) + rhelver = client.os_version.major + if rhelver > 7: + repos = {f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']['baseos']} 
+ else: + repos = { + 'rhel7_os': settings.repos['rhel7_os'], + 'rhel7_extras': settings.repos['rhel7_extras'], + } + client.create_custom_repos(**repos) # run curl result = client.execute(cmd) assert result.status == 0 @@ -1571,7 +1579,7 @@ def test_global_re_registration_host_with_force_ignore_error_options( @pytest.mark.tier2 @pytest.mark.usefixtures('enable_capsule_for_registration') def test_global_registration_token_restriction( - session, module_activation_key, rhel7_contenthost, default_os, default_smart_proxy, target_sat + session, module_activation_key, rhel8_contenthost, default_os, default_smart_proxy, target_sat ): """Global registration token should be only used for registration call, it should be restricted for any other api calls. @@ -1589,7 +1597,7 @@ def test_global_registration_token_restriction( :parametrized: yes """ - client = rhel7_contenthost + client = rhel8_contenthost with session: cmd = session.host.get_register_command( { @@ -1609,7 +1617,7 @@ def test_global_registration_token_restriction( for curl_cmd in (curl_users, curl_hosts): result = client.execute(curl_cmd) assert result.status == 0 - 'Unable to authenticate user' in result.stdout + assert 'Unable to authenticate user' in result.stdout @pytest.mark.tier4 From 4ad7af6552fbbe8beb63f3e1682f60fec009a83d Mon Sep 17 00:00:00 2001 From: vijay sawant Date: Tue, 10 Oct 2023 13:01:02 +0530 Subject: [PATCH 07/96] update case component with correct name (#12859) --- tests/upgrades/test_hostcontent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/upgrades/test_hostcontent.py b/tests/upgrades/test_hostcontent.py index 54b102679b9..172039c1930 100644 --- a/tests/upgrades/test_hostcontent.py +++ b/tests/upgrades/test_hostcontent.py @@ -6,7 +6,7 @@ :CaseLevel: Acceptance -:CaseComponent: Host-Content +:CaseComponent: Hosts-Content :Team: Phoenix-subscriptions From 059830c9194d1db1230285b0ab05846452cfd6d0 Mon Sep 17 00:00:00 2001 From: yanpliu Date: Tue, 10 Oct 2023 
03:34:08 -0400 Subject: [PATCH 08/96] fixture support for virt-who config api : data_form deploy_type virtwho_config (#12620) * fixture support for virt-who config api : data_form deploy_type virtwho_config * remove unused import --- tests/foreman/virtwho/api/test_esx.py | 209 ++++++++---------- tests/foreman/virtwho/api/test_esx_sca.py | 161 +++++--------- tests/foreman/virtwho/api/test_hyperv.py | 62 +----- tests/foreman/virtwho/api/test_hyperv_sca.py | 62 +----- tests/foreman/virtwho/api/test_kubevirt.py | 128 +---------- .../foreman/virtwho/api/test_kubevirt_sca.py | 60 +---- tests/foreman/virtwho/api/test_libvirt.py | 61 +---- tests/foreman/virtwho/api/test_libvirt_sca.py | 61 +---- tests/foreman/virtwho/api/test_nutanix.py | 124 ++++------- tests/foreman/virtwho/api/test_nutanix_sca.py | 87 ++------ 10 files changed, 278 insertions(+), 737 deletions(-) diff --git a/tests/foreman/virtwho/api/test_esx.py b/tests/foreman/virtwho/api/test_esx.py index 32a0b69e374..1e0396d4ce5 100644 --- a/tests/foreman/virtwho/api/test_esx.py +++ b/tests/foreman/virtwho/api/test_esx.py @@ -16,7 +16,6 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings @@ -25,54 +24,18 @@ create_http_proxy, deploy_configure_by_command, deploy_configure_by_command_check, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, - get_guest_info, ) -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_server': settings.virtwho.esx.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() 
-def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def delete_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('delete_host') +@pytest.mark.delete_host class TestVirtWhoConfigforEsx: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, target_sat, virtwho_config_api, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -84,23 +47,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 
'ok' @@ -132,7 +83,9 @@ def test_positive_deploy_configure_by_id_script( assert result['subscription_status_label'] == 'Fully entitled' @pytest.mark.tier2 - def test_positive_debug_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_debug_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify debug option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -147,16 +100,18 @@ def test_positive_debug_option(self, default_org, form_data, virtwho_config, tar """ options = {'true': '1', 'false': '0', '1': '1', '0': '0'} for key, value in sorted(options.items(), key=lambda item: item[0]): - virtwho_config.debug = key - virtwho_config.update(['debug']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.debug = key + virtwho_config_api.update(['debug']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 - def test_positive_interval_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_interval_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify interval option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -180,17 +135,17 @@ def test_positive_interval_option(self, default_org, form_data, virtwho_config, '4320': '259200', } for key, value in sorted(options.items(), key=lambda item: int(item[0])): - virtwho_config.interval = key - virtwho_config.update(['interval']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.interval = key + virtwho_config_api.update(['interval']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( 
- command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -207,17 +162,19 @@ def test_positive_hypervisor_id_option( # esx and rhevm support hwuuid option values = ['uuid', 'hostname', 'hwuuid'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 - def test_positive_filter_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_filter_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify filter option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -236,26 +193,30 @@ def test_positive_filter_option(self, default_org, form_data, virtwho_config, ta whitelist['filter_host_parents'] = '.*redhat.com' blacklist['exclude_host_parents'] = '.*redhat.com' # Update Whitelist and check the result - virtwho_config.filtering_mode = whitelist['filtering_mode'] - virtwho_config.whitelist = whitelist['whitelist'] - virtwho_config.filter_host_parents = whitelist['filter_host_parents'] - 
virtwho_config.update(whitelist.keys()) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.filtering_mode = whitelist['filtering_mode'] + virtwho_config_api.whitelist = whitelist['whitelist'] + virtwho_config_api.filter_host_parents = whitelist['filter_host_parents'] + virtwho_config_api.update(whitelist.keys()) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('filter_hosts', config_file) == whitelist['whitelist'] assert ( get_configure_option('filter_host_parents', config_file) == whitelist['filter_host_parents'] ) # Update Blacklist and check the result - virtwho_config.filtering_mode = blacklist['filtering_mode'] - virtwho_config.blacklist = blacklist['blacklist'] - virtwho_config.exclude_host_parents = blacklist['exclude_host_parents'] - virtwho_config.update(blacklist.keys()) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.filtering_mode = blacklist['filtering_mode'] + virtwho_config_api.blacklist = blacklist['blacklist'] + virtwho_config_api.exclude_host_parents = blacklist['exclude_host_parents'] + virtwho_config_api.update(blacklist.keys()) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('exclude_hosts', config_file) == blacklist['blacklist'] assert ( 
get_configure_option('exclude_host_parents', config_file) @@ -263,7 +224,9 @@ def test_positive_filter_option(self, default_org, form_data, virtwho_config, ta ) @pytest.mark.tier2 - def test_positive_proxy_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_proxy_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """Verify http_proxy option by "PUT /foreman_virt_who_configure/api/v2/configs/:id"" @@ -278,8 +241,10 @@ def test_positive_proxy_option(self, default_org, form_data, virtwho_config, tar :BZ: 1902199 """ - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) # Check default NO_PROXY option assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == '*' # Check HTTTP Proxy and No_PROXY option @@ -287,23 +252,27 @@ def test_positive_proxy_option(self, default_org, form_data, virtwho_config, tar http_type='http', org=default_org ) no_proxy = 'test.satellite.com' - virtwho_config.http_proxy_id = http_proxy_id - virtwho_config.no_proxy = no_proxy - virtwho_config.update(['http_proxy_id', 'no_proxy']) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.http_proxy_id = http_proxy_id + virtwho_config_api.no_proxy = no_proxy + virtwho_config_api.update(['http_proxy_id', 'no_proxy']) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', 
ETC_VIRTWHO_CONFIG) == no_proxy # Check HTTTPs Proxy option https_proxy_url, https_proxy_name, https_proxy_id = create_http_proxy(org=default_org) - virtwho_config.http_proxy_id = https_proxy_id - virtwho_config.update(['http_proxy_id']) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.http_proxy_id = https_proxy_id + virtwho_config_api.update(['http_proxy_id']) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url @pytest.mark.tier2 def test_positive_configure_organization_list( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify "GET /foreman_virt_who_configure/ @@ -317,14 +286,16 @@ def test_positive_configure_organization_list( :CaseImportance: Medium """ - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) - search_result = virtwho_config.get_organization_configs(data={'per_page': '1000'}) - assert [item for item in search_result['results'] if item['name'] == form_data['name']] + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) + search_result = virtwho_config_api.get_organization_configs(data={'per_page': '1000'}) + assert [item for item in search_result['results'] if item['name'] == form_data_api['name']] @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, default_org, form_data, target_sat + self, default_org, form_data_api, target_sat ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -341,25 +312,25 @@ def 
test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor_password'] = "Tes't" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - assert virtwho_config.status == 'unknown' - command = get_configure_command(virtwho_config.id, default_org.name) + form_data_api['hypervisor_password'] = "Tes't" + virtwho_config_api = target_sat.api.VirtWhoConfig(**form_data_api).create() + assert virtwho_config_api.status == 'unknown' + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_status = deploy_configure_by_command_check(command) assert deploy_status == 'Finished successfully' - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) assert get_configure_option('rhsm_hostname', config_file) == target_sat.hostname assert ( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - virtwho_config.delete() + virtwho_config_api.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) # check the hypervisor password contains backtick - form_data['hypervisor_password'] = "my`password" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "my`password" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, default_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -372,11 +343,13 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) 
@pytest.mark.tier2 - def test_positive_remove_env_option(self, default_org, form_data, virtwho_config, target_sat): + def test_positive_remove_env_option( + self, default_org, form_data_api, virtwho_config_api, target_sat + ): """remove option 'env=' from the virt-who configuration file and without any error :id: 981b6828-a7ed-46d9-9c6c-9fb22af4011e @@ -394,19 +367,19 @@ def test_positive_remove_env_option(self, default_org, form_data, virtwho_config :BZ: 1834897 """ - command = get_configure_command(virtwho_config.id, default_org.name) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) diff --git a/tests/foreman/virtwho/api/test_esx_sca.py b/tests/foreman/virtwho/api/test_esx_sca.py index cd9cc3d3140..3a967cbded1 100644 --- a/tests/foreman/virtwho/api/test_esx_sca.py +++ b/tests/foreman/virtwho/api/test_esx_sca.py @@ -14,7 +14,6 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings @@ -23,45 +22,18 @@ create_http_proxy, deploy_configure_by_command, deploy_configure_by_command_check, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': 
gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_server': settings.virtwho.esx.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforEsx: @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, target_sat, virtwho_config_api, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -73,30 +45,17 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': 
f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_debug_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify debug option by "PUT @@ -112,17 +71,17 @@ def test_positive_debug_option( """ options = {'0': '0', '1': '1', 'false': '0', 'true': '1'} for key, value in options.items(): - virtwho_config.debug = key - virtwho_config.update(['debug']) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.debug = key + virtwho_config_api.update(['debug']) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_interval_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify interval option by "PUT @@ -147,17 +106,17 @@ def test_positive_interval_option( '4320': '259200', } for key, value in options.items(): - virtwho_config.interval = key - virtwho_config.update(['interval']) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.interval = key + virtwho_config_api.update(['interval']) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('interval', 
ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -172,12 +131,12 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @@ -185,7 +144,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.parametrize('filter_type', ['whitelist', 'blacklist']) @pytest.mark.parametrize('option_type', ['edit', 'create']) def test_positive_filter_option( - self, module_sca_manifest_org, form_data, target_sat, filter_type, option_type + self, module_sca_manifest_org, form_data_api, target_sat, filter_type, option_type ): """Verify filter option by "PUT @@ -204,7 +163,7 @@ def test_positive_filter_option( regex = '.*redhat.com' whitelist = {'filtering_mode': '1', 'whitelist': regex} blacklist = {'filtering_mode': '2', 'blacklist': regex} - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() if option_type == "edit": if filter_type == "whitelist": whitelist['filter_host_parents'] = regex @@ -220,7 +179,7 @@ def 
test_positive_filter_option( virtwho_config.update(blacklist.keys()) command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(virtwho_config.id) result = target_sat.api.VirtWhoConfig().search( @@ -245,20 +204,20 @@ def test_positive_filter_option( elif option_type == "create": virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) if filter_type == "whitelist": - form_data['filtering_mode'] = 1 - form_data['whitelist'] = regex - form_data['filter_host_parents'] = regex + form_data_api['filtering_mode'] = 1 + form_data_api['whitelist'] = regex + form_data_api['filter_host_parents'] = regex elif filter_type == "blacklist": - form_data['filtering_mode'] = 2 - form_data['blacklist'] = regex - form_data['exclude_host_parents'] = regex - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['filtering_mode'] = 2 + form_data_api['blacklist'] = regex + form_data_api['exclude_host_parents'] = regex + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(virtwho_config.id) result = target_sat.api.VirtWhoConfig().search( @@ -283,7 +242,7 @@ def test_positive_filter_option( assert result.exclude_host_parents == regex @pytest.mark.tier2 - def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_sat): + def test_positive_proxy_option(self, 
module_sca_manifest_org, form_data_api, target_sat): """Verify http_proxy option by "PUT /foreman_virt_who_configure/api/v2/configs/:id"" @@ -300,10 +259,10 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ :BZ: 1902199 """ - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) # Check default NO_PROXY option assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == '*' @@ -317,7 +276,7 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ virtwho_config.update(['http_proxy_id', 'no_proxy']) command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy @@ -332,21 +291,21 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ virtwho_config.http_proxy_id = https_proxy_id virtwho_config.update(['http_proxy_id']) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy_url virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) # Check the http proxy 
option, create virt-who config via http proxy id - form_data['http_proxy_id'] = http_proxy_id - form_data['no_proxy'] = no_proxy - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['http_proxy_id'] = http_proxy_id + form_data_api['no_proxy'] = no_proxy + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy_url assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy @@ -356,12 +315,12 @@ def test_positive_proxy_option(self, module_sca_manifest_org, form_data, target_ assert result.no_proxy == no_proxy virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) @pytest.mark.tier2 def test_positive_configure_organization_list( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify "GET /foreman_virt_who_configure/ @@ -375,16 +334,16 @@ def test_positive_configure_organization_list( :CaseImportance: Medium """ - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) - search_result = virtwho_config.get_organization_configs(data={'per_page': '1000'}) - assert [item for item in search_result['results'] if item['name'] == form_data['name']] + search_result 
= virtwho_config_api.get_organization_configs(data={'per_page': '1000'}) + assert [item for item in search_result['results'] if item['name'] == form_data_api['name']] @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, module_sca_manifest_org, form_data, target_sat + self, module_sca_manifest_org, form_data_api, target_sat ): """Verify "hammer virt-who-config deploy hypervisor with special characters" @@ -401,8 +360,8 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ # check the hypervisor password contains single quotes - form_data['hypervisor_password'] = "Tes't" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "Tes't" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -415,11 +374,11 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) # check the hypervisor password contains backtick - form_data['hypervisor_password'] = "my`password" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['hypervisor_password'] = "my`password" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_status = deploy_configure_by_command_check(command) @@ -432,12 +391,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( ) virtwho_config.delete() assert not 
target_sat.api.VirtWhoConfig().search( - query={'search': f"name={form_data['name']}"} + query={'search': f"name={form_data_api['name']}"} ) @pytest.mark.tier2 def test_positive_remove_env_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """remove option 'env=' from the virt-who configuration file and without any error @@ -456,19 +415,19 @@ def test_positive_remove_env_option( :BZ: 1834897 """ - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) diff --git a/tests/foreman/virtwho/api/test_hyperv.py b/tests/foreman/virtwho/api/test_hyperv.py index ec7c37ae6e7..e76e12f2669 100644 --- a/tests/foreman/virtwho/api/test_hyperv.py +++ b/tests/foreman/virtwho/api/test_hyperv.py @@ -16,50 +16,22 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(default_org, target_sat): - 
form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_server': settings.virtwho.hyperv.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -71,23 +43,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() 
- .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -120,7 +80,7 @@ def test_positive_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -136,11 +96,11 @@ def test_positive_hypervisor_id_option( """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_hyperv_sca.py b/tests/foreman/virtwho/api/test_hyperv_sca.py index 67f18934c8d..68dd7c0e4f1 100644 --- a/tests/foreman/virtwho/api/test_hyperv_sca.py +++ b/tests/foreman/virtwho/api/test_hyperv_sca.py @@ -16,50 +16,21 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': 
settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_server': settings.virtwho.hyperv.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -71,30 +42,17 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance 
== 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -109,11 +67,11 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_kubevirt.py b/tests/foreman/virtwho/api/test_kubevirt.py index e2fbe884f9a..88292e772db 100644 --- a/tests/foreman/virtwho/api/test_kubevirt.py +++ b/tests/foreman/virtwho/api/test_kubevirt.py @@ -16,58 +16,23 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, - get_guest_info, ) -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'kubeconfig_path': 
settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def delete_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('delete_host') +@pytest.mark.delete_host class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -79,80 +44,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) - virt_who_instance = ( - target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] - .status - ) - assert virt_who_instance == 'ok' - hosts = [ - ( - hypervisor_name, - 
f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL', - ), - ( - guest_name, - f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED', - ), - ] - for hostname, sku in hosts: - host = target_sat.cli.Host.list({'search': hostname})[0] - subscriptions = target_sat.cli.Subscription.list( - {'organization': default_org.name, 'search': sku} - ) - vdc_id = subscriptions[0]['id'] - if 'type=STACK_DERIVED' in sku: - for item in subscriptions: - if hypervisor_name.lower() in item['type']: - vdc_id = item['id'] - break - target_sat.api.HostSubscription(host=host['id']).add_subscriptions( - data={'subscriptions': [{'id': vdc_id, 'quantity': 'Automatic'}]} - ) - result = target_sat.api.Host().search(query={'search': hostname})[0].read_json() - assert result['subscription_status_label'] == 'Fully entitled' - - @pytest.mark.tier2 - def test_positive_deploy_configure_by_script( - self, default_org, form_data, virtwho_config, target_sat - ): - """Verify "GET /foreman_virt_who_configure/api/ - - v2/configs/:id/deploy_script" - - :id: 77100dc7-644a-44a4-802a-2da562246cba - - :expectedresults: Config can be created and deployed - - :CaseLevel: Integration - - :CaseImportance: High - """ - assert virtwho_config.status == 'unknown' - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -185,7 +81,7 @@ def test_positive_deploy_configure_by_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, 
form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -201,11 +97,11 @@ def test_positive_hypervisor_id_option( """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_kubevirt_sca.py b/tests/foreman/virtwho/api/test_kubevirt_sca.py index f64b719e9c4..364a637be5c 100644 --- a/tests/foreman/virtwho/api/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/api/test_kubevirt_sca.py @@ -14,48 +14,21 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'kubeconfig_path': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield 
virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -67,30 +40,17 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -105,11 +65,11 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - 
command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_libvirt.py b/tests/foreman/virtwho/api/test_libvirt.py index eb0806e5753..2d05ebd5e49 100644 --- a/tests/foreman/virtwho/api/test_libvirt.py +++ b/tests/foreman/virtwho/api/test_libvirt.py @@ -16,49 +16,22 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_server': settings.virtwho.libvirt.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + 
@pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -70,23 +43,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -119,7 +80,7 @@ def test_positive_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -135,11 +96,11 @@ def test_positive_hypervisor_id_option( """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = 
get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_libvirt_sca.py b/tests/foreman/virtwho/api/test_libvirt_sca.py index bead5b8e95f..d805b4da5ef 100644 --- a/tests/foreman/virtwho/api/test_libvirt_sca.py +++ b/tests/foreman/virtwho/api/test_libvirt_sca.py @@ -14,49 +14,21 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_option, ) -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_server': settings.virtwho.libvirt.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, 
target_sat, deploy_type + self, module_sca_manifest_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -68,30 +40,17 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT /foreman_virt_who_configure/api/v2/configs/:id" @@ -104,11 +63,11 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], 
org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/api/test_nutanix.py b/tests/foreman/virtwho/api/test_nutanix.py index 5e4d4c51f34..d9ddc34938f 100644 --- a/tests/foreman/virtwho/api/test_nutanix.py +++ b/tests/foreman/virtwho/api/test_nutanix.py @@ -16,7 +16,6 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings @@ -27,53 +26,16 @@ get_configure_command, get_configure_file, get_configure_option, - get_guest_info, get_hypervisor_ahv_mapping, ) -@pytest.fixture() -def form_data(default_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_server': settings.virtwho.ahv.hypervisor_server, - 'organization_id': default_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_password': settings.virtwho.ahv.hypervisor_password, - 'prism_flavor': settings.virtwho.ahv.prism_flavor, - 'ahv_internal_debug': 'false', - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - -@pytest.fixture(autouse=True) -def delete_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('delete_host') +@pytest.mark.delete_host class TestVirtWhoConfigforNutanix: 
@pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -85,23 +47,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, default_org.name) - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - hypervisor_name, guest_name = deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - debug=True, - org=default_org.label, - ) + assert virtwho_config_api.status == 'unknown' + hypervisor_name, guest_name = deploy_type_api virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @@ -138,7 +88,7 @@ def test_positive_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -154,19 +104,19 @@ def test_positive_hypervisor_id_option( """ values = ['uuid', 'hostname'] for value in values: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) + 
virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_api['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, default_org, form_data, target_sat, deploy_type + self, default_org, form_data_api, target_sat, deploy_type ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" on nutanix prism central mode @@ -180,19 +130,19 @@ def test_positive_prism_central_deploy_configure_by_id_script( :CaseImportance: High """ - form_data['prism_flavor'] = "central" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() + form_data_api['prism_flavor'] = "central" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' if deploy_type == "id": command = get_configure_command(virtwho_config.id, default_org.name) hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) elif deploy_type == "script": script = virtwho_config.deploy_script() hypervisor_name, guest_name = deploy_configure_by_script( script['virt_who_config_script'], - form_data['hypervisor_type'], + form_data_api['hypervisor_type'], debug=True, org=default_org.label, ) @@ -238,7 +188,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_central_prism_central_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, 
form_data_api, virtwho_config_api, target_sat ): """Verify prism_flavor option by "PUT @@ -253,16 +203,18 @@ def test_positive_prism_central_prism_central_option( :CaseImportance: Medium """ value = 'central' - virtwho_config.prism_flavor = value - virtwho_config.update(['prism_flavor']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, default_org.name) - deploy_configure_by_command(command, form_data['hypervisor_type'], org=default_org.label) + virtwho_config_api.prism_flavor = value + virtwho_config_api.update(['prism_flavor']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, default_org.name) + deploy_configure_by_command( + command, form_data_api['hypervisor_type'], org=default_org.label + ) assert get_configure_option("prism_central", config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, default_org, form_data, virtwho_config, target_sat + self, default_org, form_data_api, virtwho_config_api, target_sat ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -284,18 +236,18 @@ def test_positive_ahv_internal_debug_option( :customerscenario: true """ - command = get_configure_command(virtwho_config.id, default_org.name) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label ) result = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .ahv_internal_debug ) assert str(result) == 'False' # ahv_internal_debug does not set in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) option = 
'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" try: @@ -308,21 +260,23 @@ def test_positive_ahv_internal_debug_option( # Update ahv_internal_debug option to true value = 'true' - virtwho_config.ahv_internal_debug = value - virtwho_config.update(['ahv_internal_debug']) - command = get_configure_command(virtwho_config.id, default_org.name) + virtwho_config_api.ahv_internal_debug = value + virtwho_config_api.update(['ahv_internal_debug']) + command = get_configure_command(virtwho_config_api.id, default_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_api['hypervisor_type'], debug=True, org=default_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_api['hypervisor_type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' result = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .ahv_internal_debug ) assert str(result) == 'True' # ahv_internal_debug bas been set to true in virt-who-config-X.conf - config_file = get_configure_file(virtwho_config.id) + config_file = get_configure_file(virtwho_config_api.id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' # check message does not exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' diff --git a/tests/foreman/virtwho/api/test_nutanix_sca.py b/tests/foreman/virtwho/api/test_nutanix_sca.py index 05287279231..c075db7f795 100644 --- a/tests/foreman/virtwho/api/test_nutanix_sca.py +++ b/tests/foreman/virtwho/api/test_nutanix_sca.py @@ -16,10 +16,8 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( 
deploy_configure_by_command, deploy_configure_by_script, @@ -29,38 +27,11 @@ ) -@pytest.fixture() -def form_data(module_sca_manifest_org, target_sat): - form = { - 'name': gen_string('alpha'), - 'debug': 1, - 'interval': '60', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_server': settings.virtwho.ahv.hypervisor_server, - 'organization_id': module_sca_manifest_org.id, - 'filtering_mode': 'none', - 'satellite_url': target_sat.hostname, - 'hypervisor_username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_password': settings.virtwho.ahv.hypervisor_password, - 'prism_flavor': settings.virtwho.ahv.prism_flavor, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat): - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() - yield virtwho_config - virtwho_config.delete() - assert not target_sat.api.VirtWhoConfig().search(query={'search': f"name={form_data['name']}"}) - - class TestVirtWhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_api', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat, deploy_type + self, default_org, virtwho_config_api, target_sat, deploy_type_api ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" @@ -72,30 +43,17 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - assert virtwho_config.status == 'unknown' - if deploy_type == "id": - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) - deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label - ) - elif deploy_type == "script": - script = virtwho_config.deploy_script() - deploy_configure_by_script( - script['virt_who_config_script'], - form_data['hypervisor_type'], - 
debug=True, - org=module_sca_manifest_org.label, - ) + assert virtwho_config_api.status == 'unknown' virt_who_instance = ( target_sat.api.VirtWhoConfig() - .search(query={'search': f'name={virtwho_config.name}'})[0] + .search(query={'search': f'name={virtwho_config_api.name}'})[0] .status ) assert virt_who_instance == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify hypervisor_id option by "PUT @@ -110,19 +68,19 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ for value in ['uuid', 'hostname']: - virtwho_config.hypervisor_id = value - virtwho_config.update(['hypervisor_id']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.hypervisor_id = value + virtwho_config_api.update(['hypervisor_id']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, module_sca_manifest_org, form_data, target_sat, deploy_type + self, module_sca_manifest_org, form_data_api, target_sat, deploy_type ): """Verify "POST /foreman_virt_who_configure/api/v2/configs" on nutanix prism central mode @@ -136,19 +94,22 @@ def test_positive_prism_central_deploy_configure_by_id_script( :CaseImportance: High """ - form_data['prism_flavor'] = "central" - virtwho_config = target_sat.api.VirtWhoConfig(**form_data).create() 
+ form_data_api['prism_flavor'] = "central" + virtwho_config = target_sat.api.VirtWhoConfig(**form_data_api).create() assert virtwho_config.status == 'unknown' if deploy_type == "id": command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, + form_data_api['hypervisor_type'], + debug=True, + org=module_sca_manifest_org.label, ) elif deploy_type == "script": script = virtwho_config.deploy_script() deploy_configure_by_script( script['virt_who_config_script'], - form_data['hypervisor_type'], + form_data_api['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -164,7 +125,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( @pytest.mark.tier2 def test_positive_prism_central_prism_central_option( - self, module_sca_manifest_org, form_data, virtwho_config, target_sat + self, module_sca_manifest_org, form_data_api, virtwho_config_api, target_sat ): """Verify prism_flavor option by "PUT @@ -179,11 +140,11 @@ def test_positive_prism_central_prism_central_option( :CaseImportance: Medium """ value = 'central' - virtwho_config.prism_flavor = value - virtwho_config.update(['prism_flavor']) - config_file = get_configure_file(virtwho_config.id) - command = get_configure_command(virtwho_config.id, module_sca_manifest_org.name) + virtwho_config_api.prism_flavor = value + virtwho_config_api.update(['prism_flavor']) + config_file = get_configure_file(virtwho_config_api.id) + command = get_configure_command(virtwho_config_api.id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_api['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option("prism_central", config_file) == 'true' From 9e851610c43e50ab8e472c1fd341394c0b16fd3c Mon Sep 17 00:00:00 2001 From: Jitendra Yejare 
Date: Tue, 10 Oct 2023 13:26:28 +0530 Subject: [PATCH 09/96] Optional pytest Vault login and code nitpicking (#12822) * Optional pytest Vault login and code formating * Multiple options supported for non-vault pytest session --- robottelo/utils/vault.py | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/robottelo/utils/vault.py b/robottelo/utils/vault.py index a4b5d48adb4..b0fc77d861e 100644 --- a/robottelo/utils/vault.py +++ b/robottelo/utils/vault.py @@ -19,25 +19,31 @@ class Vault: def __init__(self, env_file='.env'): self.env_path = robottelo_root_dir.joinpath(env_file) + self.envdata = None + self.vault_enabled = None def setup(self): - self.export_vault_addr() + if self.env_path.exists(): + self.envdata = self.env_path.read_text() + is_enabled = re.findall('\nVAULT_ENABLED_FOR_DYNACONF=(.*)', self.envdata) + if is_enabled: + self.vault_enabled = is_enabled[0] + self.export_vault_addr() def teardown(self): del os.environ['VAULT_ADDR'] def export_vault_addr(self): - envdata = self.env_path.read_text() - vaulturl = re.findall('VAULT_URL_FOR_DYNACONF=(.*)', envdata)[0] + vaulturl = re.findall('VAULT_URL_FOR_DYNACONF=(.*)', self.envdata)[0] # Set Vault CLI Env Var os.environ['VAULT_ADDR'] = vaulturl # Dynaconf Vault Env Vars - if re.findall('VAULT_ENABLED_FOR_DYNACONF=(.*)', envdata)[0] == 'true': + if self.vault_enabled and self.vault_enabled in ['True', 'true']: if 'localhost:8200' in vaulturl: raise InvalidVaultURLForOIDC( - f"{vaulturl} doesnt supports OIDC login," + f"{vaulturl} doesn't support OIDC login," "please change url to corp vault in env file!" 
) @@ -63,7 +69,11 @@ def exec_vault_command(self, command: str, **kwargs): return vcommand def login(self, **kwargs): - if 'VAULT_SECRET_ID_FOR_DYNACONF' not in os.environ: + if ( + self.vault_enabled + and self.vault_enabled in ['True', 'true'] + and 'VAULT_SECRET_ID_FOR_DYNACONF' not in os.environ + ): if self.status(**kwargs).returncode != 0: logger.warning( "Warning! The browser is about to open for vault OIDC login, " @@ -81,22 +91,22 @@ def login(self, **kwargs): ).stdout token = json.loads(str(token.decode('UTF-8')))['data']['id'] # Setting new token in env file - envdata = self.env_path.read_text() - envdata = re.sub( - '.*VAULT_TOKEN_FOR_DYNACONF=.*', f"VAULT_TOKEN_FOR_DYNACONF={token}", envdata + _envdata = re.sub( + '.*VAULT_TOKEN_FOR_DYNACONF=.*', + f"VAULT_TOKEN_FOR_DYNACONF={token}", + self.envdata, ) - self.env_path.write_text(envdata) + self.env_path.write_text(_envdata) logger.info( "Success! New OIDC token added to .env file to access secrets from vault!" ) def logout(self): # Teardown - Setting dymmy token in env file - envdata = self.env_path.read_text() - envdata = re.sub( - '.*VAULT_TOKEN_FOR_DYNACONF=.*', "# VAULT_TOKEN_FOR_DYNACONF=myroot", envdata + _envdata = re.sub( + '.*VAULT_TOKEN_FOR_DYNACONF=.*', "# VAULT_TOKEN_FOR_DYNACONF=myroot", self.envdata ) - self.env_path.write_text(envdata) + self.env_path.write_text(_envdata) self.exec_vault_command('vault token revoke -self') logger.info("Success! 
OIDC token removed from Env file successfully!") From 29fc81ab09becc40a3e6e57b8a5ac2fec80cd8fb Mon Sep 17 00:00:00 2001 From: Griffin Sullivan Date: Thu, 5 Oct 2023 11:14:28 -0400 Subject: [PATCH 10/96] Fixing loadbalancer e2e test --- .../destructive/test_capsule_loadbalancer.py | 77 +++++++------------ 1 file changed, 27 insertions(+), 50 deletions(-) diff --git a/tests/foreman/destructive/test_capsule_loadbalancer.py b/tests/foreman/destructive/test_capsule_loadbalancer.py index 3d9fdf02604..04ba4614438 100644 --- a/tests/foreman/destructive/test_capsule_loadbalancer.py +++ b/tests/foreman/destructive/test_capsule_loadbalancer.py @@ -17,6 +17,7 @@ :Upstream: No """ import pytest +from wrapanapi import VmState from robottelo.config import settings from robottelo.constants import CLIENT_PORT, DataFile @@ -175,20 +176,20 @@ def loadbalancer_setup( @pytest.mark.e2e @pytest.mark.tier1 -def test_loadbalancer_register_client_using_ak_to_ha_proxy(loadbalancer_setup, rhel7_contenthost): - """Register the client using ak to the haproxy +def test_loadbalancer_install_package( + loadbalancer_setup, setup_capsules, rhel7_contenthost, module_org, module_location, request +): + r"""Install packages on a content host regardless of the registered capsule being available :id: bd3c2e50-18e2-4be7-8a7f-c32472e17c61 :Steps: 1. run `subscription-manager register --org=Your_Organization \ - --activationkey=Your_Activation_Key \ - --serverurl=https://loadbalancer.example.com:8443/rhsm \ - --baseurl=https://loadbalancer.example.com/pulp/content` - 2. Check which capsule the host got registered. - 3. Try package installation - 4. Remove the package and unregister the host - 5. Again register, verify it's the other capsule this time. + --activationkey=Your_Activation_Key \` + 2. Try package installation + 3. Check which capsule the host got registered. + 4. Remove the package + 5. Take down the capsule that the host was registered to 6. 
Try package installation again :expectedresults: The client should be get the package irrespective of the capsule @@ -196,20 +197,17 @@ def test_loadbalancer_register_client_using_ak_to_ha_proxy(loadbalancer_setup, r :CaseLevel: Integration """ - url = f'https://{loadbalancer_setup["setup_haproxy"]["haproxy"].hostname}' - server_url = f'{url}:8443/rhsm' - base_url = f'{url}/pulp/content' - - result = rhel7_contenthost.download_install_rpm( - repo_url=f'{url}/pub', package_name='katello-ca-consumer-latest.noarch' - ) - assert result.status == 0 - rhel7_contenthost.register_contenthost( - org=loadbalancer_setup['module_org'].label, - activation_key=loadbalancer_setup['content_for_client']['client_ak'].name, - serverurl=server_url, - baseurl=base_url, + # Register content host + result = rhel7_contenthost.register( + org=module_org, + loc=module_location, + activation_keys=loadbalancer_setup['content_for_client']['client_ak'].name, + target=setup_capsules['capsule_1'], + force=True, ) + assert result.status == 0, f'Failed to register host: {result.stderr}' + + # Try package installation result = rhel7_contenthost.execute('yum install -y tree') assert result.status == 0 @@ -227,42 +225,21 @@ def test_loadbalancer_register_client_using_ak_to_ha_proxy(loadbalancer_setup, r if loadbalancer_setup['setup_capsules']['capsule_1'].hostname in result.stdout else loadbalancer_setup['setup_capsules']['capsule_2'] ) - # Find the other capsule - for capsule in loadbalancer_setup['setup_capsules'].values(): - if registered_to_capsule != capsule: - other_capsule = capsule + # Remove the packages from the client result = rhel7_contenthost.execute('yum remove -y tree') assert result.status == 0 - # For other capsule - rhel7_contenthost.remove_katello_ca() - rhel7_contenthost.unregister() - - result = rhel7_contenthost.execute('rm -f katello-ca-consumer-latest.noarch.rpm') - assert result.status == 0 - - result = rhel7_contenthost.download_install_rpm( - repo_url=f'{url}/pub', 
package_name='katello-ca-consumer-latest.noarch' - ) - assert result.status == 0 - - rhel7_contenthost.register_contenthost( - org=loadbalancer_setup['module_org'].label, - activation_key=loadbalancer_setup['content_for_client']['client_ak'].name, - serverurl=server_url, - baseurl=base_url, - ) - result = rhel7_contenthost.execute('rpm -qa | grep katello-ca-consumer') - assert other_capsule.hostname in result.stdout + # Power off the capsule that the client is registered to + registered_to_capsule.power_control(state=VmState.STOPPED, ensure=True) + # Try package installation again result = rhel7_contenthost.execute('yum install -y tree') assert result.status == 0 - hosts = loadbalancer_setup['module_target_sat'].cli.Host.list( - {'organization-id': loadbalancer_setup['module_org'].id} - ) - assert rhel7_contenthost.hostname in [host['name'] for host in hosts] + @request.addfinalizer + def _finalize(): + registered_to_capsule.power_control(state=VmState.RUNNING, ensure=True) @pytest.mark.rhel_ver_match('[^6]') From a1dade860f307866900cd533b3663cfb4ed4b6fc Mon Sep 17 00:00:00 2001 From: yanpliu Date: Sat, 16 Sep 2023 05:06:59 -0400 Subject: [PATCH 11/96] fixture support for virt-who config ui : data_form deploy_type virtwho_config --- tests/foreman/virtwho/ui/test_esx.py | 346 ++++++++---------- tests/foreman/virtwho/ui/test_esx_sca.py | 289 ++++++--------- tests/foreman/virtwho/ui/test_hyperv.py | 74 +--- tests/foreman/virtwho/ui/test_hyperv_sca.py | 62 +--- tests/foreman/virtwho/ui/test_kubevirt.py | 72 +--- tests/foreman/virtwho/ui/test_kubevirt_sca.py | 60 +-- tests/foreman/virtwho/ui/test_libvirt.py | 73 +--- tests/foreman/virtwho/ui/test_libvirt_sca.py | 61 +-- tests/foreman/virtwho/ui/test_nutanix.py | 148 +++----- tests/foreman/virtwho/ui/test_nutanix_sca.py | 117 ++---- 10 files changed, 457 insertions(+), 845 deletions(-) diff --git a/tests/foreman/virtwho/ui/test_esx.py b/tests/foreman/virtwho/ui/test_esx.py index f38590b8ddf..2715febb34f 100644 --- 
a/tests/foreman/virtwho/ui/test_esx.py +++ b/tests/foreman/virtwho/ui/test_esx.py @@ -31,57 +31,22 @@ delete_configure_option, deploy_configure_by_command, deploy_configure_by_command_check, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, get_configure_option, - get_guest_info, get_virtwho_status, restart_virtwho_service, update_configure_option, ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.esx.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.esx.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - -@pytest.fixture(autouse=True) -def delete_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.mark.usefixtures('delete_host') +@pytest.mark.delete_host class TestVirtwhoConfigforEsx: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, virtwho_config, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id|script. 
@@ -98,37 +63,28 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert 
session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_debug_option(self, default_org, virtwho_config, session, form_data): + def test_positive_debug_option(self, default_org, virtwho_config_ui, org_session, form_data_ui): """Verify debug checkbox and the value changes of VIRTWHO_DEBUG :id: adb435c4-d02b-47b6-89f5-dce9a4ff7939 @@ -141,23 +97,25 @@ def test_positive_debug_option(self, default_org, virtwho_config, session, form_ :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '1' - session.virtwho_configure.edit(name, {'debug': False}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'debug': False}) + results = org_session.virtwho_configure.read(name) assert results['overview']['debug'] is False deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '0' @pytest.mark.tier2 - def test_positive_interval_option(self, default_org, virtwho_config, session, form_data): + def test_positive_interval_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify interval dropdown options and the value changes of VIRTWHO_INTERVAL. 
:id: 731f8361-38d4-40b9-9530-8d785d61eaab @@ -170,7 +128,7 @@ def test_positive_interval_option(self, default_org, virtwho_config, session, fo :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) intervals = { @@ -184,16 +142,18 @@ def test_positive_interval_option(self, default_org, virtwho_config, session, fo 'Every 3 days': '259200', } for option, value in sorted(intervals.items(), key=lambda item: int(item[1])): - session.virtwho_configure.edit(name, {'interval': option}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'interval': option}) + results = org_session.virtwho_configure.read(name) assert results['overview']['interval'] == option deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. 
:id: cc494bd9-51d9-452a-bfa9-5cdcafef5197 @@ -206,23 +166,25 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) # esx and rhevm support hwuuid option values = ['uuid', 'hostname', 'hwuuid'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 - def test_positive_filtering_option(self, default_org, virtwho_config, session, form_data): + def test_positive_filtering_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Filtering dropdown options. 
:id: e17dda14-79cd-4cd2-8f29-60970b24a905 @@ -237,7 +199,7 @@ def test_positive_filtering_option(self, default_org, virtwho_config, session, f :BZ: 1735670 """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) @@ -248,28 +210,28 @@ def test_positive_filtering_option(self, default_org, virtwho_config, session, f whitelist['filtering_content.filter_host_parents'] = regex blacklist['filtering_content.exclude_host_parents'] = regex # Update Whitelist and check the result - session.virtwho_configure.edit(name, whitelist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, whitelist) + results = org_session.virtwho_configure.read(name) assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert regex == get_configure_option('filter_hosts', config_file) assert regex == get_configure_option('filter_host_parents', config_file) # Update Blacklist and check the result - session.virtwho_configure.edit(name, blacklist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) assert results['overview']['exclude_hosts'] == regex assert results['overview']['exclude_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert regex == get_configure_option('exclude_hosts', config_file) assert regex == get_configure_option('exclude_host_parents', config_file) @pytest.mark.tier2 - def test_positive_proxy_option(self, 
default_org, virtwho_config, session, form_data): + def test_positive_proxy_option(self, default_org, virtwho_config_ui, org_session, form_data_ui): """Verify 'HTTP Proxy' and 'Ignore Proxy' options. :id: 6659d577-0135-4bf0-81af-14b930011536 @@ -285,31 +247,31 @@ def test_positive_proxy_option(self, default_org, virtwho_config, session, form_ http_proxy, http_proxy_name, http_proxy_id = create_http_proxy( http_type='http', org=default_org ) - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) no_proxy = 'test.satellite.com' # Check the https proxy and No_PROXY settings - session.virtwho_configure.edit(name, {'proxy': https_proxy, 'no_proxy': no_proxy}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'proxy': https_proxy, 'no_proxy': no_proxy}) + results = org_session.virtwho_configure.read(name) assert results['overview']['proxy'] == https_proxy assert results['overview']['no_proxy'] == no_proxy deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('https_proxy', ETC_VIRTWHO_CONFIG) == https_proxy assert get_configure_option('no_proxy', ETC_VIRTWHO_CONFIG) == no_proxy # Check the http proxy setting - session.virtwho_configure.edit(name, {'proxy': http_proxy}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'proxy': http_proxy}) + results = org_session.virtwho_configure.read(name) assert results['overview']['proxy'] == http_proxy deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('http_proxy', ETC_VIRTWHO_CONFIG) == http_proxy @pytest.mark.tier2 - def 
test_positive_virtwho_roles(self, session): + def test_positive_virtwho_roles(self, org_session): """Verify the default roles for virtwho configure :id: cd6a5363-f9ba-4b52-892c-905634168fc5 @@ -337,14 +299,14 @@ def test_positive_virtwho_roles(self, session): }, 'Virt-who Viewer': {'Satellite virt who configure/config': ['view_virt_who_config']}, } - with session: + with org_session: for role_name, role_filters in roles.items(): - assert session.role.search(role_name)[0]['Name'] == role_name - assigned_permissions = session.filter.read_permissions(role_name) + assert org_session.role.search(role_name)[0]['Name'] == role_name + assigned_permissions = org_session.filter.read_permissions(role_name) assert sorted(assigned_permissions) == sorted(role_filters) @pytest.mark.tier2 - def test_positive_virtwho_configs_widget(self, default_org, session, form_data): + def test_positive_virtwho_configs_widget(self, default_org, org_session, form_data_ui): """Check if Virt-who Configurations Status Widget is working in the Dashboard UI :id: 5d61ce00-a640-4823-89d4-7b1d02b50ea6 @@ -363,38 +325,40 @@ def test_positive_virtwho_configs_widget(self, default_org, session, form_data): """ org_name = gen_string('alpha') name = gen_string('alpha') - form_data['name'] = name - with session: - session.organization.create({'name': org_name}) - session.organization.select(org_name) - session.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.organization.create({'name': org_name}) + org_session.organization.select(org_name) + org_session.virtwho_configure.create(form_data_ui) expected_values = [ {'Configuration Status': 'No Reports', 'Count': '1'}, {'Configuration Status': 'No Change', 'Count': '0'}, {'Configuration Status': 'OK', 'Count': '0'}, {'Configuration Status': 'Total Configurations', 'Count': '1'}, ] - values = session.dashboard.read('VirtWhoConfigStatus') + values = org_session.dashboard.read('VirtWhoConfigStatus') assert 
values['config_status'] == expected_values assert values['latest_config'] == 'No configuration found' # Check the 'Status' changed after deployed the virt-who config config_id = get_configure_id(name) config_command = get_configure_command(config_id, org_name) - deploy_configure_by_command(config_command, form_data['hypervisor_type'], org=org_name) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' + deploy_configure_by_command( + config_command, form_data_ui['hypervisor_type'], org=org_name + ) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' expected_values = [ {'Configuration Status': 'No Reports', 'Count': '0'}, {'Configuration Status': 'No Change', 'Count': '0'}, {'Configuration Status': 'OK', 'Count': '1'}, {'Configuration Status': 'Total Configurations', 'Count': '1'}, ] - values = session.dashboard.read('VirtWhoConfigStatus') + values = org_session.dashboard.read('VirtWhoConfigStatus') assert values['config_status'] == expected_values assert values['latest_config'] == 'No configuration found' - session.organization.select("Default Organization") + org_session.organization.select("Default Organization") @pytest.mark.tier2 - def test_positive_delete_configure(self, default_org, session, form_data): + def test_positive_delete_configure(self, default_org, org_session, form_data_ui): """Verify when a config is deleted the associated user is deleted. 
:id: 0e66dcf6-dc64-4fb2-b8a9-518f5adfa800 @@ -410,22 +374,24 @@ def test_positive_delete_configure(self, default_org, session, form_data): """ name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) restart_virtwho_service() assert get_virtwho_status() == 'logerror' @pytest.mark.tier2 - def test_positive_virtwho_reporter_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_reporter_role( + self, default_org, org_session, test_name, form_data_ui + ): """Verify the virt-who reporter role can TRULY work. 
:id: cd235ab0-d89c-464b-98d6-9d090ac40d8f @@ -438,9 +404,9 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create an user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -450,14 +416,14 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Update the virt-who config file config_id = get_configure_id(config_name) config_file = get_configure_file(config_id) @@ -467,19 +433,19 @@ def test_positive_virtwho_reporter_role(self, default_org, session, test_name, f restart_virtwho_service() assert get_virtwho_status() == 'logerror' # Check the permissioin of Virt-who Reporter - session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) - assert session.user.search(username)[0]['Username'] == username - user = session.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) + assert org_session.user.search(username)[0]['Username'] == username + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Reporter'] 
restart_virtwho_service() assert get_virtwho_status() == 'running' with Session(test_name, username, password) as newsession: assert not newsession.virtwho_configure.check_create_permission()['can_view'] - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_virtwho_viewer_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_viewer_role(self, default_org, org_session, test_name, form_data_ui): """Verify the virt-who viewer role can TRULY work. :id: bf3be2e4-3853-41cc-9b3e-c8677f0b8c5f @@ -492,9 +458,9 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create an user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -504,17 +470,17 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permissioin of Virt-who Viewer - session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) - user = session.user.read(username) + 
org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Viewer'] # Update the virt-who config file config_id = get_configure_id(config_name) @@ -535,11 +501,11 @@ def test_positive_virtwho_viewer_role(self, default_org, session, test_name, for assert not update_permission['can_edit'] newsession.virtwho_configure.read(config_name) # Delete the created user - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_virtwho_manager_role(self, default_org, session, test_name, form_data): + def test_positive_virtwho_manager_role(self, default_org, org_session, test_name, form_data_ui): """Verify the virt-who manager role can TRULY work. :id: a72023fb-7b23-4582-9adc-c5227dc7859c @@ -551,9 +517,9 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session: + with org_session: # Create an user - session.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -563,28 +529,28 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo } ) # Create a virt-who config plugin - form_data['name'] = config_name - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) - assert 
session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permissioin of Virt-who Manager - session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) - user = session.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Manager'] with Session(test_name, username, password) as newsession: # create_virt_who_config new_virt_who_name = gen_string('alpha') - form_data['name'] = new_virt_who_name - newsession.virtwho_configure.create(form_data) + form_data_ui['name'] = new_virt_who_name + newsession.virtwho_configure.create(form_data_ui) # view_virt_who_config values = newsession.virtwho_configure.read(new_virt_who_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=default_org.label + command, form_data_ui['hypervisor_type'], org=default_org.label ) assert newsession.virtwho_configure.search(new_virt_who_name)[0]['Status'] == 'ok' # edit_virt_who_config @@ -595,11 +561,11 @@ def test_positive_virtwho_manager_role(self, default_org, session, test_name, fo newsession.virtwho_configure.delete(modify_name) assert not newsession.virtwho_configure.search(modify_name) # Delete the created user - session.user.delete(username) - assert not session.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 - def test_positive_overview_label_name(self, default_org, form_data, session): + def test_positive_overview_label_name(self, default_org, form_data_ui, org_session): """Verify the label name on virt-who config Overview Page. 
:id: 21df8175-bb41-422e-a263-8677bc3a9565 @@ -613,20 +579,20 @@ def test_positive_overview_label_name(self, default_org, form_data, session): :CaseImportance: Medium """ name = gen_string('alpha') - form_data['name'] = name - hypervisor_type = form_data['hypervisor_type'] + form_data_ui['name'] = name + hypervisor_type = form_data_ui['hypervisor_type'] http_proxy_url, proxy_name, proxy_id = create_http_proxy(org=default_org) - form_data['proxy'] = http_proxy_url - form_data['no_proxy'] = 'test.satellite.com' + form_data_ui['proxy'] = http_proxy_url + form_data_ui['no_proxy'] = 'test.satellite.com' regex = '.*redhat.com' whitelist = {'filtering': 'Whitelist', 'filtering_content.filter_hosts': regex} blacklist = {'filtering': 'Blacklist', 'filtering_content.exclude_hosts': regex} if hypervisor_type == 'esx': whitelist['filtering_content.filter_host_parents'] = regex blacklist['filtering_content.exclude_host_parents'] = regex - form_data = dict(form_data, **whitelist) - with session: - session.virtwho_configure.create(form_data) + form_data = dict(form_data_ui, **whitelist) + with org_session: + org_session.virtwho_configure.create(form_data) fields = { 'status_label': 'Status', 'hypervisor_type_label': 'Hypervisor Type', @@ -647,11 +613,11 @@ def test_positive_overview_label_name(self, default_org, form_data, session): fields['kubeconfig_path_label'] = 'Kubeconfig Path' if hypervisor_type == 'esx': fields['filter_host_parents_label'] = 'Filter Host Parents' - results = session.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) for key, value in fields.items(): assert results['overview'][key] == value - session.virtwho_configure.edit(name, blacklist) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) del fields['filter_hosts_label'] if hypervisor_type == 'esx': del fields['filter_host_parents_label'] @@ -661,7 +627,9 @@ def 
test_positive_overview_label_name(self, default_org, form_data, session): assert results['overview'][key] == value @pytest.mark.tier2 - def test_positive_last_checkin_status(self, default_org, virtwho_config, form_data, session): + def test_positive_last_checkin_status( + self, default_org, virtwho_config_ui, form_data_ui, org_session + ): """Verify the Last Checkin status on Content Hosts Page. :id: 7448d482-d05c-4727-8980-176586e9e4a7 @@ -676,15 +644,15 @@ def test_positive_last_checkin_status(self, default_org, virtwho_config, form_da :CaseImportance: Medium """ - name = form_data['name'] - values = session.virtwho_configure.read(name, widget_names='deploy.command') + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name, widget_names='deploy.command') command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - time_now = session.browser.get_client_datetime() - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - checkin_time = session.contenthost.search(hypervisor_name)[0]['Last Checkin'] + time_now = org_session.browser.get_client_datetime() + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + checkin_time = org_session.contenthost.search(hypervisor_name)[0]['Last Checkin'] # 10 mins margin to check the Last Checkin time assert ( abs( @@ -698,7 +666,7 @@ def test_positive_last_checkin_status(self, default_org, virtwho_config, form_da @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, default_org, form_data, target_sat, session + self, default_org, form_data_ui, target_sat, org_session ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -715,12 +683,12 @@ def 
test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name - with session: + form_data_ui['name'] = name + with org_session: # check the hypervisor password contains single quotes - form_data['hypervisor_content.password'] = "Tes't" - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "Tes't" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -731,12 +699,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) # check the hypervisor password contains backtick - form_data['hypervisor_content.password'] = "my`password" - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "my`password" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -747,12 +715,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) 
@pytest.mark.tier2 def test_positive_remove_env_option( - self, default_org, virtwho_config, form_data, target_sat, session + self, default_org, virtwho_config_ui, form_data_ui, target_sat, org_session ): """remove option 'env=' from the virt-who configuration file and without any error @@ -770,13 +738,13 @@ def test_positive_remove_env_option( :customerscenario: true """ - name = form_data['name'] - values = session.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_id = get_configure_id(name) diff --git a/tests/foreman/virtwho/ui/test_esx_sca.py b/tests/foreman/virtwho/ui/test_esx_sca.py index 6abda09c6e4..f8eb03fea98 100644 --- a/tests/foreman/virtwho/ui/test_esx_sca.py +++ b/tests/foreman/virtwho/ui/test_esx_sca.py @@ -28,58 +28,23 @@ delete_configure_option, deploy_configure_by_command, deploy_configure_by_command_check, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, get_configure_option, - get_guest_info, get_virtwho_status, restart_virtwho_service, update_configure_option, ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.esx.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.esx.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.esx.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.esx.hypervisor_password, - } - return form - - 
-@pytest.fixture(autouse=True) -def clean_host(form_data, target_sat): - guest_name, _ = get_guest_info(form_data['hypervisor_type']) - results = target_sat.api.Host().search(query={'search': guest_name}) - if results: - target_sat.api.Host(id=results[0].read_json()['id']).delete() - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - -@pytest.mark.usefixtures('clean_host') +@pytest.mark.delete_host class TestVirtwhoConfigforEsx: @pytest.mark.tier2 @pytest.mark.upgrade - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -96,29 +61,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_debug_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify debug checkbox and the value changes of VIRTWHO_DEBUG @@ -132,24 +79,24 @@ def test_positive_debug_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '1' - session_sca.virtwho_configure.edit(name, {'debug': False}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'debug': False}) + results = org_session.virtwho_configure.read(name) assert results['overview']['debug'] is False deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert 
get_configure_option('debug', ETC_VIRTWHO_CONFIG) == '0' @pytest.mark.tier2 def test_positive_interval_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify interval dropdown options and the value changes of VIRTWHO_INTERVAL. @@ -163,7 +110,7 @@ def test_positive_interval_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) intervals = { @@ -177,17 +124,17 @@ def test_positive_interval_option( 'Every 3 days': '259200', } for option, value in intervals.items(): - session_sca.virtwho_configure.edit(name, {'interval': option}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'interval': option}) + results = org_session.virtwho_configure.read(name) assert results['overview']['interval'] == option deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('interval', ETC_VIRTWHO_CONFIG) == value @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. 
@@ -201,17 +148,17 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) # esx and rhevm support hwuuid option for value in ['uuid', 'hostname', 'hwuuid']: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @@ -219,7 +166,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.parametrize('filter_type', ['whitelist', 'blacklist']) @pytest.mark.parametrize('option_type', ['edit', 'create']) def test_positive_filtering_option( - self, module_sca_manifest_org, session_sca, form_data, filter_type, option_type + self, module_sca_manifest_org, org_session, form_data_ui, filter_type, option_type ): """Verify Filtering dropdown options. 
@@ -243,11 +190,11 @@ def test_positive_filtering_option( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name + form_data_ui['name'] = name regex = '.*redhat.com' - with session_sca: + with org_session: if option_type == "edit": - session_sca.virtwho_configure.create(form_data) + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) @@ -256,19 +203,21 @@ def test_positive_filtering_option( # esx support filter-host-parents and exclude-host-parents options whitelist['filtering_content.filter_host_parents'] = regex # Update Whitelist and check the result - session_sca.virtwho_configure.edit(name, whitelist) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, whitelist) + results = org_session.virtwho_configure.read(name) assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex elif filter_type == "blacklist": blacklist = {'filtering': 'Blacklist', 'filtering_content.exclude_hosts': regex} blacklist['filtering_content.exclude_host_parents'] = regex - session_sca.virtwho_configure.edit(name, blacklist) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, blacklist) + results = org_session.virtwho_configure.read(name) assert results['overview']['exclude_hosts'] == regex assert results['overview']['exclude_host_parents'] == regex deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, + form_data_ui['hypervisor_type'], + org=module_sca_manifest_org.label, ) if filter_type == "whitelist": assert regex == get_configure_option('filter_hosts', config_file) @@ -276,25 +225,25 @@ def test_positive_filtering_option( elif filter_type == "blacklist": assert regex == 
get_configure_option('exclude_hosts', config_file) assert regex == get_configure_option('exclude_host_parents', config_file) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) elif option_type == "create": if filter_type == "whitelist": - form_data['filtering'] = "Whitelist" - form_data['filtering_content.filter_hosts'] = regex - form_data['filtering_content.filter_host_parents'] = regex + form_data_ui['filtering'] = "Whitelist" + form_data_ui['filtering_content.filter_hosts'] = regex + form_data_ui['filtering_content.filter_host_parents'] = regex elif filter_type == "blacklist": - form_data['filtering'] = "Blacklist" - form_data['filtering_content.exclude_hosts'] = regex - form_data['filtering_content.exclude_host_parents'] = regex - session_sca.virtwho_configure.create(form_data) + form_data_ui['filtering'] = "Blacklist" + form_data_ui['filtering_content.exclude_hosts'] = regex + form_data_ui['filtering_content.exclude_host_parents'] = regex + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) config_file = get_configure_file(config_id) - results = session_sca.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) if filter_type == "whitelist": assert results['overview']['filter_hosts'] == regex assert results['overview']['filter_host_parents'] == regex @@ -308,7 +257,7 @@ def test_positive_filtering_option( @pytest.mark.tier2 def test_positive_last_checkin_status( - self, module_sca_manifest_org, virtwho_config, form_data, session_sca + self, module_sca_manifest_org, virtwho_config_ui, form_data_ui, 
org_session ): """Verify the Last Checkin status on Content Hosts Page. @@ -324,15 +273,15 @@ def test_positive_last_checkin_status( :CaseImportance: Medium """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name, widget_names='deploy.command') + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name, widget_names='deploy.command') command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) - time_now = session_sca.browser.get_client_datetime() - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - checkin_time = session_sca.contenthost.search(hypervisor_name)[0]['Last Checkin'] + time_now = org_session.browser.get_client_datetime() + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + checkin_time = org_session.contenthost.search(hypervisor_name)[0]['Last Checkin'] # 10 mins margin to check the Last Checkin time assert ( abs( @@ -346,7 +295,7 @@ def test_positive_last_checkin_status( @pytest.mark.tier2 def test_positive_remove_env_option( - self, module_sca_manifest_org, virtwho_config, form_data, target_sat, session_sca + self, module_sca_manifest_org, virtwho_config_ui, form_data_ui, target_sat, org_session ): """remove option 'env=' from the virt-who configuration file and without any error @@ -364,13 +313,13 @@ def test_positive_remove_env_option( :customerscenario: true """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, 
org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' # Check the option "env=" should be removed from etc/virt-who.d/virt-who.conf option = "env" config_id = get_configure_id(name) @@ -388,7 +337,7 @@ def test_positive_remove_env_option( assert result.status == 1 @pytest.mark.tier2 - def test_positive_virtwho_roles(self, session_sca): + def test_positive_virtwho_roles(self, org_session): """Verify the default roles for virtwho configure :id: 3c2501d5-c122-49f0-baa4-4c0d678cb6fc @@ -416,14 +365,14 @@ def test_positive_virtwho_roles(self, session_sca): }, 'Virt-who Viewer': {'Satellite virt who configure/config': ['view_virt_who_config']}, } - with session_sca: + with org_session: for role_name, role_filters in roles.items(): - assert session_sca.role.search(role_name)[0]['Name'] == role_name - assigned_permissions = session_sca.filter.read_permissions(role_name) + assert org_session.role.search(role_name)[0]['Name'] == role_name + assigned_permissions = org_session.filter.read_permissions(role_name) assert sorted(assigned_permissions) == sorted(role_filters) @pytest.mark.tier2 - def test_positive_delete_configure(self, module_sca_manifest_org, session_sca, form_data): + def test_positive_delete_configure(self, module_sca_manifest_org, org_session, form_data_ui): """Verify when a config is deleted the associated user is deleted. 
:id: efc7253d-f455-4dc3-ae03-3ed5e215bd11 @@ -442,23 +391,23 @@ def test_positive_delete_configure(self, module_sca_manifest_org, session_sca, f :CaseImportance: Low """ name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) + form_data_ui['name'] = name + with org_session: + org_session.virtwho_configure.create(form_data_ui) config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) restart_virtwho_service() assert get_virtwho_status() == 'logerror' @pytest.mark.tier2 def test_positive_virtwho_reporter_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who reporter role can TRULY work. 
@@ -476,9 +425,9 @@ def test_positive_virtwho_reporter_role( username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create an user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -488,14 +437,14 @@ def test_positive_virtwho_reporter_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Update the virt-who config file config_id = get_configure_id(config_name) config_file = get_configure_file(config_id) @@ -505,20 +454,20 @@ def test_positive_virtwho_reporter_role( restart_virtwho_service() assert get_virtwho_status() == 'logerror' # Check the permissioin of Virt-who Reporter - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) - assert session_sca.user.search(username)[0]['Username'] == username - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Reporter']}) + assert org_session.user.search(username)[0]['Username'] == username + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Reporter'] restart_virtwho_service() assert get_virtwho_status() == 'running' with Session(test_name, username, password) as newsession: 
assert not newsession.virtwho_configure.check_create_permission()['can_view'] - session_sca.user.delete(username) - assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_virtwho_viewer_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who viewer role can TRULY work. @@ -536,9 +485,9 @@ def test_positive_virtwho_viewer_role( username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create an user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -548,17 +497,17 @@ def test_positive_virtwho_viewer_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permissioin of Virt-who Viewer - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Viewer']}) + user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Viewer'] # Update the virt-who config file 
config_id = get_configure_id(config_name) @@ -579,12 +528,12 @@ def test_positive_virtwho_viewer_role( assert not update_permission['can_edit'] newsession.virtwho_configure.read(config_name) # Delete the created user - session_sca.user.delete(username) - assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_virtwho_manager_role( - self, module_sca_manifest_org, session_sca, test_name, form_data + self, module_sca_manifest_org, org_session, test_name, form_data_ui ): """Verify the virt-who manager role can TRULY work. @@ -600,9 +549,9 @@ def test_positive_virtwho_manager_role( username = gen_string('alpha') password = gen_string('alpha') config_name = gen_string('alpha') - with session_sca: + with org_session: # Create an user - session_sca.user.create( + org_session.user.create( { 'user.login': username, 'user.mail': valid_emails_list()[0], @@ -612,28 +561,28 @@ def test_positive_virtwho_manager_role( } ) # Create a virt-who config plugin - form_data['name'] = config_name - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(config_name) + form_data_ui['name'] = config_name + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(config_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) - assert session_sca.virtwho_configure.search(config_name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(config_name)[0]['Status'] == 'ok' # Check the permissioin of Virt-who Manager - session_sca.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) - user = session_sca.user.read(username) + org_session.user.update(username, {'roles.resources.assigned': ['Virt-who Manager']}) 
+ user = org_session.user.read(username) assert user['roles']['resources']['assigned'] == ['Virt-who Manager'] with Session(test_name, username, password) as newsession: # create_virt_who_config new_virt_who_name = gen_string('alpha') - form_data['name'] = new_virt_who_name - newsession.virtwho_configure.create(form_data) + form_data_ui['name'] = new_virt_who_name + newsession.virtwho_configure.create(form_data_ui) # view_virt_who_config values = newsession.virtwho_configure.read(new_virt_who_name) command = values['deploy']['command'] deploy_configure_by_command( - command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert newsession.virtwho_configure.search(new_virt_who_name)[0]['Status'] == 'ok' # edit_virt_who_config @@ -644,12 +593,12 @@ def test_positive_virtwho_manager_role( newsession.virtwho_configure.delete(modify_name) assert not newsession.virtwho_configure.search(modify_name) # Delete the created user - session_sca.user.delete(username) - assert not session_sca.user.search(username) + org_session.user.delete(username) + assert not org_session.user.search(username) @pytest.mark.tier2 def test_positive_deploy_configure_hypervisor_password_with_special_characters( - self, module_sca_manifest_org, form_data, target_sat, session_sca + self, module_sca_manifest_org, form_data_ui, target_sat, org_session ): """Verify " hammer virt-who-config deploy hypervisor with special characters" @@ -666,12 +615,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( :customerscenario: true """ name = gen_string('alpha') - form_data['name'] = name - with session_sca: + form_data_ui['name'] = name + with org_session: # check the hypervisor password contains single quotes - form_data['hypervisor_content.password'] = "Tes't" - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(name) + 
form_data_ui['hypervisor_content.password'] = "Tes't" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -682,12 +631,12 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) # check the hypervisor password contains backtick - form_data['hypervisor_content.password'] = "my`password" - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(name) + form_data_ui['hypervisor_content.password'] = "my`password" + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_id = get_configure_id(name) deploy_status = deploy_configure_by_command_check(command) @@ -698,5 +647,5 @@ def test_positive_deploy_configure_hypervisor_password_with_special_characters( get_configure_option('username', config_file) == settings.virtwho.esx.hypervisor_username ) - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) diff --git a/tests/foreman/virtwho/ui/test_hyperv.py b/tests/foreman/virtwho/ui/test_hyperv.py index 56c2bbdb476..a35878e2fc2 100644 --- a/tests/foreman/virtwho/ui/test_hyperv.py +++ b/tests/foreman/virtwho/ui/test_hyperv.py @@ -16,13 +16,11 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( 
deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ -30,36 +28,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.hyperv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - class TestVirtwhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, virtwho_config, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -76,37 +49,30 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert 
session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: f2efc018-d57e-4dc5-895e-53af320237de @@ -119,16 +85,16 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_hyperv_sca.py b/tests/foreman/virtwho/ui/test_hyperv_sca.py index d58d2c15ebd..3e7b0f01c5e 100644 --- a/tests/foreman/virtwho/ui/test_hyperv_sca.py +++ b/tests/foreman/virtwho/ui/test_hyperv_sca.py @@ -14,13 +14,10 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ -28,36 
+25,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.hyperv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.hyperv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.hyperv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.hyperv.hypervisor_password, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - class TestVirtwhoConfigforHyperv: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -74,29 +46,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -110,16 +64,16 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_kubevirt.py b/tests/foreman/virtwho/ui/test_kubevirt.py 
index 289e4f385f1..19d5bce7f63 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt.py +++ b/tests/foreman/virtwho/ui/test_kubevirt.py @@ -16,13 +16,11 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ -30,34 +28,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'hypervisor_content.kubeconfig': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - class TestVirtwhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, virtwho_config, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -74,37 +49,30 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert 
session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: 09826cc0-aa49-4355-8980-8097511eb7d7 @@ -117,16 +85,16 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_kubevirt_sca.py b/tests/foreman/virtwho/ui/test_kubevirt_sca.py index c4b893947e7..6ad0bdf1f96 100644 --- a/tests/foreman/virtwho/ui/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/ui/test_kubevirt_sca.py @@ -14,13 +14,10 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ 
-28,34 +25,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.kubevirt.hypervisor_type, - 'hypervisor_content.kubeconfig': settings.virtwho.kubevirt.hypervisor_config_file, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - class TestVirtwhoConfigforKubevirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -72,29 +46,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -108,16 +64,16 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_libvirt.py b/tests/foreman/virtwho/ui/test_libvirt.py 
index 6c6b37fdc30..86b3d3e6532 100644 --- a/tests/foreman/virtwho/ui/test_libvirt.py +++ b/tests/foreman/virtwho/ui/test_libvirt.py @@ -16,13 +16,11 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ -30,35 +28,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.libvirt.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - class TestVirtwhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, virtwho_config, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -75,37 +49,30 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert 
session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: b8b2b272-89f2-45d0-b922-6e988b20808b @@ -118,16 +85,16 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_libvirt_sca.py b/tests/foreman/virtwho/ui/test_libvirt_sca.py index 415ff38a37b..b6d33669744 100644 --- a/tests/foreman/virtwho/ui/test_libvirt_sca.py +++ b/tests/foreman/virtwho/ui/test_libvirt_sca.py @@ -14,13 +14,10 @@ :Upstream: No """ -from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( deploy_configure_by_command, - deploy_configure_by_script, get_configure_command, get_configure_file, get_configure_id, @@ 
-28,35 +25,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.libvirt.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.libvirt.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.libvirt.hypervisor_username, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - class TestVirtwhoConfigforLibvirt: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -73,29 +46,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. @@ -109,16 +64,16 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('hypervisor_id', config_file) == value diff --git a/tests/foreman/virtwho/ui/test_nutanix.py b/tests/foreman/virtwho/ui/test_nutanix.py 
index d2a77961918..8bd1a3b23c7 100644 --- a/tests/foreman/virtwho/ui/test_nutanix.py +++ b/tests/foreman/virtwho/ui/test_nutanix.py @@ -32,38 +32,11 @@ ) -@pytest.fixture() -def form_data(): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.ahv.hypervisor_server, - 'hypervisor_content.username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.ahv.hypervisor_password, - 'hypervisor_content.prism_flavor': "Prism Element", - 'ahv_internal_debug': False, - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session): - name = gen_string('alpha') - form_data['name'] = name - with session: - session.virtwho_configure.create(form_data) - yield virtwho_config - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) - - class TestVirtwhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, default_org, virtwho_config, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. 
@@ -80,37 +53,30 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - elif deploy_type == "script": - script = values['deploy']['script'] - hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label - ) - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + hypervisor_name, guest_name = deploy_type_ui + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + 'status' + ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) + assert org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert 
session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' @pytest.mark.tier2 - def test_positive_hypervisor_id_option(self, default_org, virtwho_config, session, form_data): + def test_positive_hypervisor_id_option( + self, default_org, virtwho_config_ui, org_session, form_data_ui + ): """Verify Hypervisor ID dropdown options. :id: e076a305-88f4-42fb-8ef2-cb55e38eb912 @@ -123,25 +89,25 @@ def test_positive_hypervisor_id_option(self, default_org, virtwho_config, sessio :CaseImportance: Medium """ - name = form_data['name'] - values = session.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) config_id = get_configure_id(name) config_command = values['deploy']['command'] config_file = get_configure_file(config_id) values = ['uuid', 'hostname'] for value in values: - session.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('hypervisor_id', config_file) == value @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, default_org, session, form_data, deploy_type + self, default_org, org_session, form_data_ui, deploy_type ): """Verify configure created and deployed with id on nutanix prism central mode @@ -160,49 +126,51 @@ def test_positive_prism_central_deploy_configure_by_id_script( :CaseImportance: High """ name = 
gen_string('alpha') - form_data['name'] = name - form_data['hypervisor_content.prism_flavor'] = "Prism Central" - with session: - session.virtwho_configure.create(form_data) - values = session.virtwho_configure.read(name) + form_data_ui['name'] = name + form_data_ui['hypervisor_content.prism_flavor'] = "Prism Central" + with org_session: + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) if deploy_type == "id": command = values['deploy']['command'] hypervisor_name, guest_name = deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) elif deploy_type == "script": script = values['deploy']['script'] hypervisor_name, guest_name = deploy_configure_by_script( - script, form_data['hypervisor_type'], debug=True, org=default_org.label + script, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) # Check the option "prism_central=true" should be set in etc/virt-who.d/virt-who.conf config_id = get_configure_id(name) config_file = get_configure_file(config_id) assert get_configure_option("prism_central", config_file) == 'true' - assert session.virtwho_configure.search(name)[0]['Status'] == 'ok' - hypervisor_display_name = session.contenthost.search(hypervisor_name)[0]['Name'] + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + hypervisor_display_name = org_session.contenthost.search(hypervisor_name)[0]['Name'] vdc_physical = f'product_id = {settings.virtwho.sku.vdc_physical} and type=NORMAL' vdc_virtual = f'product_id = {settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED' assert ( - session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ + org_session.contenthost.read_legacy_ui(hypervisor_display_name)['subscriptions'][ 'status' ] == 'Unsubscribed hypervisor' ) - session.contenthost.add_subscription(hypervisor_display_name, 
vdc_physical) - assert session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + org_session.contenthost.add_subscription(hypervisor_display_name, vdc_physical) assert ( - session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] + org_session.contenthost.search(hypervisor_name)[0]['Subscription Status'] == 'green' + ) + assert ( + org_session.contenthost.read_legacy_ui(guest_name)['subscriptions']['status'] == 'Unentitled' ) - session.contenthost.add_subscription(guest_name, vdc_virtual) - assert session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' - session.virtwho_configure.delete(name) - assert not session.virtwho_configure.search(name) + org_session.contenthost.add_subscription(guest_name, vdc_virtual) + assert org_session.contenthost.search(guest_name)[0]['Subscription Status'] == 'green' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) @pytest.mark.tier2 def test_positive_prism_central_prism_flavor_option( - self, default_org, virtwho_config, session, form_data + self, default_org, virtwho_config_ui, org_session, form_data_ui ): """Verify prism_flavor dropdown options. 
@@ -216,23 +184,25 @@ def test_positive_prism_central_prism_flavor_option( :CaseImportance: Medium """ - name = form_data['name'] - results = session.virtwho_configure.read(name) + name = form_data_ui['name'] + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "element" config_id = get_configure_id(name) config_command = get_configure_command(config_id, default_org.name) config_file = get_configure_file(config_id) - session.virtwho_configure.edit(name, {'hypervisor_content.prism_flavor': "Prism Central"}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit( + name, {'hypervisor_content.prism_flavor': "Prism Central"} + ) + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "central" deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=default_org.label + config_command, form_data_ui['hypervisor_type'], org=default_org.label ) assert get_configure_option('prism_central', config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, default_org, virtwho_config, session, form_data + self, default_org, virtwho_config_ui, org_session, form_data_ui ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -253,15 +223,15 @@ def test_positive_ahv_internal_debug_option( :BZ: 2141719 :customerscenario: true """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) - values = session.virtwho_configure.read(name) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label ) - results = session.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) assert 
str(results['overview']['ahv_internal_debug']) == 'False' # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' @@ -275,14 +245,16 @@ def test_positive_ahv_internal_debug_option( assert check_message_in_rhsm_log(message) == message # Update ahv_internal_debug option to true - session.virtwho_configure.edit(name, {'ahv_internal_debug': True}) - results = session.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'ahv_internal_debug': True}) + results = org_session.virtwho_configure.read(name) command = results['deploy']['command'] assert str(results['overview']['ahv_internal_debug']) == 'True' deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=default_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=default_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_ui['hypervisor_type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' # ahv_internal_debug bas been set to true in virt-who-config-X.conf config_file = get_configure_file(config_id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' diff --git a/tests/foreman/virtwho/ui/test_nutanix_sca.py b/tests/foreman/virtwho/ui/test_nutanix_sca.py index 3b53d038d71..42de668055e 100644 --- a/tests/foreman/virtwho/ui/test_nutanix_sca.py +++ b/tests/foreman/virtwho/ui/test_nutanix_sca.py @@ -17,7 +17,6 @@ from fauxfactory import gen_string import pytest -from robottelo.config import settings from robottelo.utils.virtwho import ( check_message_in_rhsm_log, deploy_configure_by_command, @@ -30,37 +29,11 @@ ) -@pytest.fixture() -def form_data(target_sat, module_sca_manifest_org): - form = { - 'debug': True, - 'interval': 'Every hour', - 'hypervisor_id': 'hostname', - 'hypervisor_type': settings.virtwho.ahv.hypervisor_type, - 'hypervisor_content.server': settings.virtwho.ahv.hypervisor_server, - 
'hypervisor_content.username': settings.virtwho.ahv.hypervisor_username, - 'hypervisor_content.password': settings.virtwho.ahv.hypervisor_password, - 'hypervisor_content.prism_flavor': "Prism Element", - } - return form - - -@pytest.fixture() -def virtwho_config(form_data, target_sat, session_sca): - name = gen_string('alpha') - form_data['name'] = name - with session_sca: - session_sca.virtwho_configure.create(form_data) - yield virtwho_config - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) - - class TestVirtwhoConfigforNutanix: @pytest.mark.tier2 - @pytest.mark.parametrize('deploy_type', ['id', 'script']) + @pytest.mark.parametrize('deploy_type_ui', ['id', 'script'], indirect=True) def test_positive_deploy_configure_by_id_script( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type_ui ): """Verify configure created and deployed with id. @@ -76,29 +49,11 @@ def test_positive_deploy_configure_by_id_script( :CaseImportance: High """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) - if deploy_type == "id": - command = values['deploy']['command'] - deploy_configure_by_command( - command, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - elif deploy_type == "script": - script = values['deploy']['script'] - deploy_configure_by_script( - script, - form_data['hypervisor_type'], - debug=True, - org=module_sca_manifest_org.label, - ) - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' + assert org_session.virtwho_configure.search(form_data_ui['name'])[0]['Status'] == 'ok' @pytest.mark.tier2 def test_positive_hypervisor_id_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify Hypervisor ID dropdown options. 
@@ -112,18 +67,18 @@ def test_positive_hypervisor_id_option( :CaseImportance: Medium """ - name = form_data['name'] - values = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + values = org_session.virtwho_configure.read(name) config_id = get_configure_id(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) for value in ['uuid', 'hostname']: - session_sca.virtwho_configure.edit(name, {'hypervisor_id': value}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'hypervisor_id': value}) + results = org_session.virtwho_configure.read(name) assert results['overview']['hypervisor_id'] == value deploy_configure_by_command( command, - form_data['hypervisor_type'], + form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -132,7 +87,7 @@ def test_positive_hypervisor_id_option( @pytest.mark.tier2 @pytest.mark.parametrize('deploy_type', ['id', 'script']) def test_positive_prism_central_deploy_configure_by_id_script( - self, module_sca_manifest_org, session_sca, form_data, deploy_type + self, module_sca_manifest_org, org_session, form_data_ui, deploy_type ): """Verify configure created and deployed with id on nutanix prism central mode @@ -151,16 +106,16 @@ def test_positive_prism_central_deploy_configure_by_id_script( :CaseImportance: High """ name = gen_string('alpha') - form_data['name'] = name - form_data['hypervisor_content.prism_flavor'] = "Prism Central" - with session_sca: - session_sca.virtwho_configure.create(form_data) - values = session_sca.virtwho_configure.read(name) + form_data_ui['name'] = name + form_data_ui['hypervisor_content.prism_flavor'] = "Prism Central" + with org_session: + org_session.virtwho_configure.create(form_data_ui) + values = org_session.virtwho_configure.read(name) if deploy_type == "id": command = values['deploy']['command'] deploy_configure_by_command( command, - form_data['hypervisor_type'], + 
form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -168,7 +123,7 @@ def test_positive_prism_central_deploy_configure_by_id_script( script = values['deploy']['script'] deploy_configure_by_script( script, - form_data['hypervisor_type'], + form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label, ) @@ -176,13 +131,13 @@ def test_positive_prism_central_deploy_configure_by_id_script( config_id = get_configure_id(name) config_file = get_configure_file(config_id) assert get_configure_option("prism_central", config_file) == 'true' - assert session_sca.virtwho_configure.search(name)[0]['Status'] == 'ok' - session_sca.virtwho_configure.delete(name) - assert not session_sca.virtwho_configure.search(name) + assert org_session.virtwho_configure.search(name)[0]['Status'] == 'ok' + org_session.virtwho_configure.delete(name) + assert not org_session.virtwho_configure.search(name) @pytest.mark.tier2 def test_positive_prism_central_prism_flavor_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify prism_flavor dropdown options. 
@@ -196,25 +151,25 @@ def test_positive_prism_central_prism_flavor_option( :CaseImportance: Medium """ - name = form_data['name'] - results = session_sca.virtwho_configure.read(name) + name = form_data_ui['name'] + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "element" config_id = get_configure_id(name) config_command = get_configure_command(config_id, module_sca_manifest_org.name) config_file = get_configure_file(config_id) - session_sca.virtwho_configure.edit( + org_session.virtwho_configure.edit( name, {'hypervisor_content.prism_flavor': "Prism Central"} ) - results = session_sca.virtwho_configure.read(name) + results = org_session.virtwho_configure.read(name) assert results['overview']['prism_flavor'] == "central" deploy_configure_by_command( - config_command, form_data['hypervisor_type'], org=module_sca_manifest_org.label + config_command, form_data_ui['hypervisor_type'], org=module_sca_manifest_org.label ) assert get_configure_option('prism_central', config_file) == 'true' @pytest.mark.tier2 def test_positive_ahv_internal_debug_option( - self, module_sca_manifest_org, virtwho_config, session_sca, form_data + self, module_sca_manifest_org, virtwho_config_ui, org_session, form_data_ui ): """Verify ahv_internal_debug option by hammer virt-who-config" @@ -237,15 +192,15 @@ def test_positive_ahv_internal_debug_option( :customerscenario: true """ - name = form_data['name'] + name = form_data_ui['name'] config_id = get_configure_id(name) - values = session_sca.virtwho_configure.read(name) + values = org_session.virtwho_configure.read(name) command = values['deploy']['command'] config_file = get_configure_file(config_id) deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label ) - results = session_sca.virtwho_configure.read(name) + results = 
org_session.virtwho_configure.read(name) assert str(results['overview']['ahv_internal_debug']) == 'False' # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' @@ -259,14 +214,16 @@ def test_positive_ahv_internal_debug_option( assert check_message_in_rhsm_log(message) == message # Update ahv_internal_debug option to true - session_sca.virtwho_configure.edit(name, {'ahv_internal_debug': True}) - results = session_sca.virtwho_configure.read(name) + org_session.virtwho_configure.edit(name, {'ahv_internal_debug': True}) + results = org_session.virtwho_configure.read(name) command = results['deploy']['command'] assert str(results['overview']['ahv_internal_debug']) == 'True' deploy_configure_by_command( - command, form_data['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + command, form_data_ui['hypervisor_type'], debug=True, org=module_sca_manifest_org.label + ) + assert ( + get_hypervisor_ahv_mapping(form_data_ui['hypervisor_type']) == 'Host UUID found for VM' ) - assert get_hypervisor_ahv_mapping(form_data['hypervisor_type']) == 'Host UUID found for VM' # ahv_internal_debug bas been set to true in virt-who-config-X.conf config_file = get_configure_file(config_id) assert get_configure_option("ahv_internal_debug", config_file) == 'true' From ff06a9dcbbd2a738b4a7f524a78d3227b146f664 Mon Sep 17 00:00:00 2001 From: Adarsh dubey Date: Wed, 11 Oct 2023 16:35:26 +0530 Subject: [PATCH 12/96] Coverage for autoprovision and reboot of discovered host (#12815) * Coverage for autoprovision and reboot of discovered host * Added one more test case for autoprovision of multiple discoveredhosts * Making a discovery component helper module --- conftest.py | 1 + pytest_fixtures/component/discovery.py | 31 +++++++++++ tests/foreman/api/test_discoveredhost.py | 71 +++++++++++++++++++----- 3 files changed, 89 insertions(+), 14 deletions(-) create mode 100644 pytest_fixtures/component/discovery.py diff --git a/conftest.py b/conftest.py 
index cf2522051ca..632f6e2a0e4 100644 --- a/conftest.py +++ b/conftest.py @@ -38,6 +38,7 @@ 'pytest_fixtures.component.computeprofile', 'pytest_fixtures.component.contentview', 'pytest_fixtures.component.domain', + 'pytest_fixtures.component.discovery', 'pytest_fixtures.component.host', 'pytest_fixtures.component.hostgroup', 'pytest_fixtures.component.http_proxy', diff --git a/pytest_fixtures/component/discovery.py b/pytest_fixtures/component/discovery.py new file mode 100644 index 00000000000..e05b3859f2f --- /dev/null +++ b/pytest_fixtures/component/discovery.py @@ -0,0 +1,31 @@ +from fauxfactory import gen_string +import pytest + + +@pytest.fixture(scope='module') +def module_discovery_hostgroup(module_org, module_location, module_target_sat): + host = module_target_sat.api.Host(organization=module_org, location=module_location).create() + return module_target_sat.api.HostGroup( + organization=[module_org], + location=[module_location], + medium=host.medium, + root_pass=gen_string('alpha'), + operatingsystem=host.operatingsystem, + ptable=host.ptable, + domain=host.domain, + architecture=host.architecture, + ).create() + + +@pytest.fixture(scope='module') +def discovery_org(module_org, module_target_sat): + discovery_org = module_target_sat.update_setting('discovery_organization', module_org.name) + yield module_org + module_target_sat.update_setting('discovery_organization', discovery_org) + + +@pytest.fixture(scope='module') +def discovery_location(module_location, module_target_sat): + discovery_loc = module_target_sat.update_setting('discovery_location', module_location.name) + yield module_location + module_target_sat.update_setting('discovery_location', discovery_loc) diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index 9b4a5ae5e22..773175812cf 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -247,8 +247,6 @@ def test_positive_provision_pxe_less_host( 
:expectedresults: Host should be provisioned successfully - :CaseAutomation: NotAutomated - :CaseImportance: Critical """ sat = module_discovery_sat.sat @@ -274,9 +272,10 @@ def test_positive_provision_pxe_less_host( assert not sat.api.Host().search(query={"search": f'name={host.name}'}) pxeless_discovery_host.blank = True - @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_auto_provision_pxe_host(self): + def test_positive_auto_provision_pxe_host( + self, module_discovery_hostgroup, module_target_sat, discovery_org, discovery_location + ): """Auto provision a pxe-based host by executing discovery rules :id: c93fd7c9-41ef-4eb5-8042-f72e87e67e10 @@ -290,14 +289,24 @@ def test_positive_auto_provision_pxe_host(self): :expectedresults: Selected Host should be auto-provisioned successfully - :CaseAutomation: Automated - :CaseImportance: Critical """ + discovered_host = module_target_sat.api_factory.create_discovered_host() + + rule = module_target_sat.api.DiscoveryRule( + max_count=1, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + ).create() + result = module_target_sat.api.DiscoveredHost(id=discovered_host['id']).auto_provision() + assert f'provisioned with rule {rule.name}' in result['message'] - @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_auto_provision_all(self): + def test_positive_auto_provision_all( + self, module_discovery_hostgroup, module_target_sat, discovery_org, discovery_location + ): """Auto provision all host by executing discovery rules :id: 954d3688-62d9-47f7-9106-a4fff8825ffa @@ -314,6 +323,19 @@ def test_positive_auto_provision_all(self): :CaseImportance: High """ + module_target_sat.api.DiscoveryRule( + max_count=25, + hostgroup=module_discovery_hostgroup, + search_=f'location = "{discovery_location.name}"', + location=[discovery_location], + organization=[discovery_org], + ).create() + + for _ in range(2): + 
module_target_sat.api_factory.create_discovered_host() + + result = module_target_sat.api.DiscoveredHost().auto_provision_all() + assert '2 discovered hosts were provisioned' in result['message'] @pytest.mark.stubbed @pytest.mark.tier3 @@ -337,9 +359,19 @@ def test_positive_refresh_facts_pxe_host(self): :CaseImportance: High """ - @pytest.mark.stubbed + @pytest.mark.on_premises_provisioning + @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) + @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) + @pytest.mark.rhel_ver_match('9') @pytest.mark.tier3 - def test_positive_reboot_pxe_host(self): + def test_positive_reboot_pxe_host( + self, + module_provisioning_rhel_content, + module_discovery_sat, + provisioning_host, + provisioning_hostgroup, + pxe_loader, + ): """Rebooting a pxe based discovered host :id: 69c807f8-5646-4aa6-8b3c-5ecab69560fc @@ -352,10 +384,23 @@ def test_positive_reboot_pxe_host(self): :expectedresults: Selected host should be rebooted successfully - :CaseAutomation: Automated - :CaseImportance: Medium """ + sat = module_discovery_sat.sat + provisioning_host.power_control(ensure=False) + mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=240, + delay=20, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + result = sat.api.DiscoveredHost(id=discovered_host.id).reboot() + assert 'Unable to perform reboot' not in result @pytest.mark.stubbed @pytest.mark.tier3 @@ -381,8 +426,6 @@ def test_positive_reboot_all_pxe_hosts(self): class TestFakeDiscoveryTests: """Tests that use fake discovered host. 
- :CaseAutomation: Automated - :CaseImportance: High """ From 2799a3f57a296c98005f3237a81614dc1203cfa5 Mon Sep 17 00:00:00 2001 From: David Moore <109112035+damoore044@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:05:17 -0400 Subject: [PATCH 13/96] Update to errata-stream fix, use up to date register method for rhel_contenthost (#12851) * Update to use .register() method, the preferred way without katello-agent * Addressing comments * subscription-manager repos call, local pass --- robottelo/host_helpers/contenthost_mixins.py | 1 + tests/foreman/api/test_errata.py | 36 +++++++++----------- 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/robottelo/host_helpers/contenthost_mixins.py b/robottelo/host_helpers/contenthost_mixins.py index f009503666a..74566b0ffb0 100644 --- a/robottelo/host_helpers/contenthost_mixins.py +++ b/robottelo/host_helpers/contenthost_mixins.py @@ -154,6 +154,7 @@ class HostInfo: @property def applicable_errata_count(self): """return the applicable errata count for a host""" + self.run('subscription-manager repos') return self.nailgun_host.read().content_facet_attributes['errata_counts']['total'] diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index ba64ce1c9a0..8c6383a4c7a 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -17,7 +17,6 @@ :Upstream: No """ # For ease of use hc refers to host-collection throughout this document -from copy import copy from time import sleep from nailgun import entities @@ -159,12 +158,10 @@ def test_positive_install_in_hc(module_org, activation_key, custom_repo, target_ @pytest.mark.tier3 -@pytest.mark.rhel_ver_match(r'^(?!6$)\d+$') +@pytest.mark.rhel_ver_match('[^6]') @pytest.mark.no_containers @pytest.mark.e2e -def test_positive_install_multiple_in_host( - function_org, rhel_contenthost, target_sat, function_lce -): +def test_positive_install_multiple_in_host(target_sat, rhel_contenthost, module_org, module_lce): """For a 
host with multiple applicable errata install one and ensure the rest of errata is still available @@ -185,35 +182,34 @@ def test_positive_install_multiple_in_host( :CaseLevel: System """ ak = target_sat.api.ActivationKey( - organization=function_org, - environment=function_lce, + organization=module_org, + environment=module_lce, ).create() - cv = target_sat.api.ContentView(organization=function_org).create() - # Associate custom repos with org, cv, lce, ak: + # Associate custom repos with org, lce, ak: target_sat.cli_factory.setup_org_for_a_custom_repo( { 'url': settings.repos.yum_9.url, - 'organization-id': function_org.id, - 'content-view-id': cv.id, - 'lifecycle-environment-id': function_lce.id, + 'organization-id': module_org.id, + 'lifecycle-environment-id': module_lce.id, 'activationkey-id': ak.id, } ) - # Install katello-ca, register content-host, enable all repos: - rhel_contenthost.install_katello_ca(target_sat) - rhel_contenthost.register_contenthost(function_org.name, ak.name, function_lce.name) + rhel_contenthost.register( + activation_keys=ak.name, + target=target_sat, + org=module_org, + loc=None, + ) assert rhel_contenthost.subscribed # Installing outdated custom packages: for package in constants.FAKE_9_YUM_OUTDATED_PACKAGES: rhel_contenthost.run(f'yum remove -y {str(package.split("-", 1)[0])}') assert rhel_contenthost.run(f'yum install -y {package}').status == 0 assert rhel_contenthost.run(f'rpm -q {package}').status == 0 - rhel_contenthost.add_rex_key(satellite=target_sat) - rhel_contenthost.run(r'subscription-manager repos --enable \*') # Each errata will be installed sequentially, # after each install, applicable-errata-count should drop by one. 
for errata in constants.FAKE_9_YUM_SECURITY_ERRATUM: - pre_errata_count = copy(rhel_contenthost.applicable_errata_count) + pre_errata_count = rhel_contenthost.applicable_errata_count assert pre_errata_count >= 1 task_id = target_sat.api.JobInvocation().run( data={ @@ -221,7 +217,7 @@ def test_positive_install_multiple_in_host( 'inputs': {'errata': str(errata)}, 'targeting_type': 'static_query', 'search_query': f'name = {rhel_contenthost.hostname}', - 'organization_id': function_org.id, + 'organization_id': module_org.id, }, )['id'] target_sat.wait_for_tasks( @@ -229,7 +225,7 @@ def test_positive_install_multiple_in_host( search_rate=20, max_tries=15, ) - sleep(10) + sleep(20) assert rhel_contenthost.applicable_errata_count == pre_errata_count - 1 From cf98270ecb2e99a6665b7ad97346d756c0c63be5 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Wed, 11 Oct 2023 10:06:37 -0400 Subject: [PATCH 14/96] [Repository Rewrite] Upgrade test for large repo sync (#12710) * fixed failing ak upgrade test * added test for large repo sync after upgrade * added class to large repo sync tests * removed extra test * updated org_id * added customerscenario to docstring --- tests/upgrades/test_repository.py | 79 ++++++++++++++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/tests/upgrades/test_repository.py b/tests/upgrades/test_repository.py index 2119fa57816..998db385ad4 100644 --- a/tests/upgrades/test_repository.py +++ b/tests/upgrades/test_repository.py @@ -19,7 +19,12 @@ import pytest from robottelo.config import settings -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME +from robottelo.constants import ( + DEFAULT_ARCHITECTURE, + FAKE_0_CUSTOM_PACKAGE_NAME, + FAKE_4_CUSTOM_PACKAGE_NAME, + REPOS, +) from robottelo.hosts import ContentHost UPSTREAM_USERNAME = 'rTtest123' @@ -299,3 +304,75 @@ def test_post_scenario_custom_repo_sca_toggle(self, pre_upgrade_data): result = rhel_client.execute('subscription-manager 
repo-override --list') assert 'enabled: 1' in result.stdout assert f'{org_name}_{product_name}_{repo_name}' in result.stdout + + +class TestScenarioLargeRepoSyncCheck: + """Scenario test to verify that large repositories can be synced without + failure after an upgrade. + + Test Steps: + + 1. Before Satellite upgrade. + 2. Enable and sync large RH repository. + 3. Upgrade Satellite. + 4. Enable and sync a second large repository. + + BZ: 2043144 + + :customerscenario: true + """ + + @pytest.mark.pre_upgrade + def test_pre_scenario_sync_large_repo( + self, target_sat, module_entitlement_manifest_org, save_test_data + ): + """This is a pre-upgrade scenario to verify that users can sync large repositories + before an upgrade + + :id: afb957dc-c509-4009-ac85-4b71b64d3c74 + + :steps: + 1. Enable a large redhat repository + 2. Sync repository and assert sync succeeds + + :expectedresults: Large Repositories should succeed when synced + """ + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=REPOS['rhel8_bos']['product'], + repo=REPOS['rhel8_bos']['name'], + reposet=REPOS['rhel8_bos']['reposet'], + releasever=REPOS['rhel8_bos']['releasever'], + ) + repo = target_sat.api.Repository(id=rh_repo_id).read() + res = repo.sync(timeout=2000) + assert res['result'] == 'success' + save_test_data({'org_id': module_entitlement_manifest_org.id}) + + @pytest.mark.post_upgrade(depend_on=test_pre_scenario_sync_large_repo) + def test_post_scenario_sync_large_repo(self, target_sat, pre_upgrade_data): + """This is a post-upgrade scenario to verify that large repositories can be + synced after an upgrade + + :id: 7bdbb2ac-7197-4e1a-8163-5852943eb49b + + :steps: + 1. Sync large repository + 2. Upgrade satellite + 3. 
Sync a second large repository in that same organization + + :expectedresults: Large repositories should succeed after an upgrade + """ + org_id = pre_upgrade_data.get('org_id') + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=org_id, + product=REPOS['rhel8_aps']['product'], + repo=REPOS['rhel8_aps']['name'], + reposet=REPOS['rhel8_aps']['reposet'], + releasever=REPOS['rhel8_aps']['releasever'], + ) + repo = target_sat.api.Repository(id=rh_repo_id).read() + res = repo.sync(timeout=4000) + assert res['result'] == 'success' From 2211271e46c2dbeb5694177cc452d19dcbcf5256 Mon Sep 17 00:00:00 2001 From: Shubham Ganar <67952129+shubhamsg199@users.noreply.github.com> Date: Wed, 11 Oct 2023 23:25:24 +0530 Subject: [PATCH 15/96] Fix VMware tests (#12883) --- robottelo/constants/__init__.py | 2 +- tests/foreman/cli/test_computeresource_vmware.py | 2 +- tests/foreman/ui/test_computeresource_vmware.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index 01acc11b653..e7d2548758b 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -1726,7 +1726,7 @@ class Colored(Box): VMWARE_CONSTANTS = { 'folder': 'vm', - 'guest_os': 'Red Hat Enterprise Linux 8 (64-bit)', + 'guest_os': 'Red Hat Enterprise Linux 8 (64 bit)', 'scsicontroller': 'LSI Logic Parallel', 'virtualhw_version': 'Default', 'pool': 'Resources', diff --git a/tests/foreman/cli/test_computeresource_vmware.py b/tests/foreman/cli/test_computeresource_vmware.py index 84ebc98af7c..3606b0a077d 100644 --- a/tests/foreman/cli/test_computeresource_vmware.py +++ b/tests/foreman/cli/test_computeresource_vmware.py @@ -56,7 +56,7 @@ def test_positive_vmware_cr_end_to_end(target_sat, module_org, module_location): ) assert vmware_cr['name'] == cr_name assert vmware_cr['locations'][0] == module_location.name - assert vmware_cr['organizations'] == module_org.name + 
assert vmware_cr['organizations'][0] == module_org.name assert vmware_cr['server'] == settings.vmware.vcenter assert vmware_cr['datacenter'] == settings.vmware.datacenter # List diff --git a/tests/foreman/ui/test_computeresource_vmware.py b/tests/foreman/ui/test_computeresource_vmware.py index 1f93bbe673a..34d0127e983 100644 --- a/tests/foreman/ui/test_computeresource_vmware.py +++ b/tests/foreman/ui/test_computeresource_vmware.py @@ -557,6 +557,7 @@ def test_positive_virt_card(session, target_sat, module_location, module_org): settings.vmware.vm_name, hostgroup_name, module_location.name, + name=settings.vmware.vm_name, ) host_name = '.'.join([settings.vmware.vm_name, domain.name]) power_status = session.computeresource.vm_status(cr_name, settings.vmware.vm_name) From 4352c9bfb3843fd6ea77ca19ae2953b96fa5e4c0 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Thu, 12 Oct 2023 12:30:07 +0530 Subject: [PATCH 16/96] Validate testimony for upgrade tests (#12878) Signed-off-by: Gaurav Talreja --- Makefile | 1 + tests/upgrades/test_activation_key.py | 8 ++++---- tests/upgrades/test_bookmarks.py | 6 ++---- tests/upgrades/test_capsule.py | 3 +-- tests/upgrades/test_errata.py | 14 ++++++-------- tests/upgrades/test_satellitesync.py | 9 ++++----- tests/upgrades/test_syncplan.py | 8 +++----- tests/upgrades/test_usergroup.py | 2 +- 8 files changed, 22 insertions(+), 29 deletions(-) diff --git a/Makefile b/Makefile index 2f73c60d3b4..ff0920c70ee 100644 --- a/Makefile +++ b/Makefile @@ -73,6 +73,7 @@ test-docstrings: uuid-check testimony $(TESTIMONY_OPTIONS) validate tests/foreman/ui testimony $(TESTIMONY_OPTIONS) validate tests/foreman/virtwho testimony $(TESTIMONY_OPTIONS) validate tests/foreman/maintain + testimony $(TESTIMONY_OPTIONS) validate tests/upgrades test-robottelo: $(info "Running robottelo framework unit tests...") diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py index 982e9e945a7..7e7cdf9de21 100644 --- 
a/tests/upgrades/test_activation_key.py +++ b/tests/upgrades/test_activation_key.py @@ -60,14 +60,14 @@ def test_pre_create_activation_key(self, activation_key_setup, target_sat): :steps: 1. Create the activation key. 2. Add subscription in the activation key. - 3: Check the subscription id of the activation key and compare it with custom_repos - product id. + 3. Check the subscription id of the activation key and compare it with custom_repos + product id. 4. Update the host collection in the activation key. :parametrized: yes :expectedresults: Activation key should be created successfully and it's subscription id - should be same with custom repos product id. + should be same with custom repos product id. """ ak = activation_key_setup['ak'] org_subscriptions = target_sat.api.Subscription( @@ -95,7 +95,7 @@ def test_post_crud_activation_key(self, dependent_scenario_name, target_sat): 3. Delete activation key. :expectedresults: Activation key's entities should be same after upgrade and activation - key update and delete should work. + key update and delete should work. """ pre_test_name = dependent_scenario_name org = target_sat.api.Organization().search(query={'search': f'name={pre_test_name}_org'}) diff --git a/tests/upgrades/test_bookmarks.py b/tests/upgrades/test_bookmarks.py index dde6b112a3b..a3f330e7569 100644 --- a/tests/upgrades/test_bookmarks.py +++ b/tests/upgrades/test_bookmarks.py @@ -77,7 +77,7 @@ def test_post_create_public_disable_bookmark(self, dependent_scenario_name, targ 2. Remove the bookmark. :expectedresults: Public disabled bookmarks details for all the system entities - should be unchanged after upgrade. + should be unchanged after upgrade. :CaseImportance: Critical """ @@ -106,7 +106,6 @@ def test_pre_create_public_enable_bookmark(self, request, target_sat): :id: preupgrade-93c419db-66b4-4c9a-a82a-a6a68703881f :Steps: - 1. Create public enable bookmarks before the upgrade for all system entities using available bookmark data. 2. 
Check the bookmark attribute(controller, name, query public) status @@ -142,12 +141,11 @@ def test_post_create_public_enable_bookmark(self, dependent_scenario_name, targe :id: postupgrade-93c419db-66b4-4c9a-a82a-a6a68703881f :Steps: - 1. Check the bookmark status after post-upgrade. 2. Remove the bookmark. :expectedresults: Public disabled bookmarks details for all the system entities - should be unchanged after upgrade. + should be unchanged after upgrade. :CaseImportance: Critical """ diff --git a/tests/upgrades/test_capsule.py b/tests/upgrades/test_capsule.py index 19f8ad87b9e..e4f2d8a0720 100644 --- a/tests/upgrades/test_capsule.py +++ b/tests/upgrades/test_capsule.py @@ -59,8 +59,7 @@ def test_pre_user_scenario_capsule_sync(self, target_sat, default_org, save_test :expectedresults: 1. The repo/rpm should be synced to satellite 2. Activation key's environment id should be available in the content views environment - id's list - + id's list """ ak_name = ( settings.upgrade.capsule_ak[settings.upgrade.os] diff --git a/tests/upgrades/test_errata.py b/tests/upgrades/test_errata.py index e35e8da634c..864f884c3da 100644 --- a/tests/upgrades/test_errata.py +++ b/tests/upgrades/test_errata.py @@ -115,17 +115,15 @@ def test_pre_scenario_generate_errata_for_client( :id: preupgrade-88fd28e6-b4df-46c0-91d6-784859fd1c21 :steps: - 1. Create Product and Custom Yum Repo 2. Create custom tools, rhel repos and sync them 3. Create content view and publish it 4. Create activation key and add subscription 5. Register RHEL host to Satellite - 7. Generate Errata by installing outdated/older packages - 8. Check that errata applicability generated expected errata list for the given client. + 6. Generate Errata by installing outdated/older packages + 7. Check that errata applicability generated expected errata list for the given client. :expectedresults: - 1. The content host is created 2. errata count, erratum list will be generated to Satellite content host 3. 
All the expected errata are ready-to-be-applied on the client @@ -204,10 +202,10 @@ def test_post_scenario_errata_count_installation(self, target_sat, pre_upgrade_d 1. Recover pre_upgrade data for post_upgrade verification 2. Verify errata count has not changed on Satellite - 4. Verify the errata_ids - 5. Verify installation of errata is successfull - 6. Verify that the errata application updated packages on client - 7. Verify that all expected erratas were installed on client. + 3. Verify the errata_ids + 4. Verify installation of errata is successfull + 5. Verify that the errata application updated packages on client + 6. Verify that all expected erratas were installed on client. :expectedresults: 1. errata count and erratum list should same after Satellite upgrade diff --git a/tests/upgrades/test_satellitesync.py b/tests/upgrades/test_satellitesync.py index 5024cf93c18..dcbc7866f86 100644 --- a/tests/upgrades/test_satellitesync.py +++ b/tests/upgrades/test_satellitesync.py @@ -33,13 +33,12 @@ def test_pre_version_cv_export_import(self, module_org, target_sat, save_test_da :id: preupgrade-f19e4928-94db-4df6-8ce8-b5e4afe34258 :steps: - - 1. Create a ContentView - 2. Publish and promote the Content View - 3. Check the package count of promoted content view. + 1. Create a ContentView + 2. Publish and promote the Content View + 3. Check the package count of promoted content view. :expectedresults: Before the upgrade, Content view published and promoted, and package - count should be greater than 0. + count should be greater than 0. """ product = target_sat.api.Product(organization=module_org).create() repo = target_sat.api.Repository( diff --git a/tests/upgrades/test_syncplan.py b/tests/upgrades/test_syncplan.py index 63fdb7ee164..ba14f9de831 100644 --- a/tests/upgrades/test_syncplan.py +++ b/tests/upgrades/test_syncplan.py @@ -41,7 +41,6 @@ def test_pre_sync_plan_migration(self, request, target_sat): 3. 
Assign sync plan to product and sync the repo :expectedresults: Run sync plan create, get, assign and verify it should pass - """ org = target_sat.api.Organization(name=f'{request.node.name}_org').create() sync_plan = target_sat.api.SyncPlan( @@ -71,7 +70,7 @@ def test_pre_disabled_sync_plan_logic(self, request, target_sat): 5. Re enable the sync plan :expectedresults: Sync plan is created and assigned to a product. The associated recurring - logic is cancelled and then the plan is re-enabled so that it gets a new recurring logic. + logic is cancelled and then the plan is re-enabled so that it gets a new recurring logic. :BZ: 1887511 @@ -114,8 +113,7 @@ def test_post_sync_plan_migration(self, request, dependent_scenario_name, target 2. Check the all available sync_interval type update with pre-created sync_plan :expectedresults: After upgrade, the sync plan should remain the same with their all - target_sat.api and sync_interval updated with their all supported sync interval type. - + target_sat.api and sync_interval updated with their all supported sync interval type. """ pre_test_name = dependent_scenario_name org = target_sat.api.Organization().search(query={'search': f'name="{pre_test_name}_org"'})[ @@ -156,7 +154,7 @@ def test_post_disabled_sync_plan_logic(self, request, dependent_scenario_name, t 2. Check the all available sync_interval type update with pre-created sync_plan. :expectedresults: Update proceedes without any errors. After upgrade, the sync plan - should remain the same with all entities + should remain the same with all entities :BZ: 1887511 diff --git a/tests/upgrades/test_usergroup.py b/tests/upgrades/test_usergroup.py index 838a074c363..0420a8a5e8c 100644 --- a/tests/upgrades/test_usergroup.py +++ b/tests/upgrades/test_usergroup.py @@ -101,7 +101,7 @@ def test_post_verify_user_group_membership( 2. Update ldap auth. :expectedresults: After upgrade, user group membership should remain the same and LDAP - auth update should work. 
+ auth update should work. """ ad_data = ad_data() user_group = target_sat.api.UserGroup().search( From 6d1786fe0c6f5e25e11b908015e692b92c8c85c4 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Thu, 12 Oct 2023 08:49:25 -0400 Subject: [PATCH 17/96] [Repository Rewrite] Added test for publishing invalid repository (#12322) * Adding test for invalid repository publish * fixed flake 8 issues with too many chars --- tests/foreman/cli/test_repositories.py | 38 ++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index f35a93603ab..2a98e9dbd58 100644 --- a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -17,6 +17,7 @@ :Upstream: No """ import pytest +from requests.exceptions import HTTPError @pytest.mark.rhel_ver_match('[^6]') @@ -47,3 +48,40 @@ def test_positive_custom_products_disabled_by_default( assert rhel_contenthost.subscribed product_details = rhel_contenthost.run('subscription-manager repos --list') assert 'Enabled: 0' in product_details.stdout + + +def test_negative_invalid_repo_fails_publish( + module_repository, + module_org, + target_sat, +): + """Verify that an invalid repository fails when trying to publish in a content view + + :id: 64e03f28-8213-467a-a229-44c8cbfaaef1 + + :steps: + 1. Create custom product and upload repository + 2. Run Katello commands to make repository invalid + 3. Create content view and add repository + 4. Verify Publish fails + + :expectedresults: Publishing a content view with an invalid repository fails + + :customerscenario: true + + :BZ: 2032040 + """ + repo = module_repository + target_sat.execute( + 'echo "root = ::Katello::RootRepository.last; ::Katello::Resources::Candlepin::Product.' 
+ 'remove_content(root.product.organization.label, root.product.cp_id, root.content_id); ' + '::Katello::Resources::Candlepin::Content.destroy(root.product.organization.label, ' + 'root.content_id)" | foreman-rake console' + ) + cv = target_sat.api.ContentView( + organization=module_org.name, + repository=[repo.id], + ).create() + with pytest.raises(HTTPError) as context: + cv.publish() + assert 'Remove the invalid repository before publishing again' in context.value.response.text From b1150c8af3b437fa300327a65f692141dff8bb5d Mon Sep 17 00:00:00 2001 From: Shubham Ganar <67952129+shubhamsg199@users.noreply.github.com> Date: Fri, 13 Oct 2023 13:46:25 +0530 Subject: [PATCH 18/96] Fix Puppet tests (#12893) Signed-off-by: Shubham Ganar --- robottelo/hosts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/robottelo/hosts.py b/robottelo/hosts.py index aa72002146f..cc17f606e38 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -1968,7 +1968,7 @@ def delete_puppet_class(self, puppetclass_name): for hostgroup in puppet_class.read().hostgroup: hostgroup.delete_puppetclass(data={'puppetclass_id': puppet_class.id}) # Search and remove puppet class from affected hosts - for host in self.api.Host().search(query={'search': f'class={puppet_class.name}'}): + for host in self.api.Host(puppetclass=f'{puppet_class.name}').search(): host.delete_puppetclass(data={'puppetclass_id': puppet_class.id}) # Remove puppet class entity puppet_class.delete() From 5b7f1cb93292ae3f55045c06e49a105b6126f212 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Fri, 13 Oct 2023 17:18:17 +0530 Subject: [PATCH 19/96] Fix typo for organization field in global registration (#12902) Signed-off-by: Gaurav Talreja --- tests/foreman/ui/test_host.py | 2 +- tests/foreman/ui/test_rhcloud_insights.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index 27ec9ca39cd..08ce12c02ff 100644 --- 
a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -1391,7 +1391,7 @@ def test_global_registration_with_capsule_host( session.location.select(loc_name=module_location.name) cmd = session.host.get_register_command( { - 'general.orgnization': module_org.name, + 'general.organization': module_org.name, 'general.location': module_location.name, 'general.operating_system': default_os.title, 'general.capsule': capsule_configured.hostname, diff --git a/tests/foreman/ui/test_rhcloud_insights.py b/tests/foreman/ui/test_rhcloud_insights.py index 6ba96238d21..153e2a11876 100644 --- a/tests/foreman/ui/test_rhcloud_insights.py +++ b/tests/foreman/ui/test_rhcloud_insights.py @@ -356,7 +356,7 @@ def test_insights_registration_with_capsule( cmd = session.host_new.get_register_command( { 'general.operating_system': default_os.title, - 'general.orgnization': org.name, + 'general.organization': org.name, 'general.capsule': rhcloud_capsule.hostname, 'general.activation_keys': ak.name, 'general.insecure': True, From 1ce733ff0115eca4c65f00352597bbe82ed9d31f Mon Sep 17 00:00:00 2001 From: Jacob Callahan Date: Wed, 11 Oct 2023 10:55:01 -0400 Subject: [PATCH 20/96] Add gitleaks to pre-commit hooks This will prevent secrets from being committed to the repository. Handling it in pre-commit because a check in actions is too late. 
--- .pre-commit-config.yaml | 56 ++++++++++++++++++++++------------------- 1 file changed, 30 insertions(+), 26 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c86df504763..08c10775ce1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,29 +1,33 @@ # configuration for pre-commit git hooks repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 - hooks: - - id: trailing-whitespace - exclude: tests/foreman/data/ - - id: check-yaml - - id: debug-statements -- repo: https://github.com/psf/black - rev: 22.10.0 - hooks: - - id: black -- repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.277 - hooks: - - id: ruff - args: [--fix, --exit-non-zero-on-fix] -- repo: local - hooks: - - id: fix-uuids - name: Robottelo Custom Fix UUIDs script - description: This hook runs the scripts/fix_uuids.sh script - language: script - entry: scripts/fix_uuids.sh - verbose: true - types: [text] - require_serial: true + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + exclude: tests/foreman/data/ + - id: check-yaml + - id: debug-statements + - repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.277 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - repo: local + hooks: + - id: fix-uuids + name: Robottelo Custom Fix UUIDs script + description: This hook runs the scripts/fix_uuids.sh script + language: script + entry: scripts/fix_uuids.sh + verbose: true + types: [text] + require_serial: true + - repo: https://github.com/gitleaks/gitleaks + rev: v8.18.0 + hooks: + - id: gitleaks From a044fd2b78516c1adb24709807b1d002873ec00c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 00:00:41 -0400 Subject: [PATCH 21/96] Bump pre-commit from 3.4.0 to 3.5.0 (#12910) --- 
requirements-optional.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-optional.txt b/requirements-optional.txt index 6da8ffd5441..8d8abe4d798 100644 --- a/requirements-optional.txt +++ b/requirements-optional.txt @@ -2,7 +2,7 @@ flake8==6.1.0 pytest-cov==4.1.0 redis==5.0.1 -pre-commit==3.4.0 +pre-commit==3.5.0 # For generating documentation. sphinx==7.2.6 From 2a013f8a829d7e73032d370bf9723a83f7aa0a99 Mon Sep 17 00:00:00 2001 From: Griffin Sullivan <48397354+Griffin-Sullivan@users.noreply.github.com> Date: Mon, 16 Oct 2023 09:20:01 -0400 Subject: [PATCH 22/96] Fix packages status (#12845) --- robottelo/cli/sm_packages.py | 1 + 1 file changed, 1 insertion(+) diff --git a/robottelo/cli/sm_packages.py b/robottelo/cli/sm_packages.py index d56a1156ec9..ece44a773e0 100644 --- a/robottelo/cli/sm_packages.py +++ b/robottelo/cli/sm_packages.py @@ -51,6 +51,7 @@ def is_locked(cls, options=None): def status(cls, options=None): """Build satellite-maintain packages status""" cls.command_sub = 'status' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) From 5912ef27dec7038cd6b878805f609f5ea7762076 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Mon, 16 Oct 2023 15:40:08 -0400 Subject: [PATCH 23/96] [Repository Rewrite] Adding test for available_repositories endpoint (#12888) * adding test for available repositories endpoint * updated repo to rhel7 extras * replaced PRDS with REPOS * updated product call --- tests/foreman/api/test_repositories.py | 41 ++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 58cbd9792b2..1303de40c27 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -291,3 +291,44 @@ def test_positive_sync_mulitple_large_repos(module_target_sat, module_entitlemen data={'ids': [rh_products.id]}, timeout=2000 ) assert 
res['result'] == 'success' + + +def test_positive_available_repositories_endpoint(module_sca_manifest_org, target_sat): + """Attempt to hit the /available_repositories endpoint with no failures + + :id: f4c9d4a0-9a82-4f06-b772-b1f7e3f45e7d + + :Steps: + 1. Enable a Red Hat Repository + 2. Attempt to hit the enpoint: + GET /katello/api/repository_sets/:id/available_repositories + 3. Verify Actions::Katello::RepositorySet::ScanCdn task is run + 4. Verify there are no failures when scanning for repository + + + :expectedresults: Actions::Katello::RepositorySet::ScanCdn task should succeed and + not fail when scanning for repositories + + :customerscenario: true + + :BZ: 2030445 + """ + rh_repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=constants.DEFAULT_ARCHITECTURE, + org_id=module_sca_manifest_org.id, + product=constants.REPOS['rhel7_extra']['product'], + repo=constants.REPOS['rhel7_extra']['name'], + reposet=constants.REPOS['rhel7_extra']['reposet'], + releasever=None, + ) + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() + product = target_sat.api.Product(id=rh_repo.product.id).read() + reposet = target_sat.api.RepositorySet( + name=constants.REPOSET['rhel7_extra'], product=product + ).search()[0] + touch_endpoint = target_sat.api.RepositorySet.available_repositories(reposet) + assert touch_endpoint['total'] != 0 + results = target_sat.execute('tail -15 /var/log/foreman/production.log').stdout + assert 'Actions::Katello::RepositorySet::ScanCdn' in results + assert 'result: success' in results + assert 'Failed at scanning for repository' not in results From b5d69870e6948c1ca78a4f984dd044961b11221d Mon Sep 17 00:00:00 2001 From: Griffin Sullivan Date: Mon, 16 Oct 2023 15:17:35 -0400 Subject: [PATCH 24/96] Adding provisioning params to installer --- tests/foreman/installer/test_installer.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/foreman/installer/test_installer.py b/tests/foreman/installer/test_installer.py index 
0abe50e2500..178707a8f9c 100644 --- a/tests/foreman/installer/test_installer.py +++ b/tests/foreman/installer/test_installer.py @@ -194,6 +194,8 @@ '--foreman-plugin-tasks-backup', '--foreman-plugin-tasks-cron-line', '--foreman-plugin-version', + '--foreman-provisioning-ct-location', + '--foreman-provisioning-fcct-location', '--foreman-proxy-autosignfile', '--foreman-proxy-bind-host', '--foreman-proxy-bmc', @@ -767,6 +769,8 @@ '--reset-foreman-plugin-tasks-backup', '--reset-foreman-plugin-tasks-cron-line', '--reset-foreman-plugin-version', + '--reset-foreman-provisioning-ct-location', + '--reset-foreman-provisioning-fcct-location', '--reset-foreman-proxy-autosignfile', '--reset-foreman-proxy-bind-host', '--reset-foreman-proxy-bmc', From 8c26344e40aaf91e5f7bc3aad59a23049855117e Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Tue, 19 Sep 2023 13:43:37 -0400 Subject: [PATCH 25/96] fixed failing ak upgrade test --- tests/upgrades/test_activation_key.py | 48 ++++++++++++++++++--------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py index 7e7cdf9de21..4ca26cab8ae 100644 --- a/tests/upgrades/test_activation_key.py +++ b/tests/upgrades/test_activation_key.py @@ -73,13 +73,22 @@ def test_pre_create_activation_key(self, activation_key_setup, target_sat): org_subscriptions = target_sat.api.Subscription( organization=activation_key_setup['org'] ).search() - for subscription in org_subscriptions: - ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) - ak_subscriptions = ak.product_content()['results'] - subscr_id = {subscr['product']['id'] for subscr in ak_subscriptions} - assert subscr_id == {activation_key_setup['custom_repo'].product.id} - ak.host_collection.append(target_sat.api.HostCollection().create()) - ak.update(['host_collection']) + ak_repos = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] + ak.content_override( + 
data={ + 'content_overrides': [ + {'content_label': ak_repos[0]['content']['label'], 'value': '1'} + ] + } + ) + ak_results = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] + assert ak_results[0]['overrides'][0]['name'] == 'enabled' + assert ak_results[0]['overrides'][0]['value'] is True + assert org_subscriptions[0].name == ak_results[0]['product']['name'] + ak.host_collection.append( + target_sat.api.HostCollection(organization=activation_key_setup['org']).create() + ) + ak.update(['host_collection', 'organization_id']) assert len(ak.host_collection) == 1 @pytest.mark.post_upgrade(depend_on=test_pre_create_activation_key) @@ -107,9 +116,9 @@ def test_post_crud_activation_key(self, dependent_scenario_name, target_sat): ) assert f'{pre_test_name}_ak' == ak[0].name assert f'{pre_test_name}_cv' == cv[0].name - ak[0].host_collection.append(target_sat.api.HostCollection().create()) - ak[0].update(['host_collection']) - assert len(ak[0].host_collection) == 2 + ak.host_collection.append(target_sat.api.HostCollection(organization=org).create()) + ak.update(['host_collection', 'organization_id']) + assert len(ak.host_collection) == 2 custom_repo2 = target_sat.api.Repository( product=target_sat.api.Product(organization=org[0]).create() ).create() @@ -117,12 +126,19 @@ def test_post_crud_activation_key(self, dependent_scenario_name, target_sat): cv2 = target_sat.api.ContentView(organization=org[0], repository=[custom_repo2.id]).create() cv2.publish() org_subscriptions = target_sat.api.Subscription(organization=org[0]).search() - for subscription in org_subscriptions: - provided_products_ids = [prod.id for prod in subscription.read().provided_product] - if custom_repo2.product.id in provided_products_ids: - ak[0].add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) - ak_subscriptions = ak[0].product_content()['results'] - assert custom_repo2.product.id in {subscr['product']['id'] for subscr in ak_subscriptions} + + 
ak_repos = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] + ak.content_override( + data={ + 'content_overrides': [ + {'content_label': ak_repos[1]['content']['label'], 'value': '1'} + ] + } + ) + ak_results = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] + assert ak_results[1]['overrides'][0]['name'] == 'enabled' + assert ak_results[1]['overrides'][0]['value'] is True + assert org_subscriptions[1].name == ak_results[1]['product']['name'] ak[0].delete() with pytest.raises(HTTPError): target_sat.api.ActivationKey(id=ak[0].id).read() From 681a5b1a4751641794a94521d6b20885eadec865 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Thu, 28 Sep 2023 15:45:57 -0400 Subject: [PATCH 26/96] updating adding test for rh repos with basearch --- tests/foreman/cli/test_repositories.py | 49 ++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index 2a98e9dbd58..5f9939666f2 100644 --- a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -19,6 +19,8 @@ import pytest from requests.exceptions import HTTPError +from robottelo.constants import DEFAULT_ARCHITECTURE, REPOS + @pytest.mark.rhel_ver_match('[^6]') def test_positive_custom_products_disabled_by_default( @@ -85,3 +87,50 @@ def test_negative_invalid_repo_fails_publish( with pytest.raises(HTTPError) as context: cv.publish() assert 'Remove the invalid repository before publishing again' in context.value.response.text + + +def test_positive_test_disable_rh_repo_with_basearch( + module_target_sat, module_entitlement_manifest_org +): + """Verify that users can disable Red Hat Repositories with basearch + + :id: dd3b63b7-1dbf-4d8a-ab66-348de0ad7cf3 + + :steps: + 1. You have the Appstream Kicstart repositories release version + "8" synced in from the release of RHEL 8 + 2. 
hammer repository-set disable --basearch --name --product-id + --organization --releasever + + + :expectedresults: Users can now disable Red Hat repositories with + basearch + + :customerscenario: true + + :BZ: 1932486 + """ + rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + basearch=DEFAULT_ARCHITECTURE, + org_id=module_entitlement_manifest_org.id, + product=REPOS['kickstart']['rhel8_aps']['product'], + repo=REPOS['kickstart']['rhel8_aps']['name'], + reposet=REPOS['kickstart']['rhel8_aps']['reposet'], + releasever=REPOS['kickstart']['rhel8_aps']['version'], + ) + repo = module_target_sat.api.Repository(id=rh_repo_id).read() + repo.sync(timeout=2000) + module_target_sat.execute( + f'hammer repository-set list ' + f'--organization-id {module_entitlement_manifest_org.id} ' + f'--name "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)")' + ) + disabled_repo = module_target_sat.execute( + f'hammer repository-set disable --basearch {DEFAULT_ARCHITECTURE} ' + f'--name "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)" ' + f'--product-id {repo.product.id} ' + f'--organization-id {module_entitlement_manifest_org.id} ' + f'--releasever 8 ' + f'--repository-id {rh_repo_id}' + ) + assert 'Repository disabled' in disabled_repo.stdout \ No newline at end of file From 4883c7687573cf106c0518ceb9d3c12faa56d5b8 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Thu, 28 Sep 2023 15:50:48 -0400 Subject: [PATCH 27/96] removed extra tests --- tests/upgrades/test_activation_key.py | 48 +++++++++------------------ tests/upgrades/test_repository.py | 4 +++ 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py index 4ca26cab8ae..7e7cdf9de21 100644 --- a/tests/upgrades/test_activation_key.py +++ b/tests/upgrades/test_activation_key.py @@ -73,22 +73,13 @@ def test_pre_create_activation_key(self, activation_key_setup, target_sat): org_subscriptions = 
target_sat.api.Subscription( organization=activation_key_setup['org'] ).search() - ak_repos = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] - ak.content_override( - data={ - 'content_overrides': [ - {'content_label': ak_repos[0]['content']['label'], 'value': '1'} - ] - } - ) - ak_results = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] - assert ak_results[0]['overrides'][0]['name'] == 'enabled' - assert ak_results[0]['overrides'][0]['value'] is True - assert org_subscriptions[0].name == ak_results[0]['product']['name'] - ak.host_collection.append( - target_sat.api.HostCollection(organization=activation_key_setup['org']).create() - ) - ak.update(['host_collection', 'organization_id']) + for subscription in org_subscriptions: + ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) + ak_subscriptions = ak.product_content()['results'] + subscr_id = {subscr['product']['id'] for subscr in ak_subscriptions} + assert subscr_id == {activation_key_setup['custom_repo'].product.id} + ak.host_collection.append(target_sat.api.HostCollection().create()) + ak.update(['host_collection']) assert len(ak.host_collection) == 1 @pytest.mark.post_upgrade(depend_on=test_pre_create_activation_key) @@ -116,9 +107,9 @@ def test_post_crud_activation_key(self, dependent_scenario_name, target_sat): ) assert f'{pre_test_name}_ak' == ak[0].name assert f'{pre_test_name}_cv' == cv[0].name - ak.host_collection.append(target_sat.api.HostCollection(organization=org).create()) - ak.update(['host_collection', 'organization_id']) - assert len(ak.host_collection) == 2 + ak[0].host_collection.append(target_sat.api.HostCollection().create()) + ak[0].update(['host_collection']) + assert len(ak[0].host_collection) == 2 custom_repo2 = target_sat.api.Repository( product=target_sat.api.Product(organization=org[0]).create() ).create() @@ -126,19 +117,12 @@ def test_post_crud_activation_key(self, dependent_scenario_name, 
target_sat): cv2 = target_sat.api.ContentView(organization=org[0], repository=[custom_repo2.id]).create() cv2.publish() org_subscriptions = target_sat.api.Subscription(organization=org[0]).search() - - ak_repos = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] - ak.content_override( - data={ - 'content_overrides': [ - {'content_label': ak_repos[1]['content']['label'], 'value': '1'} - ] - } - ) - ak_results = ak.product_content(data={'id': ak.id, 'content_access_mode_all': 1})['results'] - assert ak_results[1]['overrides'][0]['name'] == 'enabled' - assert ak_results[1]['overrides'][0]['value'] is True - assert org_subscriptions[1].name == ak_results[1]['product']['name'] + for subscription in org_subscriptions: + provided_products_ids = [prod.id for prod in subscription.read().provided_product] + if custom_repo2.product.id in provided_products_ids: + ak[0].add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) + ak_subscriptions = ak[0].product_content()['results'] + assert custom_repo2.product.id in {subscr['product']['id'] for subscr in ak_subscriptions} ak[0].delete() with pytest.raises(HTTPError): target_sat.api.ActivationKey(id=ak[0].id).read() diff --git a/tests/upgrades/test_repository.py b/tests/upgrades/test_repository.py index 998db385ad4..2b4d4f0479c 100644 --- a/tests/upgrades/test_repository.py +++ b/tests/upgrades/test_repository.py @@ -19,12 +19,16 @@ import pytest from robottelo.config import settings +<<<<<<< HEAD from robottelo.constants import ( DEFAULT_ARCHITECTURE, FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME, REPOS, ) +======= +from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME +>>>>>>> 4cf5d9913 (removed extra tests) from robottelo.hosts import ContentHost UPSTREAM_USERNAME = 'rTtest123' From 73a5247581f7bc07a591c7eddf50df6e8b89aceb Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Tue, 10 Oct 2023 15:23:52 -0400 Subject: [PATCH 28/96] removed 
extra console command --- tests/foreman/cli/test_repositories.py | 9 +-------- tests/upgrades/test_repository.py | 4 ---- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index 5f9939666f2..1076ba0424e 100644 --- a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -89,9 +89,7 @@ def test_negative_invalid_repo_fails_publish( assert 'Remove the invalid repository before publishing again' in context.value.response.text -def test_positive_test_disable_rh_repo_with_basearch( - module_target_sat, module_entitlement_manifest_org -): +def test_positive_disable_rh_repo_with_basearch(module_target_sat, module_entitlement_manifest_org): """Verify that users can disable Red Hat Repositories with basearch :id: dd3b63b7-1dbf-4d8a-ab66-348de0ad7cf3 @@ -120,11 +118,6 @@ def test_positive_test_disable_rh_repo_with_basearch( ) repo = module_target_sat.api.Repository(id=rh_repo_id).read() repo.sync(timeout=2000) - module_target_sat.execute( - f'hammer repository-set list ' - f'--organization-id {module_entitlement_manifest_org.id} ' - f'--name "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)")' - ) disabled_repo = module_target_sat.execute( f'hammer repository-set disable --basearch {DEFAULT_ARCHITECTURE} ' f'--name "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)" ' diff --git a/tests/upgrades/test_repository.py b/tests/upgrades/test_repository.py index 2b4d4f0479c..998db385ad4 100644 --- a/tests/upgrades/test_repository.py +++ b/tests/upgrades/test_repository.py @@ -19,16 +19,12 @@ import pytest from robottelo.config import settings -<<<<<<< HEAD from robottelo.constants import ( DEFAULT_ARCHITECTURE, FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME, REPOS, ) -======= -from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME, FAKE_4_CUSTOM_PACKAGE_NAME ->>>>>>> 4cf5d9913 (removed extra tests) from robottelo.hosts import 
ContentHost UPSTREAM_USERNAME = 'rTtest123' From 91eb04996b3faff989fc5da55d7975c36930f26d Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Tue, 10 Oct 2023 15:34:38 -0400 Subject: [PATCH 29/96] swapped out hammer execute for reposets method --- tests/foreman/cli/test_repositories.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index 1076ba0424e..0081ea5e213 100644 --- a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -118,12 +118,14 @@ def test_positive_disable_rh_repo_with_basearch(module_target_sat, module_entitl ) repo = module_target_sat.api.Repository(id=rh_repo_id).read() repo.sync(timeout=2000) - disabled_repo = module_target_sat.execute( - f'hammer repository-set disable --basearch {DEFAULT_ARCHITECTURE} ' - f'--name "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)" ' - f'--product-id {repo.product.id} ' - f'--organization-id {module_entitlement_manifest_org.id} ' - f'--releasever 8 ' - f'--repository-id {rh_repo_id}' + disabled_repo = module_target_sat.cli.RepositorySet.disable( + { + 'basearch': DEFAULT_ARCHITECTURE, + 'name': "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)", + 'product-id': repo.product.id, + 'organization-id': module_entitlement_manifest_org.id, + 'releasever': 8, + 'repository-id': rh_repo_id, + } ) - assert 'Repository disabled' in disabled_repo.stdout \ No newline at end of file + assert 'Repository disabled' in disabled_repo[0]['message'] From 55b4d0a0cba9bcbcd32dc76395e68e34a9ff33d9 Mon Sep 17 00:00:00 2001 From: Cole Higgins Date: Mon, 16 Oct 2023 15:21:58 -0400 Subject: [PATCH 30/96] addressing comments --- tests/foreman/cli/test_repositories.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/foreman/cli/test_repositories.py b/tests/foreman/cli/test_repositories.py index 0081ea5e213..222f6938860 100644 --- 
a/tests/foreman/cli/test_repositories.py +++ b/tests/foreman/cli/test_repositories.py @@ -19,7 +19,7 @@ import pytest from requests.exceptions import HTTPError -from robottelo.constants import DEFAULT_ARCHITECTURE, REPOS +from robottelo.constants import DEFAULT_ARCHITECTURE, REPOS, REPOSET @pytest.mark.rhel_ver_match('[^6]') @@ -117,14 +117,14 @@ def test_positive_disable_rh_repo_with_basearch(module_target_sat, module_entitl releasever=REPOS['kickstart']['rhel8_aps']['version'], ) repo = module_target_sat.api.Repository(id=rh_repo_id).read() - repo.sync(timeout=2000) + repo.sync(timeout=600) disabled_repo = module_target_sat.cli.RepositorySet.disable( { 'basearch': DEFAULT_ARCHITECTURE, - 'name': "Red Hat Enterprise Linux 8 for x86_64 - BaseOS (Kickstart)", + 'name': REPOSET['kickstart']['rhel8'], 'product-id': repo.product.id, 'organization-id': module_entitlement_manifest_org.id, - 'releasever': 8, + 'releasever': REPOS['kickstart']['rhel8_aps']['version'], 'repository-id': rh_repo_id, } ) From 72c87108bf22ec14e5ae68d04feb484b4c4ed17f Mon Sep 17 00:00:00 2001 From: David Moore <109112035+damoore044@users.noreply.github.com> Date: Tue, 17 Oct 2023 14:58:53 -0400 Subject: [PATCH 31/96] [Stream] fix for e2e errata:cli ~test_positive_list_affected_chosts_by_erratum_restrict_flag (#12896) * Testing stream fixes for errata:cli * Fix setup, registration, teardown, helpers * Trimming unnecessary executions. 
* One more unnecessary execution --- tests/foreman/cli/test_errata.py | 78 ++++++++++++++++++++++---------- 1 file changed, 54 insertions(+), 24 deletions(-) diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index 13e0502ac31..f74c434b1ee 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -25,7 +25,6 @@ from robottelo.cli.activationkey import ActivationKey from robottelo.cli.base import CLIReturnCodeError -from robottelo.cli.contentview import ContentView, ContentViewFilter from robottelo.cli.erratum import Erratum from robottelo.cli.factory import ( make_content_view_filter, @@ -195,16 +194,28 @@ def hosts(request): def register_hosts( hosts, module_entitlement_manifest_org, - module_ak_cv_lce, - rh_repo, - custom_repo, + module_lce, + module_ak, module_target_sat, ): """Register hosts to Satellite""" for host in hosts: - host.install_katello_ca(module_target_sat) - host.register_contenthost(module_entitlement_manifest_org.name, module_ak_cv_lce.name) - host.enable_repo(REPOS['rhst7']['id']) + module_target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': REPO_WITH_ERRATA['url'], + 'organization-id': module_entitlement_manifest_org.id, + 'lifecycle-environment-id': module_lce.id, + 'activationkey-id': module_ak.id, + } + ) + host.register( + activation_keys=module_ak.name, + target=module_target_sat, + org=module_entitlement_manifest_org, + loc=None, + ) + assert host.subscribed + return hosts @@ -212,6 +223,9 @@ def register_hosts( def errata_hosts(register_hosts): """Ensure that rpm is installed on host.""" for host in register_hosts: + # Enable all custom and rh repositories. + host.execute(r'subscription-manager repos --enable \*') + host.execute(r'yum-config-manager --enable \*') # Remove all packages. for errata in REPO_WITH_ERRATA['errata']: # Remove package if present, old or new. 
@@ -225,6 +239,7 @@ def errata_hosts(register_hosts): result = host.execute(f'yum install -y {old_package}') if result.status != 0: pytest.fail(f'Failed to install {old_package}: {result.stdout} {result.stderr}') + return register_hosts @@ -339,7 +354,7 @@ def filter_sort_errata(org, sort_by_date='issued', filter_by_org=None): assert errata_ids == sorted_errata_ids -def cv_publish_promote(cv, org, lce): +def cv_publish_promote(sat, cv, org, lce, force=False): """Publish and promote a new version into the given lifecycle environment. :param cv: content view @@ -349,27 +364,37 @@ def cv_publish_promote(cv, org, lce): :param lce: lifecycle environment :type lce: entities.LifecycleEnvironment """ - ContentView.publish({'id': cv.id}) - cvv = ContentView.info({'id': cv.id})['versions'][-1] - ContentView.version_promote( + sat.cli.ContentView.publish( { - 'id': cvv['id'], - 'organization-id': org.id, + 'id': cv.id, + 'organization': org, + 'lifecycle-environment-ids': lce.id, + } + ) + cv = cv.read() + cvv_id = sorted(cvv.id for cvv in cv.version)[-1] + sat.cli.ContentView.version_promote( + { + 'id': cvv_id, + 'organization': org, + 'content-view-id': cv.id, 'to-lifecycle-environment-id': lce.id, + 'force': force, } ) -def cv_filter_cleanup(filter_id, cv, org, lce): +def cv_filter_cleanup(sat, filter_id, cv, org, lce): """Delete the cv filter, then publish and promote an unfiltered version.""" - ContentViewFilter.delete( + sat.cli.ContentViewFilter.delete( { 'content-view-id': cv.id, 'id': filter_id, 'organization-id': org.id, } ) - cv_publish_promote(cv, org, lce) + cv = cv.read() + cv_publish_promote(sat, cv, org, lce) @pytest.mark.tier3 @@ -667,7 +692,12 @@ def test_install_errata_to_one_host( @pytest.mark.tier3 @pytest.mark.e2e def test_positive_list_affected_chosts_by_erratum_restrict_flag( - request, module_entitlement_manifest_org, module_cv, module_lce, errata_hosts + target_sat, + request, + module_entitlement_manifest_org, + module_cv, + module_lce, + 
errata_hosts, ): """View a list of affected content hosts for an erratum filtered with restrict flags. Applicability is calculated using the Library, @@ -699,32 +729,30 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( :CaseAutomation: Automated """ - # Uninstall package so that only the first errata applies. for host in errata_hosts: host.execute(f'yum erase -y {REPO_WITH_ERRATA["errata"][1]["package_name"]}') - + host.execute('subscription-manager repos') + target_sat.cli.Host.errata_recalculate({'host-id': host.nailgun_host.id}) # Create list of uninstallable errata. errata = REPO_WITH_ERRATA['errata'][0] uninstallable = REPO_WITH_ERRATA['errata_ids'].copy() uninstallable.remove(errata['id']) - # Check search for only installable errata param = { 'errata-restrict-installable': 1, - 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, } errata_ids = get_errata_ids(param) + assert errata_ids, 'No installable errata found' assert errata['id'] in errata_ids, 'Errata not found in list of installable errata' assert not set(uninstallable) & set(errata_ids), 'Unexpected errata found' # Check search of errata is not affected by installable=0 restrict flag param = { 'errata-restrict-installable': 0, - 'content-view-id': module_cv.id, 'lifecycle-environment-id': module_lce.id, 'organization-id': module_entitlement_manifest_org.id, 'per-page': PER_PAGE_LARGE, @@ -770,6 +798,7 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( @request.addfinalizer def cleanup(): cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_entitlement_manifest_org, @@ -786,7 +815,7 @@ def cleanup(): ) # Publish and promote a new version with the filter - cv_publish_promote(module_cv, module_entitlement_manifest_org, module_lce) + cv_publish_promote(target_sat, module_cv, module_entitlement_manifest_org, module_lce) # Check that the 
installable erratum is no longer present in the list param = { @@ -956,6 +985,7 @@ def test_host_errata_search_commands( @request.addfinalizer def cleanup(): cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_entitlement_manifest_org, @@ -972,7 +1002,7 @@ def cleanup(): ) # Publish and promote a new version with the filter - cv_publish_promote(module_cv, module_entitlement_manifest_org, module_lce) + cv_publish_promote(target_sat, module_cv, module_entitlement_manifest_org, module_lce) # Step 8: Run tests again. Applicable should still be true, installable should now be false. # Search for hosts that require the bugfix package. From bb8eb50bc8ed34b1933db62ea4a07f603da59788 Mon Sep 17 00:00:00 2001 From: Adarsh dubey Date: Wed, 18 Oct 2023 00:37:36 +0530 Subject: [PATCH 32/96] Fixing Discovery tests (#12914) --- tests/foreman/api/test_discoveredhost.py | 12 ++++++------ tests/foreman/cli/test_discoveredhost.py | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index 773175812cf..0db7c486bcf 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -172,7 +172,7 @@ class TestDiscoveredHost: @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) - @pytest.mark.rhel_ver_match('[^6]') + @pytest.mark.rhel_ver_list([8, 9]) @pytest.mark.tier3 def test_positive_provision_pxe_host( self, @@ -204,8 +204,8 @@ def test_positive_provision_pxe_host( mac = provisioning_host._broker_args['provisioning_nic_mac_addr'] wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=600, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = 
provisioning_hostgroup @@ -227,7 +227,7 @@ def test_positive_provision_pxe_host( @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) - @pytest.mark.rhel_ver_match('[^6]') + @pytest.mark.rhel_ver_list([8, 9]) @pytest.mark.tier3 def test_positive_provision_pxe_less_host( self, @@ -254,8 +254,8 @@ def test_positive_provision_pxe_less_host( mac = pxeless_discovery_host._broker_args['provisioning_nic_mac_addr'] wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=600, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup diff --git a/tests/foreman/cli/test_discoveredhost.py b/tests/foreman/cli/test_discoveredhost.py index 031116a2e09..4ee7015ec03 100644 --- a/tests/foreman/cli/test_discoveredhost.py +++ b/tests/foreman/cli/test_discoveredhost.py @@ -24,7 +24,7 @@ @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) -@pytest.mark.rhel_ver_match('8') +@pytest.mark.rhel_ver_match('7') def test_rhel_pxe_discovery_provisioning( module_provisioning_rhel_content, module_discovery_sat, @@ -57,8 +57,8 @@ def test_rhel_pxe_discovery_provisioning( wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=600, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup @@ -95,7 +95,7 @@ def _finalize(): @pytest.mark.on_premises_provisioning @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) -@pytest.mark.rhel_ver_match('8') 
+@pytest.mark.rhel_ver_match('7') def test_rhel_pxeless_discovery_provisioning( module_discovery_sat, pxeless_discovery_host, @@ -120,8 +120,8 @@ def test_rhel_pxeless_discovery_provisioning( wait_for( lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], - timeout=240, - delay=20, + timeout=600, + delay=40, ) discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup From ed25cc48b772c07cd307e4dc235508b5954968e4 Mon Sep 17 00:00:00 2001 From: Lukas Hellebrandt Date: Wed, 4 Oct 2023 15:44:02 +0200 Subject: [PATCH 33/96] Use sAMAccountName --- tests/foreman/cli/test_ldapauthsource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index 92d5185cc83..3feed9bad43 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -127,7 +127,7 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data): 'attr-firstname': LDAP_ATTR['firstname'], 'attr-lastname': LDAP_ATTR['surname'], 'attr-mail': LDAP_ATTR['mail'], - 'account': ad_data['ldap_user_name'], + 'account': fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", 'account-password': ad_data['ldap_user_passwd'], 'base-dn': ad_data['base_dn'], } From e18735e62dde4da1359ec1e4d51bbb93ca8ffac8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 23:54:43 -0400 Subject: [PATCH 34/96] Bump pytest-reportportal from 5.2.2 to 5.3.0 (#12932) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8f8d3ce9496..2eb0a3241d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,7 +15,7 @@ python-box==7.1.1 pytest==7.4.2 pytest-services==2.2.1 pytest-mock==3.11.1 -pytest-reportportal==5.2.2 +pytest-reportportal==5.3.0 pytest-xdist==3.3.1 
pytest-ibutsu==2.2.4 PyYAML==6.0.1 From 76666e2ca1f607ab77de5463cc373b1d566c0acc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:33:04 +0530 Subject: [PATCH 35/96] Bump deepdiff from 6.6.0 to 6.6.1 (#12931) Bumps [deepdiff](https://github.com/seperman/deepdiff) from 6.6.0 to 6.6.1. - [Release notes](https://github.com/seperman/deepdiff/releases) - [Changelog](https://github.com/seperman/deepdiff/blob/master/docs/changelog.rst) - [Commits](https://github.com/seperman/deepdiff/commits) --- updated-dependencies: - dependency-name: deepdiff dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2eb0a3241d7..c8d944d9722 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ betelgeuse==1.10.0 broker[docker]==0.4.1 cryptography==41.0.4 -deepdiff==6.6.0 +deepdiff==6.6.1 dynaconf[vault]==3.2.3 fauxfactory==3.1.0 jinja2==3.1.2 From a82d63bbc0053c41cd4d058caff86fd774ab5c3b Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Wed, 18 Oct 2023 12:46:36 +0530 Subject: [PATCH 36/96] Add automation for BZ:1994654/SAT-4845 (#12901) Signed-off-by: Gaurav Talreja --- tests/foreman/ui/test_host.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index 08ce12c02ff..6f5f34fb210 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -1271,9 +1271,9 @@ def test_global_registration_form_populate( 4. Open the global registration form and select the same host-group 5. 
check host registration form should be populated automatically based on the host-group - :BZ: 2056469 + :BZ: 2056469, 1994654 - :CaseAutomation: Automated + :customerscenario: true """ hg_name = gen_string('alpha') iface = gen_string('alpha') @@ -1287,6 +1287,8 @@ def test_global_registration_form_populate( content_view=module_promoted_cv, group_parameters_attributes=[group_params], ).create() + new_org = target_sat.api.Organization().create() + new_ak = target_sat.api.ActivationKey(organization=new_org).create() with session: session.hostgroup.update( hg_name, @@ -1302,12 +1304,23 @@ def test_global_registration_form_populate( }, full_read=True, ) - assert hg_name in cmd['general']['host_group'] assert module_ak_with_cv.name in cmd['general']['activation_key_helper'] assert module_lce.name in cmd['advanced']['life_cycle_env_helper'] assert constants.FAKE_0_CUSTOM_PACKAGE in cmd['advanced']['install_packages_helper'] + session.organization.select(org_name=new_org.name) + cmd = session.host.get_register_command( + { + 'general.organization': new_org.name, + 'general.operating_system': default_os.title, + 'general.insecure': True, + }, + full_read=True, + ) + assert new_org.name in cmd['general']['organization'] + assert new_ak.name in cmd['general']['activation_keys'] + @pytest.mark.tier2 def test_global_registration_with_capsule_host( From ea7b5c952000e5566eb4ab459ef997b7df3221f6 Mon Sep 17 00:00:00 2001 From: Lukas Pramuk Date: Wed, 18 Oct 2023 15:54:57 +0200 Subject: [PATCH 37/96] Fix maintain CLI caching class attributes (#12918) --- robottelo/cli/sm_packages.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/robottelo/cli/sm_packages.py b/robottelo/cli/sm_packages.py index ece44a773e0..d4674279172 100644 --- a/robottelo/cli/sm_packages.py +++ b/robottelo/cli/sm_packages.py @@ -30,6 +30,7 @@ class Packages(Base): def lock(cls, options=None): """Build satellite-maintain packages lock""" cls.command_sub = 'lock' + cls.command_end = None options = options or 
{} return cls.sm_execute(cls._construct_command(options)) @@ -37,6 +38,7 @@ def lock(cls, options=None): def unlock(cls, options=None): """Build satellite-maintain packages unlock""" cls.command_sub = 'unlock' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -44,6 +46,7 @@ def unlock(cls, options=None): def is_locked(cls, options=None): """Build satellite-maintain packages is-locked""" cls.command_sub = 'is-locked' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) @@ -75,5 +78,6 @@ def update(cls, packages='', options=None): def check_update(cls, options=None): """Build satellite-maintain packages check-update""" cls.command_sub = 'check-update' + cls.command_end = None options = options or {} return cls.sm_execute(cls._construct_command(options)) From 522cceb74e2a7a9d0ab08faf7d56c4c16e93bfae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 20 Oct 2023 00:06:24 -0400 Subject: [PATCH 38/96] Bump pytest-mock from 3.11.1 to 3.12.0 (#12948) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c8d944d9722..e2c7c0bb551 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,7 @@ pyotp==2.9.0 python-box==7.1.1 pytest==7.4.2 pytest-services==2.2.1 -pytest-mock==3.11.1 +pytest-mock==3.12.0 pytest-reportportal==5.3.0 pytest-xdist==3.3.1 pytest-ibutsu==2.2.4 From d1dcab6d531b1c9787aca3e3c0b5d8255615861c Mon Sep 17 00:00:00 2001 From: vsedmik <46570670+vsedmik@users.noreply.github.com> Date: Fri, 20 Oct 2023 08:50:25 +0200 Subject: [PATCH 39/96] Fix Container Image Tag test (#12848) --- tests/foreman/ui/test_containerimagetag.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/tests/foreman/ui/test_containerimagetag.py b/tests/foreman/ui/test_containerimagetag.py index 
681cb1e877d..a1e68bc7e61 100644 --- a/tests/foreman/ui/test_containerimagetag.py +++ b/tests/foreman/ui/test_containerimagetag.py @@ -22,9 +22,11 @@ from robottelo.constants import ( CONTAINER_REGISTRY_HUB, CONTAINER_UPSTREAM_NAME, + DEFAULT_CV, ENVIRONMENT, REPO_TYPE, ) +from robottelo.utils.issue_handlers import is_open @pytest.fixture(scope="module") @@ -49,7 +51,6 @@ def module_repository(module_product): return repo -@pytest.mark.skip_if_open("BZ:2009069") @pytest.mark.tier2 def test_positive_search(session, module_org, module_product, module_repository): """Search for a docker image tag and reads details of it @@ -60,13 +61,22 @@ def test_positive_search(session, module_org, module_product, module_repository) details are read :CaseLevel: Integration + + :BZ: 2009069, 2242515 """ with session: session.organization.select(org_name=module_org.name) search = session.containerimagetag.search('latest') - assert module_product.name in [i['Product Name'] for i in search] - assert module_repository.name in [i['Repository Name'] for i in search] + if not is_open('BZ:2242515'): + assert module_product.name in [i['Product Name'] for i in search] values = session.containerimagetag.read('latest') - assert module_product.name == values['details']['product'] - assert module_repository.name == values['details']['repository'] + if not is_open('BZ:2242515'): + assert module_product.name == values['details']['product'] assert values['lce']['table'][0]['Environment'] == ENVIRONMENT + repo_line = next( + (item for item in values['repos']['table'] if item['Name'] == module_repository.name), + None, + ) + assert module_product.name == repo_line['Product'] + assert DEFAULT_CV == repo_line['Content View'] + assert 'Success' in repo_line['Last Sync'] From 9ac30c6959101f28f901b3e8a3a6c157315b56e3 Mon Sep 17 00:00:00 2001 From: vsedmik <46570670+vsedmik@users.noreply.github.com> Date: Fri, 20 Oct 2023 11:13:34 +0200 Subject: [PATCH 40/96] Remove katello-agent from helpers and mixins 
(#12892) --- pytest_fixtures/component/repository.py | 1 + robottelo/host_helpers/repository_mixins.py | 5 +--- robottelo/hosts.py | 25 ------------------- testimony.yaml | 1 - tests/foreman/api/test_errata.py | 5 +--- tests/foreman/cli/test_host.py | 12 ++++----- tests/foreman/cli/test_reporttemplates.py | 1 - .../cli/test_vm_install_products_package.py | 6 ++--- tests/foreman/destructive/test_contenthost.py | 2 +- tests/foreman/ui/test_activationkey.py | 2 +- tests/foreman/ui/test_contenthost.py | 6 ++--- tests/foreman/ui/test_dashboard.py | 2 +- tests/foreman/ui/test_errata.py | 8 ++---- tests/foreman/ui/test_host.py | 12 +++------ tests/foreman/ui/test_hostcollection.py | 9 +++---- tests/foreman/ui/test_subscription.py | 1 - 16 files changed, 24 insertions(+), 74 deletions(-) diff --git a/pytest_fixtures/component/repository.py b/pytest_fixtures/component/repository.py index 2336bf12e5a..209245ff649 100644 --- a/pytest_fixtures/component/repository.py +++ b/pytest_fixtures/component/repository.py @@ -69,6 +69,7 @@ def module_rhst_repo(module_target_sat, module_org_with_manifest, module_promote ) cv.publish() cv = cv.read() + cv.version.sort(key=lambda version: version.id) cv.version[-1].promote(data={'environment_ids': module_lce.id}) return REPOS['rhst7']['id'] diff --git a/robottelo/host_helpers/repository_mixins.py b/robottelo/host_helpers/repository_mixins.py index 99b5576edb9..d5d532b89f9 100644 --- a/robottelo/host_helpers/repository_mixins.py +++ b/robottelo/host_helpers/repository_mixins.py @@ -721,18 +721,16 @@ def setup_virtual_machine( vm, location_title=None, patch_os_release=False, - install_katello_agent=True, enable_rh_repos=True, enable_custom_repos=False, configure_rhel_repo=False, ): """ Setup The virtual machine basic task, eg: install katello ca, - register vm host, enable rh repos and install katello-agent + register vm host and enable rh repos :param robottelo.hosts.ContentHost vm: The Virtual machine to setup. 
:param bool patch_os_release: whether to patch the VM with os version. - :param bool install_katello_agent: whether to install katello-agent :param bool enable_rh_repos: whether to enable RH repositories :param bool enable_custom_repos: whether to enable custom repositories :param bool configure_rhel_repo: Whether to configure the distro Red Hat repository, @@ -762,7 +760,6 @@ def setup_virtual_machine( product_label=self.custom_product['label'] if self.custom_product else None, activation_key=self._setup_content_data['activation_key']['name'], patch_os_release_distro=patch_os_release_distro, - install_katello_agent=install_katello_agent, ) if configure_rhel_repo: rhel_repo_option_name = f'rhel{constants.DISTROS_MAJOR_VERSION[self.distro]}_os' diff --git a/robottelo/hosts.py b/robottelo/hosts.py index cc17f606e38..d8838a9606b 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -599,26 +599,6 @@ def get_base_url_for_older_rhel_minor(self): raise ValueError('not supported major version') return baseurl - def install_katello_agent(self): - """Install katello-agent on the virtual machine. - - :return: None. - :raises ContentHostError: if katello-agent is not installed. 
- """ - result = self.execute('yum install -y katello-agent') - if result.status != 0: - raise ContentHostError(f'Failed to install katello-agent: {result.stdout}') - if getattr(self, '_cont_inst', None): - # We're running in a container, goferd won't be running as a service - # so let's run it in the foreground, then detach from the exec - self._cont_inst.exec_run('goferd -f', detach=True) - else: - # We're in a traditional VM, so goferd should be running after katello-agent install - try: - wait_for(lambda: self.execute('service goferd status').status == 0) - except TimedOutError: - raise ContentHostError('katello-agent is not running') - def install_katello_host_tools(self): """Installs Katello host tools on the broker virtual machine @@ -1203,7 +1183,6 @@ def contenthost_setup( lce=None, activation_key=None, patch_os_release_distro=None, - install_katello_agent=True, ): """ Setup a Content Host with basic components and tasks. @@ -1215,7 +1194,6 @@ def contenthost_setup( :param str lce: Lifecycle environment label if applicable. :param str activation_key: Activation key name if applicable. :param str patch_os_release_distro: distro name, to patch the VM with os version. - :param bool install_katello_agent: whether to install katello agent. """ rh_repo_ids = rh_repo_ids or [] repo_labels = repo_labels or [] @@ -1241,8 +1219,6 @@ def contenthost_setup( raise CLIFactoryError( f'Failed to enable custom repository {repo_label!s}\n{result.stderr}' ) - if install_katello_agent: - self.install_katello_agent() def virt_who_hypervisor_config( self, @@ -1312,7 +1288,6 @@ def virt_who_hypervisor_config( activation_key=activation_key['name'], patch_os_release_distro='rhel7', rh_repo_ids=[repo['repository-id'] for repo in repos if repo['cdn']], - install_katello_agent=False, ) # configure manually RHEL custom repo url as sync time is very big # (more than 2 hours for RHEL 7Server) and not critical in this context. 
diff --git a/testimony.yaml b/testimony.yaml index 46d0b289f43..237a80b8f23 100644 --- a/testimony.yaml +++ b/testimony.yaml @@ -65,7 +65,6 @@ CaseComponent: - Infrastructure - Installer - InterSatelliteSync - - katello-agent - katello-tracer - LDAP - Leappintegration diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index 8c6383a4c7a..a20e38e7a90 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -562,7 +562,6 @@ def test_positive_incremental_update_required( rhel7_contenthost.register_contenthost(module_org.label, activation_key.name) assert rhel7_contenthost.subscribed rhel7_contenthost.enable_repo(constants.REPOS['rhst7']['id']) - rhel7_contenthost.install_katello_agent() host = rhel7_contenthost.nailgun_host # install package to create demand for an Erratum assert rhel7_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}').status == 0 @@ -670,9 +669,7 @@ def test_errata_installation_with_swidtags( 'appstream': settings.repos.rhel8_os.appstream, } ) - module_repos_collection_with_manifest.setup_virtual_machine( - rhel8_contenthost, install_katello_agent=False - ) + module_repos_collection_with_manifest.setup_virtual_machine(rhel8_contenthost) # install older module stream rhel8_contenthost.add_rex_key(satellite=target_sat) diff --git a/tests/foreman/cli/test_host.py b/tests/foreman/cli/test_host.py index 538c8ebe27a..5453de02b53 100644 --- a/tests/foreman/cli/test_host.py +++ b/tests/foreman/cli/test_host.py @@ -1948,9 +1948,9 @@ def test_positive_attach( } ) host_subscription_client.enable_repo(module_rhst_repo) - # ensure that katello agent can be installed + # ensure that katello-host-tools can be installed try: - host_subscription_client.install_katello_agent() + host_subscription_client.install_katello_host_tools() except ContentHostError: pytest.fail('ContentHostError raised unexpectedly!') @@ -1995,9 +1995,9 @@ def test_positive_attach_with_lce( } ) 
host_subscription_client.enable_repo(module_rhst_repo) - # ensure that katello agent can be installed + # ensure that katello-host-tools can be installed try: - host_subscription_client.install_katello_agent() + host_subscription_client.install_katello_host_tools() except ContentHostError: pytest.fail('ContentHostError raised unexpectedly!') @@ -2209,9 +2209,9 @@ def test_positive_auto_attach( ) Host.subscription_auto_attach({'host-id': host['id']}) host_subscription_client.enable_repo(module_rhst_repo) - # ensure that katello agent can be installed + # ensure that katello-host-tools can be installed try: - host_subscription_client.install_katello_agent() + host_subscription_client.install_katello_host_tools() except ContentHostError: pytest.fail('ContentHostError raised unexpectedly!') diff --git a/tests/foreman/cli/test_reporttemplates.py b/tests/foreman/cli/test_reporttemplates.py index 8f2729046b3..7f1b939f1e0 100644 --- a/tests/foreman/cli/test_reporttemplates.py +++ b/tests/foreman/cli/test_reporttemplates.py @@ -910,7 +910,6 @@ def test_positive_generate_hostpkgcompare( assert client.subscribed clients.append(client) client.enable_repo(REPOS['rhst7']['id']) - client.install_katello_agent() clients.sort(key=lambda client: client.hostname) hosts_info = [Host.info({'name': client.hostname}) for client in clients] diff --git a/tests/foreman/cli/test_vm_install_products_package.py b/tests/foreman/cli/test_vm_install_products_package.py index 1e16997457d..ad17cdaf5f0 100644 --- a/tests/foreman/cli/test_vm_install_products_package.py +++ b/tests/foreman/cli/test_vm_install_products_package.py @@ -67,10 +67,8 @@ def test_vm_install_package(repos_collection, function_entitlement_manifest_org, # Create repos, content view, and activation key. 
repos_collection.setup_content(function_entitlement_manifest_org.id, lce['id']) with Broker(nick=distro, host_class=ContentHost) as host: - # install katello-agent - repos_collection.setup_virtual_machine( - host, enable_custom_repos=True, install_katello_agent=False - ) + # enable custom repos + repos_collection.setup_virtual_machine(host, enable_custom_repos=True) # install a package from custom repo result = host.execute(f'yum -y install {FAKE_0_CUSTOM_PACKAGE}') assert result.status == 0 diff --git a/tests/foreman/destructive/test_contenthost.py b/tests/foreman/destructive/test_contenthost.py index 3d07a2b493d..a420502ba16 100644 --- a/tests/foreman/destructive/test_contenthost.py +++ b/tests/foreman/destructive/test_contenthost.py @@ -53,7 +53,7 @@ def test_content_access_after_stopped_foreman(target_sat, rhel7_contenthost): ], ) repos_collection.setup_content(org.id, lce.id, upload_manifest=False, override=True) - repos_collection.setup_virtual_machine(rhel7_contenthost, install_katello_agent=False) + repos_collection.setup_virtual_machine(rhel7_contenthost) result = rhel7_contenthost.execute(f'yum -y install {FAKE_1_CUSTOM_PACKAGE}') assert result.status == 0 assert target_sat.cli.Service.stop(options={'only': 'foreman'}).status == 0 diff --git a/tests/foreman/ui/test_activationkey.py b/tests/foreman/ui/test_activationkey.py index d0664d86e27..04546d9ba76 100644 --- a/tests/foreman/ui/test_activationkey.py +++ b/tests/foreman/ui/test_activationkey.py @@ -103,7 +103,7 @@ def test_positive_end_to_end_register( repos_collection.setup_content(org.id, lce.id, upload_manifest=False) ak_name = repos_collection.setup_content_data['activation_key']['name'] - repos_collection.setup_virtual_machine(rhel7_contenthost, install_katello_agent=False) + repos_collection.setup_virtual_machine(rhel7_contenthost) with session: session.organization.select(org.name) session.location.select(default_location.name) diff --git a/tests/foreman/ui/test_contenthost.py 
b/tests/foreman/ui/test_contenthost.py index 1db98df1d70..97cdef261fd 100644 --- a/tests/foreman/ui/test_contenthost.py +++ b/tests/foreman/ui/test_contenthost.py @@ -81,10 +81,8 @@ def vm(module_repos_collection_with_manifest, rhel7_contenthost, target_sat): @pytest.fixture def vm_module_streams(module_repos_collection_with_manifest, rhel8_contenthost, target_sat): - """Virtual machine registered in satellite without katello-agent installed""" - module_repos_collection_with_manifest.setup_virtual_machine( - rhel8_contenthost, install_katello_agent=False - ) + """Virtual machine registered in satellite""" + module_repos_collection_with_manifest.setup_virtual_machine(rhel8_contenthost) rhel8_contenthost.add_rex_key(satellite=target_sat) yield rhel8_contenthost diff --git a/tests/foreman/ui/test_dashboard.py b/tests/foreman/ui/test_dashboard.py index 80b7ce16732..bd11b7bb900 100644 --- a/tests/foreman/ui/test_dashboard.py +++ b/tests/foreman/ui/test_dashboard.py @@ -261,7 +261,7 @@ def test_positive_user_access_with_host_filter( rhel_contenthost.add_rex_key(target_sat) repos_collection.setup_content(org.id, lce.id, upload_manifest=False) repos_collection.setup_virtual_machine( - rhel_contenthost, location_title=module_location.name, install_katello_agent=False + rhel_contenthost, location_title=module_location.name ) rhel_contenthost.run('subscription-manager repos --enable "*"') result = rhel_contenthost.run(f'yum install -y {FAKE_7_CUSTOM_PACKAGE}') diff --git a/tests/foreman/ui/test_errata.py b/tests/foreman/ui/test_errata.py index fb0c703530b..755a69172dc 100644 --- a/tests/foreman/ui/test_errata.py +++ b/tests/foreman/ui/test_errata.py @@ -484,9 +484,7 @@ def test_positive_apply_for_all_hosts( nick=module_repos_collection_with_setup.distro, host_class=ContentHost, _count=2 ) as clients: for client in clients: - module_repos_collection_with_setup.setup_virtual_machine( - client, install_katello_agent=False - ) + 
module_repos_collection_with_setup.setup_virtual_machine(client) client.add_rex_key(satellite=target_sat) assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE) with session: @@ -582,9 +580,7 @@ def test_positive_filter_by_environment( nick=module_repos_collection_with_setup.distro, host_class=ContentHost, _count=2 ) as clients: for client in clients: - module_repos_collection_with_setup.setup_virtual_machine( - client, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client) assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) # Promote the latest content view version to a new lifecycle environment content_view = entities.ContentView( diff --git a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index 6f5f34fb210..b76273263bf 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -1858,9 +1858,7 @@ def test_positive_update_delete_package( """ client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) with session: session.location.select(loc_name=DEFAULT_LOC) if not is_open('BZ:2132680'): @@ -1979,9 +1977,7 @@ def test_positive_apply_erratum( # install package client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) errata_id = settings.repos.yum_3.errata[25] client.run(f'yum install -y {FAKE_7_CUSTOM_PACKAGE}') result = client.run(f'rpm -q {FAKE_7_CUSTOM_PACKAGE}') @@ -2062,9 +2058,7 @@ def test_positive_crud_module_streams( module_name = 'duck' client = rhel_contenthost client.add_rex_key(target_sat) - module_repos_collection_with_setup.setup_virtual_machine( - 
client, target_sat, install_katello_agent=False - ) + module_repos_collection_with_setup.setup_virtual_machine(client, target_sat) with session: session.location.select(loc_name=DEFAULT_LOC) streams = session.host_new.get_module_streams(client.hostname, module_name) diff --git a/tests/foreman/ui/test_hostcollection.py b/tests/foreman/ui/test_hostcollection.py index 2c60125c293..ae673f7650f 100644 --- a/tests/foreman/ui/test_hostcollection.py +++ b/tests/foreman/ui/test_hostcollection.py @@ -74,7 +74,7 @@ def vm_content_hosts(smart_proxy_location, module_repos_collection, module_targe distro = module_repos_collection.distro with Broker(nick=distro, host_class=ContentHost, _count=2) as clients: for client in clients: - module_repos_collection.setup_virtual_machine(client, install_katello_agent=False) + module_repos_collection.setup_virtual_machine(client) client.add_rex_key(satellite=module_target_sat) module_target_sat.api_factory.update_vm_host_location(client, smart_proxy_location.id) yield clients @@ -86,9 +86,7 @@ def vm_content_hosts_module_stream( ): with Broker(nick='rhel8', host_class=ContentHost, _count=2) as clients: for client in clients: - module_repos_collection_with_manifest.setup_virtual_machine( - client, install_katello_agent=False - ) + module_repos_collection_with_manifest.setup_virtual_machine(client) client.add_rex_key(satellite=module_target_sat) module_target_sat.api_factory.update_vm_host_location(client, smart_proxy_location.id) yield clients @@ -567,8 +565,7 @@ def test_positive_change_assigned_content( :steps: 1. Setup activation key with content view that contain product repositories - 2. Prepare hosts (minimum 2) and subscribe them to activation key, - katello agent must be also installed and running on each host + 2. Prepare hosts (minimum 2) and subscribe them to activation key 3. Create a host collection and add the hosts to it 4. 
Run "subscription-manager repos" command on each host to notice the repos urls current values diff --git a/tests/foreman/ui/test_subscription.py b/tests/foreman/ui/test_subscription.py index 617c9753403..2196eabb60b 100644 --- a/tests/foreman/ui/test_subscription.py +++ b/tests/foreman/ui/test_subscription.py @@ -315,7 +315,6 @@ def test_positive_view_vdc_subscription_products( target_sat, org.label, activation_key=repos_collection.setup_content_data['activation_key']['name'], - install_katello_agent=False, ) with session: session.organization.select(org.name) From f9cec5efe795129617cfe6ae2f1ff0a2343e01e0 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Fri, 20 Oct 2023 19:06:34 +0530 Subject: [PATCH 41/96] [Upgrades] Add upgrade tests for ProvisioningTemplates (#12897) Add upgrade tests for ProvisioningTemplates Signed-off-by: Gaurav Talreja --- pytest_fixtures/component/domain.py | 10 +- tests/upgrades/test_provisioningtemplate.py | 141 ++++++++++++++++++++ 2 files changed, 146 insertions(+), 5 deletions(-) create mode 100644 tests/upgrades/test_provisioningtemplate.py diff --git a/pytest_fixtures/component/domain.py b/pytest_fixtures/component/domain.py index c62b45f54d5..57a1c265b30 100644 --- a/pytest_fixtures/component/domain.py +++ b/pytest_fixtures/component/domain.py @@ -1,15 +1,15 @@ # Domain Fixtures -from nailgun import entities import pytest @pytest.fixture(scope='session') def default_domain(session_target_sat, default_smart_proxy): domain_name = session_target_sat.hostname.partition('.')[-1] - dom = entities.Domain().search(query={'search': f'name={domain_name}'})[0] - dom.dns = default_smart_proxy - dom.update(['dns']) - return entities.Domain(id=dom.id).read() + dom = session_target_sat.api.Domain().search(query={'search': f'name={domain_name}'})[0] + if 'dns' in session_target_sat.get_features(): + dom.dns = default_smart_proxy + dom.update(['dns']) + return session_target_sat.api.Domain(id=dom.id).read() @pytest.fixture(scope='module') diff 
--git a/tests/upgrades/test_provisioningtemplate.py b/tests/upgrades/test_provisioningtemplate.py new file mode 100644 index 00000000000..b5bc11af363 --- /dev/null +++ b/tests/upgrades/test_provisioningtemplate.py @@ -0,0 +1,141 @@ +"""Test for ProvisioningTemplates related Upgrade Scenario's + +:Requirement: UpgradedSatellite + +:CaseAutomation: Automated + +:CaseComponent: ProvisioningTemplates + +:Team: Rocket + +:TestType: Functional + +:CaseLevel: Integration + +:CaseImportance: High + +:Upstream: No +""" +from fauxfactory import gen_string +import pytest + +from robottelo.config import settings + +provisioning_template_kinds = ['provision', 'PXEGrub', 'PXEGrub2', 'PXELinux', 'iPXE'] + + +class TestScenarioPositiveProvisioningTemplates: + """Provisioning Templates can be rendered correctly on host created in previous and upgraded versions + + :steps: + 1. Create host on Satellite and trying rendering provisioning templates + 2. Upgrade the Satellite to the next or latest version. + 3. After the upgrade, verify provisioning templates can be rendered on existing host + 4. Create host on upgraded Satellite and trying rendering provisioning templates. + + :expectedresults: + 1. Provisioning templates for host are able to render in previous and upgraded versions + """ + + @pytest.mark.pre_upgrade + @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) + def test_pre_scenario_provisioning_templates( + self, + module_target_sat, + module_org, + module_location, + default_os, + default_domain, + default_architecture, + default_partitiontable, + pxe_loader, + save_test_data, + ): + """Verify Host created Read the Provision template + + :id: preupgrade-3f338475-fa69-43ef-ac86-f00f4d324b21 + + :steps: + 1. Create host on Satellite and trying rendering provisioning templates + + :expectedresults: + 1. 
Provisioning templates for host are able to render in before upgrading to new version + + :parametrized: yes + """ + host = module_target_sat.api.Host( + organization=module_org, + location=module_location, + name=gen_string('alpha'), + operatingsystem=default_os, + architecture=default_architecture, + domain=default_domain, + root_pass=settings.provisioning.host_root_password, + ptable=default_partitiontable, + pxe_loader=pxe_loader.pxe_loader, + ).create() + + for kind in provisioning_template_kinds: + assert host.read_template(data={'template_kind': kind}) + + save_test_data( + { + 'provision_host_id': host.id, + 'pxe_loader': pxe_loader.pxe_loader, + } + ) + + @pytest.mark.post_upgrade(depend_on=test_pre_scenario_provisioning_templates) + @pytest.mark.parametrize('pre_upgrade_data', ['bios', 'uefi'], indirect=True) + def test_post_scenario_provisioning_templates( + self, + request, + pre_upgrade_data, + module_target_sat, + ): + """Host provisioned using pre-upgrade GCE CR + + :id: postupgrade-ef82143d-efef-49b2-9702-93d67ef6805e + + :steps: + 1. Postupgrade, verify provisioning templates rendering for host + 2. Create a new host on Satellite and try rendering provisioning templates + + :expectedresults: + 1. Provisioning templates for existing and new host are able to render. 
+ + :parametrized: yes + """ + pxe_loader = pre_upgrade_data.pxe_loader + pre_upgrade_host = module_target_sat.api.Host().search( + query={'search': f'id={pre_upgrade_data.provision_host_id}'} + )[0] + org = module_target_sat.api.Organization(id=pre_upgrade_host.organization.id).read() + loc = module_target_sat.api.Location(id=pre_upgrade_host.location.id).read() + domain = module_target_sat.api.Domain(id=pre_upgrade_host.domain.id).read() + architecture = module_target_sat.api.Architecture( + id=pre_upgrade_host.architecture.id + ).read() + os = module_target_sat.api.OperatingSystem(id=pre_upgrade_host.operatingsystem.id).read() + ptable = module_target_sat.api.PartitionTable(id=pre_upgrade_host.ptable.id).read() + + for kind in provisioning_template_kinds: + assert pre_upgrade_host.read_template(data={'template_kind': kind}) + + new_host_name = gen_string('alpha') + new_host = module_target_sat.api.Host( + name=new_host_name, + organization=org, + location=loc, + architecture=architecture, + domain=domain, + operatingsystem=os, + ptable=ptable, + root_pass=settings.provisioning.host_root_password, + pxe_loader=pxe_loader, + ).create() + request.addfinalizer(pre_upgrade_host.delete) + request.addfinalizer(new_host.delete) + + for kind in provisioning_template_kinds: + assert new_host.read_template(data={'template_kind': kind}) From c698caf38fdc11c69fe80caa931d5ac17ce79cf5 Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 19 Sep 2023 09:11:33 +0200 Subject: [PATCH 42/96] Acs test coverage added --- robottelo/constants/__init__.py | 3 +- tests/foreman/ui/test_acs.py | 357 ++++++++++++++++++++++++++++++++ 2 files changed, 359 insertions(+), 1 deletion(-) create mode 100644 tests/foreman/ui/test_acs.py diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index e7d2548758b..c96397cab53 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -222,7 +222,8 @@ class Colored(Box): 'yum': "yum", 'ostree': 
"ostree", 'docker': "docker", - "ansible_collection": "ansible collection", + 'ansible_collection': "ansible collection", + 'file': "file", } DOWNLOAD_POLICIES = { diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py new file mode 100644 index 00000000000..97094bafe61 --- /dev/null +++ b/tests/foreman/ui/test_acs.py @@ -0,0 +1,357 @@ +"""Tests for Alternate Content Sources UI + +:Requirement: AlternateContentSources + +:CaseAutomation: Automated + +:CaseLevel: Acceptance + +:CaseComponent: AlternateContentSources + +:Team: Phoenix-content + +:TestType: Functional + +:CaseImportance: High + +:Upstream: No +""" +import pytest + +from robottelo import constants +from robottelo.config import settings +from robottelo.constants import CONTENT_CREDENTIALS_TYPES +from robottelo.constants import REPO_TYPE +from robottelo.utils.datafactory import gen_string + + +ssl_name, product_name, product_label, product_description = (gen_string('alpha') for _ in range(4)) +repos_to_enable = ['rhae2.9_el8'] + + +@pytest.fixture(scope='function') +def ui_acs_setup(session, target_sat, module_sca_manifest_org): + """ + This fixture creates all the necessary data for the test to run. + It creates an organization, content credentials, product and repositories. 
+ """ + + with target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + + session.contentcredential.create( + { + 'name': ssl_name, + 'content_type': CONTENT_CREDENTIALS_TYPES['ssl'], + 'content': gen_string('alpha'), + } + ) + + session.product.create( + {'name': product_name, 'label': product_label, 'description': product_description} + ) + + session.repository.create( + product_name, + { + 'name': gen_string('alpha'), + 'repo_type': REPO_TYPE['file'], + 'repo_content.upstream_url': settings.repos.file_type_repo.url, + }, + ) + + for repo in repos_to_enable: + session.redhatrepository.enable( + custom_query=constants.REPOS[repo]['id'], + arch=constants.DEFAULT_ARCHITECTURE, + entity_name=None, + ) + + return target_sat, module_sca_manifest_org + + +@pytest.mark.e2e +def test_acs_positive_end_to_end(session, ui_acs_setup): + """ + Create, update, delete and refresh ACSes of all supported types. + + :id: 047452cc-5a9f-4473-96b1-d5b6830b7d6b + + :steps: + 1. Select an organization + 2. Create ACSes (all supported types/combinations) + 3. Create ACS on which deletion is going to be tested + 4. Test deletion + 5. Test refresh + 6. Test renaming and changing description + 7. Test editing capsules + 8. Test editing urls and subpaths + 9. Test editing credentials + 10. Test editing products + + + :expectedresults: This test should create all supported types (10) + of the Aleternate Content Sources one by one and asserts that actions + were made correctly on them. 
+ """ + + target_sat, module_sca_manifest_org = ui_acs_setup + + with target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + + # Create ACS using "Simplified" option with content type of "File" + session.acs.create_new_acs( + simplified_type=True, + content_type='file', + name='simpleFile', + description='simpleFileDesc', + add_all_capsules=True, + use_http_proxies=True, + products_to_add=product_name, + ) + + # Create ACS using "Simplified" option with content type of "Yum" + session.acs.create_new_acs( + simplified_type=True, + content_type='yum', + name='simpleYum', + description='simpleYumDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + products_to_add=[constants.REPOS[repo]['product'] for repo in repos_to_enable], + ) + + # Create ACS using "Custom" option with content type of "File" + # and using manual authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileManualAuth', + description='customFileManualAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + manual_auth=True, + username='test', + password='test', + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "Yum" + # and using manual authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumManualAuth', + description='customYumManualAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + manual_auth=True, + username='test', + password='test', + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "Yum" + # and using content credentials authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumContentAuth', + description='customYumContentAuthDesc', 
+ capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "File" + # and using content credentials authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileContentAuth', + description='customFileContentAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "Yum" + # and using NO authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumNoneAuth', + description='customYumNoneAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) + + # Create ACS using "Custom" option with content type of "File" + # and using NO authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileNoneAuth', + description='customFileNoneAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) + + # Create ACS using "RHUI" option + # and using content credentials authentication + session.acs.create_new_acs( + rhui_type=True, + name='rhuiYumContentAuth', + description='rhuiYumContentAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # 
Create ACS using "RHUI" option + # and using NO authentication + session.acs.create_new_acs( + rhui_type=True, + name='rhuiYumNoneAuth', + description='rhuiYumNoneAuthDesc', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + none_auth=True, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS on which deletion is going to be tested + session.acs.create_new_acs( + rhui_type=True, + name='testAcsToBeDeleted', + description='testAcsToBeDeleted', + capsules_to_add=target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + none_auth=True, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Delete ACS and check if trying to read it afterwards fails + session.acs.delete_acs(acs_name='testAcsToBeDeleted') + with pytest.raises(ValueError): + session.acs.get_row_drawer_content(acs_name='testAcsToBeDeleted') + + # Refresh ACS and check that last refresh time is updated + session.acs.refresh_acs(acs_name='simpleFile') + simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFile') + assert ( + simple_file_refreshed['details']['last_refresh'] == 'less than a minute ago' + or '1 minute ago' + ) + + # Rename and change description of ACS and then check that it was changed + simple_file_renamed = session.acs.edit_acs_details( + acs_name_to_edit='simpleFile', + new_acs_name='simpleFileRenamed', + new_description='simpleFileRenamedDesc', + ) + simple_file_renamed = session.acs.get_row_drawer_content(acs_name='simpleFileRenamed') + assert ( + simple_file_renamed['details']['details_stack_content']['name'] == 'simpleFileRenamed' + ) + assert ( + simple_file_renamed['details']['details_stack_content']['description'] + == 'simpleFileRenamedDesc' + ) + + # Edit ACS capsules + custom_file_edited_capsules = session.acs.edit_capsules( + acs_name_to_edit='customFileContentAuth', remove_all=True, 
use_http_proxies=False + ) + custom_file_edited_capsules = session.acs.get_row_drawer_content( + acs_name='customFileContentAuth' + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content']['capsules_list'] == [] + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content']['use_http_proxies'] + == 'false' + ) + + # Edit ACS urls and subpaths + custom_yum_edited_url = session.acs.edit_url_subpaths( + acs_name_to_edit='customYumNoneAuth', + new_url='https://testNEW.com', + new_subpaths=['test/', 'testNEW/'], + ) + custom_yum_edited_url = session.acs.get_row_drawer_content(acs_name='customYumNoneAuth') + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['url'] + == 'https://testNEW.com' + ) + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['subpaths'] + == 'test/,testNEW/' + ) + + # Edit ACS credentials + custom_file_edited_credentials = session.acs.edit_credentials( + acs_name_to_edit='customFileManualAuth', + verify_ssl=False, + manual_auth=True, + username='changedUserName', + ) + custom_file_edited_credentials = session.acs.get_row_drawer_content( + acs_name='customFileManualAuth' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content']['verify_ssl'] + == 'false' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content']['username'] + == 'changedUserName' + ) + + # Edit ACS products + simple_yum_edited_products = session.acs.edit_products( + acs_name_to_edit='simpleYum', + remove_all=True, + ) + simple_yum_edited_products = session.acs.get_row_drawer_content(acs_name='simpleYum') + assert ( + simple_yum_edited_products['products']['products_stack_content']['products_list'] == [] + ) From 7ef8a55688e085a47c9a19420e156acfdd47b37f Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 19 Sep 2023 09:37:17 +0200 Subject: [PATCH 43/96] Fix docstring --- 
tests/foreman/ui/test_acs.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index 97094bafe61..8765d1725df 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -78,21 +78,22 @@ def test_acs_positive_end_to_end(session, ui_acs_setup): :id: 047452cc-5a9f-4473-96b1-d5b6830b7d6b :steps: - 1. Select an organization - 2. Create ACSes (all supported types/combinations) - 3. Create ACS on which deletion is going to be tested - 4. Test deletion - 5. Test refresh - 6. Test renaming and changing description - 7. Test editing capsules - 8. Test editing urls and subpaths - 9. Test editing credentials - 10. Test editing products - - - :expectedresults: This test should create all supported types (10) - of the Aleternate Content Sources one by one and asserts that actions - were made correctly on them. + 1. Select an organization + 2. Create ACSes (all supported types/combinations) + 3. Create ACS on which deletion is going to be tested + 4. Test deletion + 5. Test refresh + 6. Test renaming and changing description + 7. Test editing capsules + 8. Test editing urls and subpaths + 9. Test editing credentials + 10. Test editing products + + + :expectedresults: + This test should create all supported types (10) + of the Aleternate Content Sources one by one and asserts that actions + were made correctly on them. 
""" target_sat, module_sca_manifest_org = ui_acs_setup From d2738af7a19bdd5817fc9dcba8850c6be5e75256 Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 19 Sep 2023 15:39:14 +0200 Subject: [PATCH 44/96] Assert changed --- tests/foreman/ui/test_acs.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index 8765d1725df..c2a69238e73 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -89,7 +89,6 @@ def test_acs_positive_end_to_end(session, ui_acs_setup): 9. Test editing credentials 10. Test editing products - :expectedresults: This test should create all supported types (10) of the Aleternate Content Sources one by one and asserts that actions @@ -277,10 +276,10 @@ def test_acs_positive_end_to_end(session, ui_acs_setup): # Refresh ACS and check that last refresh time is updated session.acs.refresh_acs(acs_name='simpleFile') simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFile') - assert ( - simple_file_refreshed['details']['last_refresh'] == 'less than a minute ago' - or '1 minute ago' - ) + assert simple_file_refreshed['details']['last_refresh'] in [ + 'less than a minute ago', + '1 minute ago', + ] # Rename and change description of ACS and then check that it was changed simple_file_renamed = session.acs.edit_acs_details( From 4bb037cc6fe1a9630d005e881af25b54cf252ec2 Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 10 Oct 2023 08:36:11 +0200 Subject: [PATCH 45/96] Comments addressed --- tests/foreman/ui/test_acs.py | 749 +++++++++++++++++++++-------------- 1 file changed, 445 insertions(+), 304 deletions(-) diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index c2a69238e73..2ec0d98db40 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -19,339 +19,480 @@ import pytest from robottelo import constants -from robottelo.config import settings -from 
robottelo.constants import CONTENT_CREDENTIALS_TYPES from robottelo.constants import REPO_TYPE +from robottelo.constants.repos import CUSTOM_FILE_REPO from robottelo.utils.datafactory import gen_string -ssl_name, product_name, product_label, product_description = (gen_string('alpha') for _ in range(4)) +ssl_name, product_name, product_label, product_description, repository_name = ( + gen_string('alpha') for _ in range(5) +) repos_to_enable = ['rhae2.9_el8'] -@pytest.fixture(scope='function') -def ui_acs_setup(session, target_sat, module_sca_manifest_org): +@pytest.fixture(scope='class') +def acs_setup(class_target_sat, module_sca_manifest_org): """ This fixture creates all the necessary data for the test to run. It creates an organization, content credentials, product and repositories. """ - - with target_sat.ui_session() as session: - session.organization.select(org_name=module_sca_manifest_org.name) - - session.contentcredential.create( + class_target_sat.api.ContentCredential( + name=ssl_name, + content=gen_string('alpha'), + organization=module_sca_manifest_org.id, + content_type="cert", + ).create() + + product = class_target_sat.api.Product( + name=product_name, organization=module_sca_manifest_org.id + ).create() + + class_target_sat.api.Repository( + product=product, content_type=REPO_TYPE['file'], url=CUSTOM_FILE_REPO + ).create() + + for repo in repos_to_enable: + class_target_sat.cli.RepositorySet.enable( { - 'name': ssl_name, - 'content_type': CONTENT_CREDENTIALS_TYPES['ssl'], - 'content': gen_string('alpha'), + 'organization-id': module_sca_manifest_org.id, + 'name': constants.REPOS[repo]['reposet'], + 'product': constants.REPOS[repo]['product'], + 'releasever': constants.REPOS[repo]['version'], + 'basearch': constants.DEFAULT_ARCHITECTURE, } ) - session.product.create( - {'name': product_name, 'label': product_label, 'description': product_description} - ) - - session.repository.create( - product_name, - { - 'name': gen_string('alpha'), - 'repo_type': 
REPO_TYPE['file'], - 'repo_content.upstream_url': settings.repos.file_type_repo.url, - }, - ) - - for repo in repos_to_enable: - session.redhatrepository.enable( - custom_query=constants.REPOS[repo]['id'], - arch=constants.DEFAULT_ARCHITECTURE, - entity_name=None, - ) - - return target_sat, module_sca_manifest_org + return class_target_sat, module_sca_manifest_org -@pytest.mark.e2e -def test_acs_positive_end_to_end(session, ui_acs_setup): +class TestAllAcsTypes: """ - Create, update, delete and refresh ACSes of all supported types. - - :id: 047452cc-5a9f-4473-96b1-d5b6830b7d6b - - :steps: - 1. Select an organization - 2. Create ACSes (all supported types/combinations) - 3. Create ACS on which deletion is going to be tested - 4. Test deletion - 5. Test refresh - 6. Test renaming and changing description - 7. Test editing capsules - 8. Test editing urls and subpaths - 9. Test editing credentials - 10. Test editing products - - :expectedresults: - This test should create all supported types (10) - of the Aleternate Content Sources one by one and asserts that actions - were made correctly on them. 
+ Test class insuring fixture is ran once before + test_check_all_acs_types_can_be_created """ - target_sat, module_sca_manifest_org = ui_acs_setup - - with target_sat.ui_session() as session: - session.organization.select(org_name=module_sca_manifest_org.name) - - # Create ACS using "Simplified" option with content type of "File" - session.acs.create_new_acs( - simplified_type=True, - content_type='file', - name='simpleFile', - description='simpleFileDesc', - add_all_capsules=True, - use_http_proxies=True, - products_to_add=product_name, - ) - - # Create ACS using "Simplified" option with content type of "Yum" - session.acs.create_new_acs( - simplified_type=True, - content_type='yum', - name='simpleYum', - description='simpleYumDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - products_to_add=[constants.REPOS[repo]['product'] for repo in repos_to_enable], - ) - - # Create ACS using "Custom" option with content type of "File" - # and using manual authentication - session.acs.create_new_acs( - custom_type=True, - content_type='file', - name='customFileManualAuth', - description='customFileManualAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - manual_auth=True, - username='test', - password='test', - verify_ssl=True, - ca_cert=ssl_name, - ) - - # Create ACS using "Custom" option with content type of "Yum" - # and using manual authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumManualAuth', - description='customYumManualAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - manual_auth=True, - username='test', - password='test', - verify_ssl=True, - ca_cert=ssl_name, - ) + pytestmark = pytest.mark.usefixtures('acs_setup') + + @pytest.mark.parametrize('cnt_type', ['yum', 'file']) + @pytest.mark.parametrize('acs_type', ['custom', 'simplified', 
'rhui']) + def test_check_all_acs_types_can_be_created(self, session, cnt_type, acs_type, acs_setup): + """ + This test creates all possible ACS types. + + :id cbd0f4e6-2151-446a-90d3-69c6935a0c91 + + :parametrized: yes + + :steps: + 1. Select an organization + 2. Create ACSes (randomly selected ones) + 3. Test refresh + 4. Test renaming and changing description + 5. Test editing capsules + 6. Test editing urls and subpaths + 7. Test editing credentials + 8. Test editing products + 9. Create ACS on which deletion is going to be tested + 10. Test deletion + :expectedresults: + This test should create some + Aleternate Content Sources and asserts that actions + were made correctly on them. + + """ + + if acs_type == 'rhui' and cnt_type == 'file': + pytest.skip('Unsupported parameter combination.') + + class_target_sat, module_sca_manifest_org = acs_setup + with class_target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + + match acs_type: + case 'simplified': + + if cnt_type == 'yum': + # Create ACS using "Simplified" option with content type of "Yum" + session.acs.create_new_acs( + simplified_type=True, + content_type='yum', + name='simpleYum', + description='simpleYumDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + products_to_add=[ + constants.REPOS[repo]['product'] for repo in repos_to_enable + ], + ) + + if cnt_type == 'file': + # Create ACS using "Simplified" option with content type of "File" + session.acs.create_new_acs( + simplified_type=True, + content_type='file', + name='simpleFile', + description='simpleFileDesc', + add_all_capsules=True, + use_http_proxies=True, + products_to_add=product_name, + ) + + case 'custom': + + if cnt_type == 'yum': + # Create ACS using "Custom" option with content type of "Yum" + # and using manual authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumManualAuth', + 
description='customYumManualAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + manual_auth=True, + username='test', + password='test', + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "Yum" + # and using content credentials authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumContentAuth', + description='customYumContentAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "Yum" + # and using NO authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumNoneAuth', + description='customYumNoneAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) + + if cnt_type == 'file': + # Create ACS using "Custom" option with content type of "File" + # and using content credentials authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileContentAuth', + description='customFileContentAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "Custom" option with content type of "File" + # and using NO authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileNoneAuth', + description='customFileNoneAuthDesc', + capsules_to_add=class_target_sat.hostname, + 
use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) + + # Create ACS using "Custom" option with content type of "File" + # and using manual authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileManualAuth', + description='customFileManualAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + manual_auth=True, + username='test', + password='test', + verify_ssl=True, + ca_cert=ssl_name, + ) + + case 'rhui': + # Create ACS using "RHUI" option + # and using content credentials authentication + session.acs.create_new_acs( + rhui_type=True, + name='rhuiYumContentAuth', + description='rhuiYumContentAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) + + # Create ACS using "RHUI" option + # and using NO authentication + session.acs.create_new_acs( + rhui_type=True, + name='rhuiYumNoneAuth', + description='rhuiYumNoneAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + none_auth=True, + verify_ssl=True, + ca_cert=ssl_name, + ) + + +class TestAcsE2e: + """ + Test class insuring fixture is ran once before + test_acs_positive_end_to_end + """ - # Create ACS using "Custom" option with content type of "Yum" - # and using content credentials authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumContentAuth', - description='customYumContentAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - content_credentials_auth=True, - 
ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) + pytestmark = pytest.mark.usefixtures('acs_setup') + + @pytest.mark.e2e + def test_acs_positive_end_to_end(self, session, acs_setup): + """ + Create, update, delete and refresh ACSes. + + :id: 047452cc-5a9f-4473-96b1-d5b6830b7d6b + + :steps: + 1. Select an organization + 2. Create ACSes (randomly selected ones) + 3. Test refresh + 4. Test renaming and changing description + 5. Test editing capsules + 6. Test editing urls and subpaths + 7. Test editing credentials + 8. Test editing products + 9. Create ACS on which deletion is going to be tested + 10. Test deletion + :expectedresults: + This test should create some + Aleternate Content Sources and asserts that actions + were made correctly on them. + """ + + class_target_sat, module_sca_manifest_org = acs_setup + + with class_target_sat.ui_session() as session: + session.organization.select(org_name=module_sca_manifest_org.name) + + # Create ACS using "Simplified" option with content type of "File" + session.acs.create_new_acs( + simplified_type=True, + content_type='file', + name='simpleFile', + description='simpleFileDesc', + add_all_capsules=True, + use_http_proxies=True, + products_to_add=product_name, + ) - # Create ACS using "Custom" option with content type of "File" - # and using content credentials authentication - session.acs.create_new_acs( - custom_type=True, - content_type='file', - name='customFileContentAuth', - description='customFileContentAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - content_credentials_auth=True, - ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) + # Create ACS using "Simplified" option with content type of "Yum" + session.acs.create_new_acs( + simplified_type=True, + content_type='yum', + name='simpleYum', + description='simpleYumDesc', + 
capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + products_to_add=[constants.REPOS[repo]['product'] for repo in repos_to_enable], + ) - # Create ACS using "Custom" option with content type of "Yum" - # and using NO authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumNoneAuth', - description='customYumNoneAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - none_auth=True, - ) + # Create ACS using "Custom" option with content type of "File" + # and using manual authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileManualAuth', + description='customFileManualAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + manual_auth=True, + username='test', + password='test', + verify_ssl=True, + ca_cert=ssl_name, + ) - # Create ACS using "Custom" option with content type of "File" - # and using NO authentication - session.acs.create_new_acs( - custom_type=True, - content_type='file', - name='customFileNoneAuth', - description='customFileNoneAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - none_auth=True, - ) + # Create ACS using "Custom" option with content type of "File" + # and using content credentials authentication + session.acs.create_new_acs( + custom_type=True, + content_type='file', + name='customFileContentAuth', + description='customFileContentAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + content_credentials_auth=True, + ssl_client_cert=ssl_name, + ssl_client_key=ssl_name, + verify_ssl=True, + ca_cert=ssl_name, + ) - # Create ACS using "RHUI" option - # and using content credentials authentication - 
session.acs.create_new_acs( - rhui_type=True, - name='rhuiYumContentAuth', - description='rhuiYumContentAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com/pulp/content', - subpaths=['test/', 'test2/'], - content_credentials_auth=True, - ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) + # Create ACS using "Custom" option with content type of "Yum" + # and using NO authentication + session.acs.create_new_acs( + custom_type=True, + content_type='yum', + name='customYumNoneAuth', + description='customYumNoneAuthDesc', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com', + subpaths=['test/'], + none_auth=True, + ) - # Create ACS using "RHUI" option - # and using NO authentication - session.acs.create_new_acs( - rhui_type=True, - name='rhuiYumNoneAuth', - description='rhuiYumNoneAuthDesc', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com/pulp/content', - subpaths=['test/', 'test2/'], - none_auth=True, - verify_ssl=True, - ca_cert=ssl_name, - ) + # Refresh ACS and check that last refresh time is updated + session.acs.refresh_acs(acs_name='simpleFile') + simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFile') + assert simple_file_refreshed['details']['last_refresh'] in [ + 'less than a minute ago', + '1 minute ago', + ] + + # Rename and change description of ACS and then check that it was changed + simple_file_renamed = session.acs.edit_acs_details( + acs_name_to_edit='simpleFile', + new_acs_name='simpleFileRenamed', + new_description='simpleFileRenamedDesc', + ) + simple_file_renamed = session.acs.get_row_drawer_content(acs_name='simpleFileRenamed') + assert ( + simple_file_renamed['details']['details_stack_content']['name'] + == 'simpleFileRenamed' + ) + assert ( + simple_file_renamed['details']['details_stack_content']['description'] + == 
'simpleFileRenamedDesc' + ) - # Create ACS on which deletion is going to be tested - session.acs.create_new_acs( - rhui_type=True, - name='testAcsToBeDeleted', - description='testAcsToBeDeleted', - capsules_to_add=target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com/pulp/content', - subpaths=['test/', 'test2/'], - none_auth=True, - verify_ssl=True, - ca_cert=ssl_name, - ) + # Edit ACS capsules + custom_file_edited_capsules = session.acs.edit_capsules( + acs_name_to_edit='customFileContentAuth', remove_all=True, use_http_proxies=False + ) + custom_file_edited_capsules = session.acs.get_row_drawer_content( + acs_name='customFileContentAuth' + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content']['capsules_list'] + == [] + ) + assert ( + custom_file_edited_capsules['capsules']['capsules_stack_content'][ + 'use_http_proxies' + ] + == 'false' + ) - # Delete ACS and check if trying to read it afterwards fails - session.acs.delete_acs(acs_name='testAcsToBeDeleted') - with pytest.raises(ValueError): - session.acs.get_row_drawer_content(acs_name='testAcsToBeDeleted') - - # Refresh ACS and check that last refresh time is updated - session.acs.refresh_acs(acs_name='simpleFile') - simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFile') - assert simple_file_refreshed['details']['last_refresh'] in [ - 'less than a minute ago', - '1 minute ago', - ] - - # Rename and change description of ACS and then check that it was changed - simple_file_renamed = session.acs.edit_acs_details( - acs_name_to_edit='simpleFile', - new_acs_name='simpleFileRenamed', - new_description='simpleFileRenamedDesc', - ) - simple_file_renamed = session.acs.get_row_drawer_content(acs_name='simpleFileRenamed') - assert ( - simple_file_renamed['details']['details_stack_content']['name'] == 'simpleFileRenamed' - ) - assert ( - simple_file_renamed['details']['details_stack_content']['description'] - == 'simpleFileRenamedDesc' - ) + # Edit 
ACS urls and subpaths + custom_yum_edited_url = session.acs.edit_url_subpaths( + acs_name_to_edit='customYumNoneAuth', + new_url='https://testNEW.com', + new_subpaths=['test/', 'testNEW/'], + ) + custom_yum_edited_url = session.acs.get_row_drawer_content(acs_name='customYumNoneAuth') + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['url'] + == 'https://testNEW.com' + ) + assert ( + custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content'][ + 'subpaths' + ] + == 'test/,testNEW/' + ) - # Edit ACS capsules - custom_file_edited_capsules = session.acs.edit_capsules( - acs_name_to_edit='customFileContentAuth', remove_all=True, use_http_proxies=False - ) - custom_file_edited_capsules = session.acs.get_row_drawer_content( - acs_name='customFileContentAuth' - ) - assert ( - custom_file_edited_capsules['capsules']['capsules_stack_content']['capsules_list'] == [] - ) - assert ( - custom_file_edited_capsules['capsules']['capsules_stack_content']['use_http_proxies'] - == 'false' - ) + # Edit ACS credentials + custom_file_edited_credentials = session.acs.edit_credentials( + acs_name_to_edit='customFileManualAuth', + verify_ssl=False, + manual_auth=True, + username='changedUserName', + ) + custom_file_edited_credentials = session.acs.get_row_drawer_content( + acs_name='customFileManualAuth' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content'][ + 'verify_ssl' + ] + == 'false' + ) + assert ( + custom_file_edited_credentials['credentials']['credentials_stack_content'][ + 'username' + ] + == 'changedUserName' + ) - # Edit ACS urls and subpaths - custom_yum_edited_url = session.acs.edit_url_subpaths( - acs_name_to_edit='customYumNoneAuth', - new_url='https://testNEW.com', - new_subpaths=['test/', 'testNEW/'], - ) - custom_yum_edited_url = session.acs.get_row_drawer_content(acs_name='customYumNoneAuth') - assert ( - 
custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['url'] - == 'https://testNEW.com' - ) - assert ( - custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['subpaths'] - == 'test/,testNEW/' - ) + # Edit ACS products + simple_yum_edited_products = session.acs.edit_products( + acs_name_to_edit='simpleYum', + remove_all=True, + ) + simple_yum_edited_products = session.acs.get_row_drawer_content(acs_name='simpleYum') + assert ( + simple_yum_edited_products['products']['products_stack_content']['products_list'] + == [] + ) - # Edit ACS credentials - custom_file_edited_credentials = session.acs.edit_credentials( - acs_name_to_edit='customFileManualAuth', - verify_ssl=False, - manual_auth=True, - username='changedUserName', - ) - custom_file_edited_credentials = session.acs.get_row_drawer_content( - acs_name='customFileManualAuth' - ) - assert ( - custom_file_edited_credentials['credentials']['credentials_stack_content']['verify_ssl'] - == 'false' - ) - assert ( - custom_file_edited_credentials['credentials']['credentials_stack_content']['username'] - == 'changedUserName' - ) + # Create ACS on which deletion is going to be tested + session.acs.create_new_acs( + rhui_type=True, + name='testAcsToBeDeleted', + description='testAcsToBeDeleted', + capsules_to_add=class_target_sat.hostname, + use_http_proxies=True, + base_url='https://test.com/pulp/content', + subpaths=['test/', 'test2/'], + none_auth=True, + verify_ssl=True, + ca_cert=ssl_name, + ) - # Edit ACS products - simple_yum_edited_products = session.acs.edit_products( - acs_name_to_edit='simpleYum', - remove_all=True, - ) - simple_yum_edited_products = session.acs.get_row_drawer_content(acs_name='simpleYum') - assert ( - simple_yum_edited_products['products']['products_stack_content']['products_list'] == [] - ) + # Delete ACS and check if trying to read it afterwards fails + session.acs.delete_acs(acs_name='testAcsToBeDeleted') + with pytest.raises(ValueError): + 
session.acs.get_row_drawer_content(acs_name='testAcsToBeDeleted') From 09e992de6e60c0311cd3029b8d4817234f85ba87 Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 10 Oct 2023 09:27:32 +0200 Subject: [PATCH 46/96] Precommit ran --- tests/foreman/ui/test_acs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index 2ec0d98db40..a31138a9bab 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -23,7 +23,6 @@ from robottelo.constants.repos import CUSTOM_FILE_REPO from robottelo.utils.datafactory import gen_string - ssl_name, product_name, product_label, product_description, repository_name = ( gen_string('alpha') for _ in range(5) ) From 1cbdb3480f81c664c65cfe3d535fcfda8e6e409e Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Tue, 10 Oct 2023 09:39:37 +0200 Subject: [PATCH 47/96] Docstring fixed --- tests/foreman/ui/test_acs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index a31138a9bab..dd12e66af13 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -78,7 +78,7 @@ def test_check_all_acs_types_can_be_created(self, session, cnt_type, acs_type, a """ This test creates all possible ACS types. 
- :id cbd0f4e6-2151-446a-90d3-69c6935a0c91 + :id: cbd0f4e6-2151-446a-90d3-69c6935a0c91 :parametrized: yes From af1e9ecfe09ac601fecc894153c5becaff713890 Mon Sep 17 00:00:00 2001 From: Ladislav Vasina Date: Fri, 20 Oct 2023 12:57:58 +0200 Subject: [PATCH 48/96] Address comments and add class_sca_manifest --- pytest_fixtures/component/taxonomy.py | 15 + tests/foreman/ui/test_acs.py | 406 ++++++++++++-------------- 2 files changed, 201 insertions(+), 220 deletions(-) diff --git a/pytest_fixtures/component/taxonomy.py b/pytest_fixtures/component/taxonomy.py index faad21122e5..c6140cebb63 100644 --- a/pytest_fixtures/component/taxonomy.py +++ b/pytest_fixtures/component/taxonomy.py @@ -103,6 +103,13 @@ def module_sca_manifest_org(module_org, module_sca_manifest, module_target_sat): return module_org +@pytest.fixture(scope='class') +def class_sca_manifest_org(class_org, class_sca_manifest, class_target_sat): + """Creates an organization and uploads an SCA mode manifest generated with manifester""" + class_target_sat.upload_manifest(class_org.id, class_sca_manifest.content) + return class_org + + @pytest.fixture(scope='module') def module_extra_rhel_entitlement_manifest_org( module_target_sat, @@ -197,6 +204,14 @@ def module_sca_manifest(): yield manifest +@pytest.fixture(scope='class') +def class_sca_manifest(): + """Yields a manifest in Simple Content Access mode with subscriptions determined by the + `manifest_category.golden_ticket` setting in conf/manifest.yaml.""" + with Manifester(manifest_category=settings.manifest.golden_ticket) as manifest: + yield manifest + + @pytest.fixture(scope='function') def function_entitlement_manifest(): """Yields a manifest in entitlement mode with subscriptions determined by the diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index dd12e66af13..e1c40911c6e 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -30,7 +30,7 @@ @pytest.fixture(scope='class') -def 
acs_setup(class_target_sat, module_sca_manifest_org): +def acs_setup(class_target_sat, class_sca_manifest_org): """ This fixture creates all the necessary data for the test to run. It creates an organization, content credentials, product and repositories. @@ -38,12 +38,12 @@ def acs_setup(class_target_sat, module_sca_manifest_org): class_target_sat.api.ContentCredential( name=ssl_name, content=gen_string('alpha'), - organization=module_sca_manifest_org.id, + organization=class_sca_manifest_org.id, content_type="cert", ).create() product = class_target_sat.api.Product( - name=product_name, organization=module_sca_manifest_org.id + name=product_name, organization=class_sca_manifest_org.id ).create() class_target_sat.api.Repository( @@ -53,7 +53,7 @@ def acs_setup(class_target_sat, module_sca_manifest_org): for repo in repos_to_enable: class_target_sat.cli.RepositorySet.enable( { - 'organization-id': module_sca_manifest_org.id, + 'organization-id': class_sca_manifest_org.id, 'name': constants.REPOS[repo]['reposet'], 'product': constants.REPOS[repo]['product'], 'releasever': constants.REPOS[repo]['version'], @@ -61,7 +61,7 @@ def acs_setup(class_target_sat, module_sca_manifest_org): } ) - return class_target_sat, module_sca_manifest_org + return class_target_sat, class_sca_manifest_org class TestAllAcsTypes: @@ -72,206 +72,164 @@ class TestAllAcsTypes: pytestmark = pytest.mark.usefixtures('acs_setup') - @pytest.mark.parametrize('cnt_type', ['yum', 'file']) - @pytest.mark.parametrize('acs_type', ['custom', 'simplified', 'rhui']) - def test_check_all_acs_types_can_be_created(self, session, cnt_type, acs_type, acs_setup): + def gen_params(): + """ + This function generates parameters that are used in test_check_all_acs_types_can_be_created. 
+ """ + + parameters_dict = { + '_common': { + 'use_http_proxies': True, + }, + 'custom': { + '_common': { + 'custom_type': True, + 'base_url': 'https://test.com/', + 'subpaths': ['test/'], + 'capsules_to_add': 'class_target_sat.hostname', + }, + 'yum_manual_auth': { + 'content_type': 'yum', + 'name': 'customYumManualAuth', + 'description': 'customYumManualAuthDesc', + 'manual_auth': True, + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'username': 'test', + 'password': 'test', + }, + 'yum_content_auth': { + 'content_type': 'yum', + 'name': 'customYumContentAuth', + 'description': 'customYumContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + 'verify_ssl': True, + 'ca_cert': ssl_name, + }, + 'yum_none_auth': { + 'content_type': 'yum', + 'name': 'customYumNoneAuth', + 'description': 'customYumNoneAuthDesc', + 'none_auth': True, + }, + 'file_manual_auth': { + 'content_type': 'file', + 'name': 'customFileManualAuth', + 'description': 'customFileManualAuthDesc', + 'manual_auth': True, + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'username': 'test', + 'password': 'test', + }, + 'file_content_auth': { + 'content_type': 'file', + 'name': 'customFileContentAuth', + 'description': 'customFileContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + 'verify_ssl': True, + 'ca_cert': ssl_name, + }, + 'file_none_auth': { + 'content_type': 'file', + 'name': 'customFileNoneAuth', + 'description': 'customFileNoneAuthDesc', + 'none_auth': True, + }, + }, + 'simplified': { + '_common': {'simplified_type': True}, + 'yum': { + 'content_type': 'yum', + 'name': 'simpleYum', + 'description': 'simpleYumDesc', + 'capsules_to_add': 'class_target_sat.hostname', + 'products_to_add': [ + constants.REPOS[repo]['product'] for repo in repos_to_enable + ], + }, + 'file': { + 'content_type': 'file', + 'name': 'simpleFile', + 'description': 'simpleFileDesc', + 
'add_all_capsules': True, + 'products_to_add': product_name, + }, + }, + 'rhui': { + '_common': { + 'rhui_type': True, + 'base_url': 'https://test.com/pulp/content', + 'subpaths': ['test/', 'test2/'], + 'verify_ssl': True, + 'ca_cert': ssl_name, + 'capsules_to_add': 'class_target_sat.hostname', + }, + 'yum_none_auth': { + 'name': 'rhuiYumNoneAuth', + 'description': 'rhuiYumNoneAuthDesc', + 'none_auth': True, + }, + 'yum_content_auth': { + 'name': 'rhuiYumContentAuth', + 'description': 'rhuiYumContentAuthDesc', + 'content_credentials_auth': True, + 'ssl_client_cert': ssl_name, + 'ssl_client_key': ssl_name, + }, + }, + } + + ids = [] + vals = [] + # This code creates a list of scenario IDs and values for each scenario. + # It loops through the keys in the parameters dictionary, and uses the keys to create a scenario ID + # and then it uses the scenario ID to access the scenario values from the parameters dictionary. + # The code then adds the scenario values to the list of scenario values. + for acs in parameters_dict.keys(): + if not acs.startswith('_'): + for cnt in parameters_dict[acs]: + if not cnt.startswith('_'): + scenario = ( + parameters_dict[acs][cnt] + | parameters_dict.get('_common', {}) + | parameters_dict[acs].get('_common', {}) + ) + ids.append(f'{acs}_{cnt}') + vals.append(scenario) + return (vals, ids) + + @pytest.mark.parametrize('scenario', gen_params()[0], ids=gen_params()[1]) + def test_check_all_acs_types_can_be_created(session, scenario, acs_setup): """ This test creates all possible ACS types. - :id: cbd0f4e6-2151-446a-90d3-69c6935a0c91 + :id: 6bfad272-3ff8-4780-b346-1229d70524b1 :parametrized: yes :steps: 1. Select an organization - 2. Create ACSes (randomly selected ones) - 3. Test refresh - 4. Test renaming and changing description - 5. Test editing capsules - 6. Test editing urls and subpaths - 7. Test editing credentials - 8. Test editing products - 9. Create ACS on which deletion is going to be tested - 10. Test deletion + 2. 
Create ACSes :expectedresults: - This test should create some - Aleternate Content Sources and asserts that actions - were made correctly on them. - + This test should create all Aleternate Content Sources """ - if acs_type == 'rhui' and cnt_type == 'file': - pytest.skip('Unsupported parameter combination.') + class_target_sat, class_sca_manifest_org = acs_setup + vals = scenario - class_target_sat, module_sca_manifest_org = acs_setup - with class_target_sat.ui_session() as session: - session.organization.select(org_name=module_sca_manifest_org.name) - - match acs_type: - case 'simplified': - - if cnt_type == 'yum': - # Create ACS using "Simplified" option with content type of "Yum" - session.acs.create_new_acs( - simplified_type=True, - content_type='yum', - name='simpleYum', - description='simpleYumDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - products_to_add=[ - constants.REPOS[repo]['product'] for repo in repos_to_enable - ], - ) + # Replace the placeholder in 'capsules_to_add' with the hostname of the Satellite under test + for val in vals: + if 'capsules_to_add' in val: + vals['capsules_to_add'] = class_target_sat.hostname - if cnt_type == 'file': - # Create ACS using "Simplified" option with content type of "File" - session.acs.create_new_acs( - simplified_type=True, - content_type='file', - name='simpleFile', - description='simpleFileDesc', - add_all_capsules=True, - use_http_proxies=True, - products_to_add=product_name, - ) - - case 'custom': - - if cnt_type == 'yum': - # Create ACS using "Custom" option with content type of "Yum" - # and using manual authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumManualAuth', - description='customYumManualAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - manual_auth=True, - username='test', - password='test', - verify_ssl=True, - ca_cert=ssl_name, - 
) - - # Create ACS using "Custom" option with content type of "Yum" - # and using content credentials authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumContentAuth', - description='customYumContentAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - content_credentials_auth=True, - ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) - - # Create ACS using "Custom" option with content type of "Yum" - # and using NO authentication - session.acs.create_new_acs( - custom_type=True, - content_type='yum', - name='customYumNoneAuth', - description='customYumNoneAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - none_auth=True, - ) - - if cnt_type == 'file': - # Create ACS using "Custom" option with content type of "File" - # and using content credentials authentication - session.acs.create_new_acs( - custom_type=True, - content_type='file', - name='customFileContentAuth', - description='customFileContentAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - content_credentials_auth=True, - ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) - - # Create ACS using "Custom" option with content type of "File" - # and using NO authentication - session.acs.create_new_acs( - custom_type=True, - content_type='file', - name='customFileNoneAuth', - description='customFileNoneAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - none_auth=True, - ) - - # Create ACS using "Custom" option with content type of "File" - # and using manual authentication - session.acs.create_new_acs( - custom_type=True, - 
content_type='file', - name='customFileManualAuth', - description='customFileManualAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com', - subpaths=['test/'], - manual_auth=True, - username='test', - password='test', - verify_ssl=True, - ca_cert=ssl_name, - ) - - case 'rhui': - # Create ACS using "RHUI" option - # and using content credentials authentication - session.acs.create_new_acs( - rhui_type=True, - name='rhuiYumContentAuth', - description='rhuiYumContentAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com/pulp/content', - subpaths=['test/', 'test2/'], - content_credentials_auth=True, - ssl_client_cert=ssl_name, - ssl_client_key=ssl_name, - verify_ssl=True, - ca_cert=ssl_name, - ) - - # Create ACS using "RHUI" option - # and using NO authentication - session.acs.create_new_acs( - rhui_type=True, - name='rhuiYumNoneAuth', - description='rhuiYumNoneAuthDesc', - capsules_to_add=class_target_sat.hostname, - use_http_proxies=True, - base_url='https://test.com/pulp/content', - subpaths=['test/', 'test2/'], - none_auth=True, - verify_ssl=True, - ca_cert=ssl_name, - ) + with class_target_sat.ui_session() as session: + session.organization.select(org_name=class_sca_manifest_org.name) + session.acs.create_new_acs(**vals) class TestAcsE2e: @@ -306,17 +264,17 @@ def test_acs_positive_end_to_end(self, session, acs_setup): were made correctly on them. 
""" - class_target_sat, module_sca_manifest_org = acs_setup + class_target_sat, class_sca_manifest_org = acs_setup with class_target_sat.ui_session() as session: - session.organization.select(org_name=module_sca_manifest_org.name) + session.organization.select(org_name=class_sca_manifest_org.name) # Create ACS using "Simplified" option with content type of "File" session.acs.create_new_acs( simplified_type=True, content_type='file', - name='simpleFile', - description='simpleFileDesc', + name='simpleFileTest', + description='simpleFileTestDesc', add_all_capsules=True, use_http_proxies=True, products_to_add=product_name, @@ -326,8 +284,8 @@ def test_acs_positive_end_to_end(self, session, acs_setup): session.acs.create_new_acs( simplified_type=True, content_type='yum', - name='simpleYum', - description='simpleYumDesc', + name='simpleYumTest', + description='simpleYumTestDesc', capsules_to_add=class_target_sat.hostname, use_http_proxies=True, products_to_add=[constants.REPOS[repo]['product'] for repo in repos_to_enable], @@ -338,8 +296,8 @@ def test_acs_positive_end_to_end(self, session, acs_setup): session.acs.create_new_acs( custom_type=True, content_type='file', - name='customFileManualAuth', - description='customFileManualAuthDesc', + name='customFileManualTestAuth', + description='customFileManualTestAuthDesc', capsules_to_add=class_target_sat.hostname, use_http_proxies=True, base_url='https://test.com', @@ -356,8 +314,8 @@ def test_acs_positive_end_to_end(self, session, acs_setup): session.acs.create_new_acs( custom_type=True, content_type='file', - name='customFileContentAuth', - description='customFileContentAuthDesc', + name='customFileContentAuthTest', + description='customFileContentAuthTestDesc', capsules_to_add=class_target_sat.hostname, use_http_proxies=True, base_url='https://test.com', @@ -374,8 +332,8 @@ def test_acs_positive_end_to_end(self, session, acs_setup): session.acs.create_new_acs( custom_type=True, content_type='yum', - 
name='customYumNoneAuth', - description='customYumNoneAuthDesc', + name='customYumNoneAuthTest', + description='customYumNoneAuthTestDesc', capsules_to_add=class_target_sat.hostname, use_http_proxies=True, base_url='https://test.com', @@ -384,8 +342,8 @@ def test_acs_positive_end_to_end(self, session, acs_setup): ) # Refresh ACS and check that last refresh time is updated - session.acs.refresh_acs(acs_name='simpleFile') - simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFile') + session.acs.refresh_acs(acs_name='simpleFileTest') + simple_file_refreshed = session.acs.get_row_drawer_content(acs_name='simpleFileTest') assert simple_file_refreshed['details']['last_refresh'] in [ 'less than a minute ago', '1 minute ago', @@ -393,26 +351,30 @@ def test_acs_positive_end_to_end(self, session, acs_setup): # Rename and change description of ACS and then check that it was changed simple_file_renamed = session.acs.edit_acs_details( - acs_name_to_edit='simpleFile', - new_acs_name='simpleFileRenamed', - new_description='simpleFileRenamedDesc', + acs_name_to_edit='simpleFileTest', + new_acs_name='simpleFileTestRenamed', + new_description='simpleFileTestRenamedDesc', + ) + simple_file_renamed = session.acs.get_row_drawer_content( + acs_name='simpleFileTestRenamed' ) - simple_file_renamed = session.acs.get_row_drawer_content(acs_name='simpleFileRenamed') assert ( simple_file_renamed['details']['details_stack_content']['name'] - == 'simpleFileRenamed' + == 'simpleFileTestRenamed' ) assert ( simple_file_renamed['details']['details_stack_content']['description'] - == 'simpleFileRenamedDesc' + == 'simpleFileTestRenamedDesc' ) # Edit ACS capsules custom_file_edited_capsules = session.acs.edit_capsules( - acs_name_to_edit='customFileContentAuth', remove_all=True, use_http_proxies=False + acs_name_to_edit='customFileContentAuthTest', + remove_all=True, + use_http_proxies=False, ) custom_file_edited_capsules = session.acs.get_row_drawer_content( - 
acs_name='customFileContentAuth' + acs_name='customFileContentAuthTest' ) assert ( custom_file_edited_capsules['capsules']['capsules_stack_content']['capsules_list'] @@ -427,11 +389,13 @@ def test_acs_positive_end_to_end(self, session, acs_setup): # Edit ACS urls and subpaths custom_yum_edited_url = session.acs.edit_url_subpaths( - acs_name_to_edit='customYumNoneAuth', + acs_name_to_edit='customYumNoneAuthTest', new_url='https://testNEW.com', new_subpaths=['test/', 'testNEW/'], ) - custom_yum_edited_url = session.acs.get_row_drawer_content(acs_name='customYumNoneAuth') + custom_yum_edited_url = session.acs.get_row_drawer_content( + acs_name='customYumNoneAuthTest' + ) assert ( custom_yum_edited_url['url_and_subpaths']['url_and_subpaths_stack_content']['url'] == 'https://testNEW.com' @@ -445,13 +409,13 @@ def test_acs_positive_end_to_end(self, session, acs_setup): # Edit ACS credentials custom_file_edited_credentials = session.acs.edit_credentials( - acs_name_to_edit='customFileManualAuth', + acs_name_to_edit='customFileManualTestAuth', verify_ssl=False, manual_auth=True, username='changedUserName', ) custom_file_edited_credentials = session.acs.get_row_drawer_content( - acs_name='customFileManualAuth' + acs_name='customFileManualTestAuth' ) assert ( custom_file_edited_credentials['credentials']['credentials_stack_content'][ @@ -468,10 +432,12 @@ def test_acs_positive_end_to_end(self, session, acs_setup): # Edit ACS products simple_yum_edited_products = session.acs.edit_products( - acs_name_to_edit='simpleYum', + acs_name_to_edit='simpleYumTest', remove_all=True, ) - simple_yum_edited_products = session.acs.get_row_drawer_content(acs_name='simpleYum') + simple_yum_edited_products = session.acs.get_row_drawer_content( + acs_name='simpleYumTest' + ) assert ( simple_yum_edited_products['products']['products_stack_content']['products_list'] == [] From 9f5ec57badc22bd088f271219d3e1a033a24abce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Oct 2023 23:59:25 -0400 Subject: [PATCH 49/96] Bump cryptography from 41.0.4 to 41.0.5 (#12965) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e2c7c0bb551..1d0317b8bbb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ betelgeuse==1.10.0 broker[docker]==0.4.1 -cryptography==41.0.4 +cryptography==41.0.5 deepdiff==6.6.1 dynaconf[vault]==3.2.3 fauxfactory==3.1.0 From 8c8780895b5e86b991d3d441c0473226956a8ad7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 00:31:27 -0400 Subject: [PATCH 50/96] Bump pytest from 7.4.2 to 7.4.3 (#12966) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1d0317b8bbb..614e8fda50f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ navmazing==1.1.6 productmd==1.37 pyotp==2.9.0 python-box==7.1.1 -pytest==7.4.2 +pytest==7.4.3 pytest-services==2.2.1 pytest-mock==3.12.0 pytest-reportportal==5.3.0 From 291698f925eb7016ff7d11764b218e2c7cf995d4 Mon Sep 17 00:00:00 2001 From: Jitendra Yejare Date: Wed, 25 Oct 2023 20:51:04 +0530 Subject: [PATCH 51/96] Fix vault makescripts with capture output (#12909) * Fix vault makescripts with capture output * Handle topped Vault enablement in .env file --- pytest_plugins/auto_vault.py | 3 +-- robottelo/utils/vault.py | 17 ++++++++--------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/pytest_plugins/auto_vault.py b/pytest_plugins/auto_vault.py index e63fc7f0835..cb9e1f0c10a 100644 --- a/pytest_plugins/auto_vault.py +++ b/pytest_plugins/auto_vault.py @@ -1,5 +1,4 @@ """Plugin enables pytest to notify and update the requirements""" -import subprocess from robottelo.utils.vault import Vault @@ -7,4 +6,4 @@ def pytest_addoption(parser): """Options to allow user to 
update the requirements""" with Vault() as vclient: - vclient.login(stdout=subprocess.PIPE, stderr=subprocess.PIPE) + vclient.login() diff --git a/robottelo/utils/vault.py b/robottelo/utils/vault.py index b0fc77d861e..d3a40d1a706 100644 --- a/robottelo/utils/vault.py +++ b/robottelo/utils/vault.py @@ -25,7 +25,7 @@ def __init__(self, env_file='.env'): def setup(self): if self.env_path.exists(): self.envdata = self.env_path.read_text() - is_enabled = re.findall('\nVAULT_ENABLED_FOR_DYNACONF=(.*)', self.envdata) + is_enabled = re.findall('^(?:.*\n)*VAULT_ENABLED_FOR_DYNACONF=(.*)', self.envdata) if is_enabled: self.vault_enabled = is_enabled[0] self.export_vault_addr() @@ -53,7 +53,7 @@ def exec_vault_command(self, command: str, **kwargs): :param comamnd str: The vault CLI command :param kwargs dict: Arguments to the subprocess run command to customize the run behavior """ - vcommand = subprocess.run(command, shell=True, **kwargs) # capture_output=True + vcommand = subprocess.run(command, shell=True, capture_output=True, **kwargs) if vcommand.returncode != 0: verror = str(vcommand.stderr) if vcommand.returncode == 127: @@ -63,7 +63,7 @@ def exec_vault_command(self, command: str, **kwargs): if 'Error revoking token' in verror: logger.info("Token is alredy revoked!") elif 'Error looking up token' in verror: - logger.warning("Warning! Vault not logged in!") + logger.info("Vault is not logged in!") else: logger.error(f"Error! {verror}") return vcommand @@ -75,7 +75,7 @@ def login(self, **kwargs): and 'VAULT_SECRET_ID_FOR_DYNACONF' not in os.environ ): if self.status(**kwargs).returncode != 0: - logger.warning( + logger.info( "Warning! The browser is about to open for vault OIDC login, " "close the tab once the sign-in is done!" ) @@ -86,9 +86,7 @@ def login(self, **kwargs): self.exec_vault_command(command="vault token renew -i 10h", **kwargs) logger.info("Success! 
Vault OIDC Logged-In and extended for 10 hours!") # Fetching tokens - token = self.exec_vault_command( - "vault token lookup --format json", capture_output=True - ).stdout + token = self.exec_vault_command("vault token lookup --format json").stdout token = json.loads(str(token.decode('UTF-8')))['data']['id'] # Setting new token in env file _envdata = re.sub( @@ -107,8 +105,9 @@ def logout(self): '.*VAULT_TOKEN_FOR_DYNACONF=.*', "# VAULT_TOKEN_FOR_DYNACONF=myroot", self.envdata ) self.env_path.write_text(_envdata) - self.exec_vault_command('vault token revoke -self') - logger.info("Success! OIDC token removed from Env file successfully!") + vstatus = self.exec_vault_command('vault token revoke -self') + if vstatus.returncode == 0: + logger.info("Success! OIDC token removed from Env file successfully!") def status(self, **kwargs): vstatus = self.exec_vault_command('vault token lookup', **kwargs) From 33f4b323fdbf26a83af37ff1aac635f287b082b2 Mon Sep 17 00:00:00 2001 From: Jitendra Yejare Date: Thu, 26 Oct 2023 01:37:09 +0530 Subject: [PATCH 52/96] Fixture collection splitting restructured (#12923) Fixture collection spliting restructured --- pytest_plugins/fixture_collection.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pytest_plugins/fixture_collection.py b/pytest_plugins/fixture_collection.py index 934efa5f56d..6f61f2b3360 100644 --- a/pytest_plugins/fixture_collection.py +++ b/pytest_plugins/fixture_collection.py @@ -13,7 +13,7 @@ def pytest_addoption(parser): example: pytest tests/foreman --uses-fixtures target_sat module_target_sat ''' - parser.addoption("--uses-fixtures", nargs='+', help=help_text) + parser.addoption("--uses-fixtures", nargs='?', help=help_text) def pytest_collection_modifyitems(items, config): @@ -22,17 +22,18 @@ def pytest_collection_modifyitems(items, config): return filter_fixtures = config.getvalue('uses_fixtures') + fixtures_list = filter_fixtures.split(',') if ',' in filter_fixtures else [filter_fixtures] 
selected = [] deselected = [] for item in items: - if set(item.fixturenames).intersection(set(filter_fixtures)): + if set(item.fixturenames).intersection(set(fixtures_list)): selected.append(item) else: deselected.append(item) logger.debug( f'Selected {len(selected)} and deselected {len(deselected)} ' - f'tests based on given fixtures {filter_fixtures} used by tests' + f'tests based on given fixtures {fixtures_list} used by tests' ) config.hook.pytest_deselected(items=deselected) items[:] = selected From 8b0eca51c1384eb75b514fedd15bdc68bf3dabe5 Mon Sep 17 00:00:00 2001 From: Lukas Pramuk Date: Thu, 26 Oct 2023 11:21:38 +0200 Subject: [PATCH 53/96] Remove test_positive_backup_restore_snapshot stub (#12974) --- tests/foreman/maintain/test_backup_restore.py | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/tests/foreman/maintain/test_backup_restore.py b/tests/foreman/maintain/test_backup_restore.py index 473f768fa74..6f6817730b1 100644 --- a/tests/foreman/maintain/test_backup_restore.py +++ b/tests/foreman/maintain/test_backup_restore.py @@ -675,30 +675,3 @@ def test_positive_backup_restore_incremental( query={'search': f'name="{secondary_repo.name}"'} )[0] assert repo.id == secondary_repo.id - - -@pytest.mark.stubbed -def test_positive_backup_restore_snapshot(): - """Take the snapshot backup of a server, restore it, check for content - - :id: dcf3b815-97ed-4c2e-9f2d-5eedd8591c98 - - :setup: - 1. satellite installed on an LVM-based storage with sufficient free extents - - :steps: - 1. create the snapshot backup (with/without pulp) - 2. check that appropriate files are created - 3. restore the backup (installer --reset-data is run in this step) - 4. check system health - 5. check the content was restored - - :expectedresults: - 1. backup succeeds - 2. expected files are present in the backup - 3. restore succeeds - 4. system health check succeeds - 5. 
content is present after restore - - :CaseAutomation: NotAutomated - """ From 4d694c50301d6b6ba29652029c524ee17242aa7b Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Thu, 26 Oct 2023 14:55:23 +0530 Subject: [PATCH 54/96] Add workaround for cpu_mode for EL9 Libvirt tests (#12973) Add workaround for cpu_mode for el9 libvirt tests Signed-off-by: Gaurav Talreja --- tests/foreman/cli/test_computeresource_libvirt.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/foreman/cli/test_computeresource_libvirt.py b/tests/foreman/cli/test_computeresource_libvirt.py index e5e1997fa3b..c1144c88733 100644 --- a/tests/foreman/cli/test_computeresource_libvirt.py +++ b/tests/foreman/cli/test_computeresource_libvirt.py @@ -45,6 +45,7 @@ from robottelo.config import settings from robottelo.constants import FOREMAN_PROVIDERS, LIBVIRT_RESOURCE_URL from robottelo.utils.datafactory import parametrized +from robottelo.utils.issue_handlers import is_open LIBVIRT_URL = LIBVIRT_RESOURCE_URL % settings.libvirt.libvirt_hostname @@ -436,6 +437,7 @@ def test_positive_provision_end_to_end( module_sca_manifest_org, module_location, provisioning_hostgroup, + module_provisioning_rhel_content, ): """Provision a host on Libvirt compute resource with the help of hostgroup. 
@@ -453,10 +455,14 @@ def test_positive_provision_end_to_end( :expectedresults: Host should be provisioned with hostgroup :parametrized: yes + + :BZ: 2236693 """ sat = module_libvirt_provisioning_sat.sat cr_name = gen_string('alpha') hostname = gen_string('alpha').lower() + os_major_ver = module_provisioning_rhel_content.os.major + cpu_mode = 'host-passthrough' if is_open('BZ:2236693') and os_major_ver == '9' else 'default' libvirt_cr = sat.cli.ComputeResource.create( { 'name': cr_name, @@ -476,7 +482,7 @@ def test_positive_provision_end_to_end( 'compute-resource-id': libvirt_cr['id'], 'ip': None, 'mac': None, - 'compute-attributes': 'cpus=1, memory=6442450944, cpu_mode=default, start=1', + 'compute-attributes': f'cpus=1, memory=6442450944, cpu_mode={cpu_mode}, start=1', 'interface': f'compute_type=bridge,compute_bridge=br-{settings.provisioning.vlan_id}', 'volume': 'capacity=10', 'provision-method': 'build', From 5a4dcdbc2a38382741c3be6f7488ab218deec6fd Mon Sep 17 00:00:00 2001 From: vijay sawant Date: Thu, 26 Oct 2023 20:40:59 +0530 Subject: [PATCH 55/96] add GitHub Pull request template (.md) file (#12943) * add GitHub issue template (.yaml) and Pull request template (.md) files * final changes after reivew * add example of PRT comment and remove checklist section --- .github/new_pull_request.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/new_pull_request.md diff --git a/.github/new_pull_request.md b/.github/new_pull_request.md new file mode 100644 index 00000000000..ba1e776a9ac --- /dev/null +++ b/.github/new_pull_request.md @@ -0,0 +1,16 @@ +### Problem Statement + + +### Solution + + +### Related Issues + + + + \ No newline at end of file From 2bcb943accb85e61ed0d8de389fdeb09752d5974 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 27 Oct 2023 00:48:03 -0400 Subject: [PATCH 56/96] Bump wrapanapi from 3.5.18 to 3.6.0 (#12992) --- requirements.txt | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 614e8fda50f..b7e16b82192 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,7 @@ requests==2.31.0 tenacity==8.2.3 testimony==2.3.0 wait-for==1.2.0 -wrapanapi==3.5.18 +wrapanapi==3.6.0 # Get airgun, nailgun and upgrade from master git+https://github.com/SatelliteQE/airgun.git@master#egg=airgun From 4c438437d53fafe3db9cda54c79a08b36b6904b1 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Fri, 27 Oct 2023 13:54:42 +0530 Subject: [PATCH 57/96] Add workaround for cpu_mode for EL9 Libvirt UI tests (#12996) Signed-off-by: Gaurav Talreja --- tests/foreman/ui/test_computeresource_libvirt.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/foreman/ui/test_computeresource_libvirt.py b/tests/foreman/ui/test_computeresource_libvirt.py index 1cb250d2346..2d05fa45bf3 100644 --- a/tests/foreman/ui/test_computeresource_libvirt.py +++ b/tests/foreman/ui/test_computeresource_libvirt.py @@ -28,6 +28,7 @@ FOREMAN_PROVIDERS, LIBVIRT_RESOURCE_URL, ) +from robottelo.utils.issue_handlers import is_open pytestmark = [pytest.mark.skip_if_not_set('libvirt')] @@ -135,6 +136,7 @@ def test_positive_provision_end_to_end( module_location, provisioning_hostgroup, module_libvirt_provisioning_sat, + module_provisioning_rhel_content, ): """Provision Host on libvirt compute resource, and delete it afterwards @@ -146,12 +148,14 @@ def test_positive_provision_end_to_end( :customerscenario: true - :BZ: 1243223 + :BZ: 1243223, 2236693 :parametrized: yes """ sat = module_libvirt_provisioning_sat.sat hostname = gen_string('alpha').lower() + os_major_ver = module_provisioning_rhel_content.os.major + cpu_mode = 'host-passthrough' if is_open('BZ:2236693') and os_major_ver == '9' else 'default' cr = sat.api.LibvirtComputeResource( provider=FOREMAN_PROVIDERS['libvirt'], url=LIBVIRT_URL, @@ -169,6 +173,7 @@ def test_positive_provision_end_to_end( 'host.inherit_deploy_option': 
False, 'host.deploy': f'{cr.name} (Libvirt)', 'provider_content.virtual_machine.memory': '6144', + 'provider_content.virtual_machine.cpu_mode': cpu_mode, 'interfaces.interface.network_type': 'Physical (Bridge)', 'interfaces.interface.network': f'br-{settings.provisioning.vlan_id}', 'additional_information.comment': 'Libvirt provision using valid data', From b2d4f9832ac250848a17959f47c2736a648b15e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Strelec?= Date: Fri, 27 Oct 2023 18:41:46 +0200 Subject: [PATCH 58/96] Update `test_positive_all_packages_update` (#12342) * add test_positive_fm_packages_check_update * add capsule marker * add regex to find packages to update * remove duplicate test * update regex to match if there's multiple packages --- tests/foreman/destructive/test_packages.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/foreman/destructive/test_packages.py b/tests/foreman/destructive/test_packages.py index 3a31e354e46..05c44bf1501 100644 --- a/tests/foreman/destructive/test_packages.py +++ b/tests/foreman/destructive/test_packages.py @@ -16,11 +16,14 @@ :Upstream: No """ +import re + import pytest pytestmark = pytest.mark.destructive +@pytest.mark.include_capsule def test_positive_all_packages_update(target_sat): """Verify update and check-update work as expected. 
@@ -48,5 +51,10 @@ def test_positive_all_packages_update(target_sat): target_sat.power_control(state='reboot') # Run check-update again to verify there are no more packages available to update result = target_sat.cli.Packages.check_update() + # Regex to match if there are packages available to update + # Matches lines like '\n\nwalrus.noarch 5.21-1 custom_repo\n' + pattern = '(\\n){1,2}(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\n)' + matches = re.search(pattern, result.stdout) + assert matches is None # No packages available to update assert 'FAIL' not in result.stdout assert result.status == 0 From 7beff86b0c3b4600f813a0bfb81b5892a88d1b82 Mon Sep 17 00:00:00 2001 From: Samuel Bible Date: Fri, 27 Oct 2023 11:45:12 -0500 Subject: [PATCH 59/96] No blank cvpage test (#12775) * Renaming old CVUI tests to remove them from pipeline, and add first new CVUI tests * Rename files, remove assignee, and remove search test * Readd casecomponent marker to old cv tests * Fix test docstring issues * Add test for blank page in french language * Update test docstring * Update entity reference * Create CV before UI session begins * Update test steps --- tests/foreman/ui/test_contentview.py | 33 ++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/foreman/ui/test_contentview.py b/tests/foreman/ui/test_contentview.py index caef3fc15cb..83df54ba541 100644 --- a/tests/foreman/ui/test_contentview.py +++ b/tests/foreman/ui/test_contentview.py @@ -40,3 +40,36 @@ def test_positive_create_cv(session, target_sat): with target_sat.ui_session() as session: session.contentview_new.create(dict(name=cv)) assert session.contentview_new.search(cv)[0]['Name'] == cv + + +@pytest.mark.tier2 +def test_no_blank_page_on_language_switch(session, target_sat, module_org): + """Able to view the new CV UI when the language is set to something other + than English + + :id: d8745aca-b199-4c7e-a970-b1f0f5c5d56c + + :steps: + 1. Change the Satellite system language to French + 2. 
Attempt to view the CV UI, and read the CV table + + :expectedresults: CV UI is visible, and isn't a blank page + + :CaseLevel: System + + :BZ: 2163538 + + :customerscenario: true + """ + user_password = gen_string('alpha') + user = target_sat.api.User( + default_organization=module_org, + organization=[module_org], + password=user_password, + admin=True, + ).create() + cv = target_sat.api.ContentView(organization=module_org).create() + cv.publish() + with target_sat.ui_session(user=user.login, password=user_password) as session: + session.user.update(user.login, {'user.language': 'Français'}) + assert session.contentview_new.read_french_lang_cv() From dcf83d8e3f4c7da91bdbc653efb974cc274623ab Mon Sep 17 00:00:00 2001 From: Shubham Ganar <67952129+shubhamsg199@users.noreply.github.com> Date: Mon, 30 Oct 2023 21:16:20 +0530 Subject: [PATCH 60/96] Add automation for BZ 2243679 (#13000) Closed loop BZ#2243679 Signed-off-by: Shubham Ganar --- .../foreman/api/test_provisioningtemplate.py | 79 +++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/tests/foreman/api/test_provisioningtemplate.py b/tests/foreman/api/test_provisioningtemplate.py index ac0567181c5..26763511774 100644 --- a/tests/foreman/api/test_provisioningtemplate.py +++ b/tests/foreman/api/test_provisioningtemplate.py @@ -561,3 +561,82 @@ def test_positive_template_check_aap_snippet( assert 'systemctl enable ansible-callback' in render assert f'"host_config_key":"{config_key}"' in render assert '{"package_install": "zsh"}' in render + + @pytest.mark.parametrize('module_sync_kickstart_content', [7, 8, 9], indirect=True) + def test_positive_template_check_rex_snippet( + self, + module_sync_kickstart_content, + module_target_sat, + module_provisioning_capsule, + module_sca_manifest_org, + module_location, + module_default_org_view, + module_lce_library, + default_architecture, + default_partitiontable, + ): + """Read the provision template and verify the host params and home directory permissions 
for the rex user are properly set and rendered. + + :id: e5212c46-d269-4bce-8e03-9d00c086e69e + + :steps: + 1. Create a host by setting host params remote_execution_ssh_user, remote_execution_create_user, remote_execution_effective_user_method and remote_execution_ssh_keys + 2. Read the provision templete to verify host params + + :expectedresults: The rendered template has the host params set and correct home directory permissions for the rex user. + + :BZ: 2243679 + + :customerscenario: true + + :parametrized: yes + """ + macaddress = gen_mac(multicast=False) + rex_user = gen_string('alpha') + ssh_key = gen_string('alphanumeric') + host = module_target_sat.api.Host( + organization=module_sca_manifest_org, + location=module_location, + name=gen_string('alpha').lower(), + mac=macaddress, + operatingsystem=module_sync_kickstart_content.os, + architecture=default_architecture, + domain=module_sync_kickstart_content.domain, + root_pass=settings.provisioning.host_root_password, + ptable=default_partitiontable, + content_facet_attributes={ + 'content_source_id': module_provisioning_capsule.id, + 'content_view_id': module_default_org_view.id, + 'lifecycle_environment_id': module_lce_library.id, + }, + host_parameters_attributes=[ + { + 'name': 'remote_execution_ssh_user', + 'value': rex_user, + 'parameter_type': 'string', + }, + { + 'name': 'remote_execution_create_user', + 'value': 'true', + 'parameter_type': 'boolean', + }, + { + 'name': 'remote_execution_effective_user_method', + 'value': 'sudo', + 'parameter_type': 'string', + }, + { + 'name': 'remote_execution_ssh_keys', + 'value': ssh_key, + 'parameter_type': 'string', + }, + ], + ).create() + rex_snippet = host.read_template(data={'template_kind': 'provision'})['template'] + assert f'chown -R {rex_user}: ~{rex_user}' in rex_snippet + assert f'chown -R {rex_user}: ~{rex_user}/.ssh' in rex_snippet + assert ( + f'echo "{rex_user} ALL = (root) NOPASSWD : ALL\nDefaults:{rex_user} !requiretty" > 
/etc/sudoers.d/{rex_user}' + in rex_snippet + ) + assert ssh_key in rex_snippet From ad951328f74cb3a6af418a3292212a346c1b3348 Mon Sep 17 00:00:00 2001 From: vijay sawant Date: Tue, 31 Oct 2023 13:53:47 +0530 Subject: [PATCH 61/96] sync RHOSP repos to capsule (#12990) --- tests/foreman/api/test_capsulecontent.py | 69 ++++++++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index cdc388fd149..900d0803aed 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -1353,3 +1353,72 @@ def test_positive_remove_capsule_orphans( 'ls /var/lib/pulp/media/artifact/*/* | xargs file | grep RPM' ) assert result.status, 'RPM artifacts are still present. They should be gone.' + + @pytest.mark.skip_if_not_set('capsule') + def test_positive_capsule_sync_openstack_container_repos( + self, + module_target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, + ): + """Synchronize openstack container repositories to capsule + + :id: 23e64385-7f34-4ab9-bd63-72306e5a4de0 + + :setup: + 1. A blank external capsule that has not been synced yet. + + :steps: + 1. Enable and sync openstack container repos. + + :expectedresults: + 1. container repos should sync on capsule. 
+ + :customerscenario: true + + :BZ: 2154734 + + """ + upstream_names = [ + 'rhosp13/openstack-cinder-api', + 'rhosp13/openstack-neutron-server', + 'rhosp13/openstack-neutron-dhcp-agent', + 'rhosp13/openstack-nova-api', + ] + repos = [] + + for ups_name in upstream_names: + repo = module_target_sat.api.Repository( + content_type='docker', + docker_upstream_name=ups_name, + product=function_product, + url=constants.RH_CONTAINER_REGISTRY_HUB, + upstream_username=settings.subscription.rhn_username, + upstream_password=settings.subscription.rhn_password, + ).create() + repo.sync(timeout=1800) + repos.append(repo) + + # Associate LCE with the capsule + module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': function_lce.id} + ) + result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments() + assert len(result['results']) + assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] + + # Create and publish a content view with all repositories + cv = module_target_sat.api.ContentView(organization=function_org, repository=repos).create() + cv.publish() + cv = cv.read() + assert len(cv.version) == 1 + + # Promote the latest CV version into capsule's LCE + cvv = cv.version[-1].read() + cvv.promote(data={'environment_ids': function_lce.id}) + cvv = cvv.read() + assert len(cvv.environment) == 2 + + module_capsule_configured.wait_for_sync() From c7d24e8540b7d9feced3480f387a613a2def7946 Mon Sep 17 00:00:00 2001 From: vsedmik <46570670+vsedmik@users.noreply.github.com> Date: Tue, 31 Oct 2023 10:49:18 +0100 Subject: [PATCH 62/96] Fix Client repos in constants (#13008) --- robottelo/constants/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index c96397cab53..2cf9b98c887 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -289,9 +289,9 @@ class 
Colored(Box): 'rhsc7': 'Red Hat Satellite Capsule 6.11 (for RHEL 7 Server) (RPMs)', 'rhsc8': 'Red Hat Satellite Capsule 6.13 for RHEL 8 x86_64 (RPMs)', 'rhsc7_iso': 'Red Hat Satellite Capsule 6.4 (for RHEL 7 Server) (ISOs)', - 'rhsclient7': 'Red Hat Satellite Client 6 for RHEL 7 Server RPMs x86_64', - 'rhsclient8': 'Red Hat Satellite Client 6 for RHEL 8 x86_64 RPMs', - 'rhsclient9': 'Red Hat Satellite Client 6 for RHEL 9 x86_64 RPMs', + 'rhsclient7': 'Red Hat Satellite Client 6 (for RHEL 7 Server) (RPMs)', + 'rhsclient8': 'Red Hat Satellite Client 6 for RHEL 8 x86_64 (RPMs)', + 'rhsclient9': 'Red Hat Satellite Client 6 for RHEL 9 x86_64 (RPMs)', 'rhst7': 'Red Hat Satellite Tools 6.9 (for RHEL 7 Server) (RPMs)', 'rhst7_610': 'Red Hat Satellite Tools 6.10 (for RHEL 7 Server) (RPMs)', 'rhst6': 'Red Hat Satellite Tools 6.9 (for RHEL 6 Server) (RPMs)', @@ -408,7 +408,7 @@ class Colored(Box): 'name': ('Red Hat Satellite Client 6 for RHEL 8 x86_64 RPMs'), 'version': '6', 'reposet': REPOSET['rhsclient8'], - 'product': PRDS['rhel'], + 'product': PRDS['rhel8'], 'distro': 'rhel8', 'key': PRODUCT_KEY_SAT_CLIENT, }, @@ -417,7 +417,7 @@ class Colored(Box): 'name': ('Red Hat Satellite Client 6 for RHEL 9 x86_64 RPMs'), 'version': '6', 'reposet': REPOSET['rhsclient9'], - 'product': PRDS['rhel'], + 'product': PRDS['rhel9'], 'distro': 'rhel9', 'key': PRODUCT_KEY_SAT_CLIENT, }, From 176f5abe3caac419370d5a66539e55b1c6bc4dd0 Mon Sep 17 00:00:00 2001 From: Adarsh dubey Date: Tue, 31 Oct 2023 15:55:52 +0530 Subject: [PATCH 63/96] Discovery coverage for rule priority/limit/provisioning (#12962) * Discovery coverage for rule priority/limit/provisioning * Adding parametrization for broker workflow --- pytest_fixtures/component/provision_pxe.py | 27 +++++ tests/foreman/api/test_discoveredhost.py | 45 ++++++-- tests/foreman/api/test_discoveryrule.py | 122 ++++++++++----------- 3 files changed, 118 insertions(+), 76 deletions(-) diff --git a/pytest_fixtures/component/provision_pxe.py 
b/pytest_fixtures/component/provision_pxe.py index 1fc0e47397b..90e7c1798ff 100644 --- a/pytest_fixtures/component/provision_pxe.py +++ b/pytest_fixtures/component/provision_pxe.py @@ -237,6 +237,33 @@ def provisioning_host(module_ssh_key_file, pxe_loader): prov_host.blank = getattr(prov_host, 'blank', False) +@pytest.fixture +def provision_multiple_hosts(module_ssh_key_file, pxe_loader, request): + """Fixture to check out two blank VMs""" + vlan_id = settings.provisioning.vlan_id + cd_iso = ( + "" # TODO: Make this an optional fixture parameter (update vm_firmware when adding this) + ) + # Keeping the default value to 2 + count = request.param if request.param is not None else 2 + + with Broker( + workflow="deploy-configure-pxe-provisioning-host-rhv", + host_class=ContentHost, + _count=count, + target_vlan_id=vlan_id, + target_vm_firmware=pxe_loader.vm_firmware, + target_vm_cd_iso=cd_iso, + blank=True, + target_memory='6GiB', + auth=module_ssh_key_file, + ) as hosts: + yield hosts + + for prov_host in hosts: + prov_host.blank = getattr(prov_host, 'blank', False) + + @pytest.fixture def provisioning_hostgroup( module_provisioning_sat, diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index 0db7c486bcf..e61bf069b0a 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -339,7 +339,7 @@ def test_positive_auto_provision_all( @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_refresh_facts_pxe_host(self): + def test_positive_refresh_facts_pxe_host(self, module_target_sat): """Refresh the facts of pxe based discovered hosts by adding a new NIC :id: 413fb608-cd5c-441d-af86-fd2d40346d96 @@ -354,14 +354,12 @@ def test_positive_refresh_facts_pxe_host(self): :expectedresults: Added Fact should be displayed on refreshing the facts - :CaseAutomation: NotAutomated - :CaseImportance: High """ @pytest.mark.on_premises_provisioning 
@pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) - @pytest.mark.parametrize('pxe_loader', ['bios', 'uefi'], indirect=True) + @pytest.mark.parametrize('pxe_loader', ['uefi'], indirect=True) @pytest.mark.rhel_ver_match('9') @pytest.mark.tier3 def test_positive_reboot_pxe_host( @@ -394,6 +392,7 @@ def test_positive_reboot_pxe_host( timeout=240, delay=20, ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] discovered_host.hostgroup = provisioning_hostgroup discovered_host.location = provisioning_hostgroup.location[0] @@ -402,25 +401,51 @@ def test_positive_reboot_pxe_host( result = sat.api.DiscoveredHost(id=discovered_host.id).reboot() assert 'Unable to perform reboot' not in result - @pytest.mark.stubbed + @pytest.mark.on_premises_provisioning + @pytest.mark.parametrize('module_provisioning_sat', ['discovery'], indirect=True) + @pytest.mark.parametrize('pxe_loader', ['bios'], indirect=True) + @pytest.mark.rhel_ver_match('9') + @pytest.mark.parametrize('provision_multiple_hosts', [2]) @pytest.mark.tier3 - def test_positive_reboot_all_pxe_hosts(self): + def test_positive_reboot_all_pxe_hosts( + self, + module_provisioning_rhel_content, + module_discovery_sat, + provision_multiple_hosts, + provisioning_hostgroup, + pxe_loader, + count, + ): """Rebooting all pxe-based discovered hosts :id: 69c807f8-5646-4aa6-8b3c-5ecdb69560ed :parametrized: yes - :Setup: Provisioning should be configured and a hosts should be discovered via PXE boot. + :Setup: Provisioning should be configured and hosts should be discovered via PXE boot. 
 :Steps: PUT /api/v2/discovered_hosts/reboot_all - :expectedresults: All disdcovered host should be rebooted successfully - - :CaseAutomation: Automated + :expectedresults: All discovered hosts should be rebooted successfully :CaseImportance: Medium """ + sat = module_discovery_sat.sat + for host in provision_multiple_hosts: + host.power_control(ensure=False) + mac = host._broker_args['provisioning_nic_mac_addr'] + wait_for( + lambda: sat.api.DiscoveredHost().search(query={'mac': mac}) != [], + timeout=240, + delay=20, + ) + discovered_host = sat.api.DiscoveredHost().search(query={'mac': mac})[0] + discovered_host.hostgroup = provisioning_hostgroup + discovered_host.location = provisioning_hostgroup.location[0] + discovered_host.organization = provisioning_hostgroup.organization[0] + discovered_host.build = True + result = sat.api.DiscoveredHost().reboot_all() + assert 'Discovered hosts are rebooting now' in result['message'] class TestFakeDiscoveryTests: diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index f55a287d675..51ac5fec162 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -16,36 +16,16 @@ :Upstream: No """ -from fauxfactory import gen_choice, gen_integer, gen_string -from nailgun import entities +from fauxfactory import gen_choice, gen_integer import pytest from requests.exceptions import HTTPError from robottelo.utils.datafactory import valid_data_list -@pytest.fixture(scope="module") -def module_hostgroup(module_org): - module_hostgroup = entities.HostGroup(organization=[module_org]).create() - yield module_hostgroup - module_hostgroup.delete() - - -@pytest.fixture(scope="module") -def module_location(module_location): - yield module_location - module_location.delete() - - -@pytest.fixture(scope="module") -def module_org(module_org): - yield module_org - module_org.delete() - - @pytest.mark.tier1 @pytest.mark.e2e -def
test_positive_end_to_end_crud(module_org, module_location, module_hostgroup): +def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, target_sat): """Create a new discovery rule with several attributes, update them and delete the rule itself. @@ -67,7 +47,7 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup) name = gen_choice(list(valid_data_list().values())) search = gen_choice(searches) hostname = 'myhost-<%= rand(99999) %>' - discovery_rule = entities.DiscoveryRule( + discovery_rule = target_sat.api.DiscoveryRule( name=name, search_=search, hostname=hostname, @@ -103,23 +83,10 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup) discovery_rule.read() -@pytest.mark.tier1 -def test_negative_create_with_invalid_host_limit_and_priority(): - """Create a discovery rule with invalid host limit and priority - - :id: e3c7acb1-ac56-496b-ac04-2a83f66ec290 - - :expectedresults: Validation error should be raised - """ - with pytest.raises(HTTPError): - entities.DiscoveryRule(max_count=gen_string('alpha')).create() - with pytest.raises(HTTPError): - entities.DiscoveryRule(priority=gen_string('alpha')).create() - - -@pytest.mark.stubbed @pytest.mark.tier3 -def test_positive_provision_with_rule_priority(): +def test_positive_update_and_provision_with_rule_priority( + module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org +): """Create multiple discovery rules with different priority and check rule with highest priority executed first @@ -130,44 +97,67 @@ def test_positive_provision_with_rule_priority(): :expectedresults: Host with lower count have higher priority and that rule should be executed first - :CaseAutomation: NotAutomated - :CaseImportance: High """ + discovered_host = module_target_sat.api_factory.create_discovered_host() + + prio_rule = module_target_sat.api.DiscoveryRule( + max_count=5, + hostgroup=module_discovery_hostgroup, + search_=f'name = 
{discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=1, + ).create() + rule = module_target_sat.api.DiscoveryRule( + max_count=5, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=10, + ).create() -@pytest.mark.stubbed -@pytest.mark.tier3 -def test_positive_multi_provision_with_rule_limit(): - """Create a discovery rule (CPU_COUNT = 2) with host limit 1 and - provision more than 2 hosts with same rule - - :id: 553c8ebf-d1c1-4ac2-7948-d3664a5b450b - - :Setup: Hosts with two CPUs should already be discovered - - :expectedresults: Rule should only be applied to 2 discovered hosts - and the rule should already be skipped for the 3rd one. - - :CaseAutomation: NotAutomated + result = module_target_sat.api.DiscoveredHost(id=discovered_host['id']).auto_provision() + assert f'provisioned with rule {prio_rule.name}' in result['message'] - :CaseImportance: High - """ + # Delete discovery rule + for _ in rule, prio_rule: + _.delete() + with pytest.raises(HTTPError): + _.read() -@pytest.mark.stubbed @pytest.mark.tier3 -def test_positive_provision_with_updated_discovery_rule(): - """Update an existing rule and provision a host with it. 
+def test_positive_multi_provision_with_rule_limit( + module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org +): + """Create a discovery rule with certain host limit and try to provision more than the passed limit - :id: 3fb20f0f-02e9-4158-9744-f583308c4e89 - - :Setup: Host should already be discovered + :id: 553c8ebf-d1c1-4ac2-7948-d3664a5b450b - :expectedresults: User should be able to update the rule and it should - be applied on discovered host + :Setup: Hosts should already be discovered - :CaseAutomation: NotAutomated + :expectedresults: Rule should only be applied to the number of the hosts passed as limit in the rule :CaseImportance: High """ + for _ in range(2): + discovered_host = module_target_sat.api_factory.create_discovered_host() + + rule = module_target_sat.api.DiscoveryRule( + max_count=1, + hostgroup=module_discovery_hostgroup, + search_=f'name = {discovered_host["name"]}', + location=[discovery_location], + organization=[discovery_org], + priority=1000, + ).create() + result = module_target_sat.api.DiscoveredHost().auto_provision_all() + assert '1 discovered hosts were provisioned' in result['message'] + + # Delete discovery rule + rule.delete() + with pytest.raises(HTTPError): + rule.read() From df42f4bf76cae45ca5211987ca985c0d8072332d Mon Sep 17 00:00:00 2001 From: Shweta Singh Date: Tue, 31 Oct 2023 16:06:11 +0530 Subject: [PATCH 64/96] Remove skip marker from test (#13005) Remove skip marker from the BZ as it is no longer reproducible --- tests/foreman/api/test_registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/foreman/api/test_registration.py b/tests/foreman/api/test_registration.py index 9d184f029f5..d9267bb5d2f 100644 --- a/tests/foreman/api/test_registration.py +++ b/tests/foreman/api/test_registration.py @@ -89,7 +89,6 @@ def test_host_registration_end_to_end( @pytest.mark.tier3 @pytest.mark.rhel_ver_match('[^6]') -@pytest.mark.skip_if_open("BZ:2229112") def 
test_positive_allow_reregistration_when_dmi_uuid_changed( module_org, rhel_contenthost, target_sat, module_ak_with_synced_repo, module_location ): From aa0b9319f53a67f7b1b4797021aa676556b0bda4 Mon Sep 17 00:00:00 2001 From: Vladimir Sedmik Date: Fri, 8 Sep 2023 17:20:48 +0200 Subject: [PATCH 65/96] ISS refactor - batch 3 Introduced changes: 1. Extended test_positive_import_content_for_disconnected_sat_with_existing_content by assertion for correct error message when non-import-only CV is used for import 2. Removed _import_entities 3. Added test for customer scenarios: test_postive_export_import_cv_with_long_name test_positive_export_rerun_failed_import test_postive_export_import_repo_with_GPG --- tests/foreman/cli/test_satellitesync.py | 240 ++++++++++++++++++++---- 1 file changed, 199 insertions(+), 41 deletions(-) diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index d5a5cfdf9f6..a4c71f13d80 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -17,10 +17,12 @@ :Upstream: No """ import os +from time import sleep from fauxfactory import gen_string from manifester import Manifester import pytest +from wait_for import wait_for from robottelo.cli.base import CLIReturnCodeError from robottelo.cli.content_export import ContentExport @@ -46,6 +48,7 @@ PULP_IMPORT_DIR, REPO_TYPE, REPOS, + DataFile, ) from robottelo.constants.repos import ANSIBLE_GALAXY @@ -486,41 +489,6 @@ def _create_cv(cv_name, repo, module_org, publish=True): return content_view, cvv_id -def _import_entities(product, repo, cv, mos='no'): - """Sets same CV, product and repository in importing organization as - exporting organization - - :param str product: The product name same as exporting product - :param str repo: The repo name same as exporting repo - :param str cv: The cv name same as exporting cv - :param str mos: Mirror on Sync repo, by default 'no' can override to 'yes' - :returns dictionary with CLI 
entities created in this function - """ - importing_org = make_org() - importing_prod = make_product({'organization-id': importing_org['id'], 'name': product}) - importing_repo = make_repository( - { - 'name': repo, - 'mirror-on-sync': mos, - 'download-policy': 'immediate', - 'product-id': importing_prod['id'], - } - ) - importing_cv = make_content_view({'name': cv, 'organization-id': importing_org['id']}) - ContentView.add_repository( - { - 'id': importing_cv['id'], - 'organization-id': importing_org['id'], - 'repository-id': importing_repo['id'], - } - ) - return { - 'importing_org': importing_org, - 'importing_repo': importing_repo, - 'importing_cv': importing_cv, - } - - class TestContentViewSync: """Implements Content View Export Import tests in CLI""" @@ -1333,6 +1301,107 @@ def test_postive_export_import_cv_with_file_content( assert len(imported_files) assert len(exported_files) == len(imported_files) + @pytest.mark.tier2 + @pytest.mark.parametrize( + 'function_synced_rhel_repo', + ['rhae2'], + indirect=True, + ) + def test_positive_export_rerun_failed_import( + self, + target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_synced_rhel_repo, + function_sca_manifest_org, + function_import_org_with_manifest, + ): + """Verify that import can be rerun successfully after failed import. + + :id: 73e7cece-9a93-4203-9c2c-813d5a8d7700 + + :parametrized: yes + + :setup: + 1. Enabled and synced RH repository. + + :steps: + 1. Create CV, add repo from the setup, publish it and run export. + 2. Start import of the CV into another organization and kill it before it's done. + 3. Rerun the import again, let it finish and check the CVV was imported. + + :expectedresults: + 1. First import should fail, no CVV should be added. + 2. Second import should succeed without errors and should contain the CVV. 
+ + :CaseImportance: Medium + + :BZ: 2058905 + + :customerscenario: true + """ + # Create CV and publish + cv_name = gen_string('alpha') + cv = target_sat.cli_factory.make_content_view( + {'name': cv_name, 'organization-id': function_sca_manifest_org.id} + ) + target_sat.cli.ContentView.add_repository( + { + 'id': cv['id'], + 'organization-id': function_sca_manifest_org.id, + 'repository-id': function_synced_rhel_repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': cv['id']}) + cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(cv['versions']) == 1 + cvv = cv['versions'][0] + # Verify export directory is empty + assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == '' + # Export the CV + export = target_sat.cli.ContentExport.completeVersion( + {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id} + ) + import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message']) + assert target_sat.execute(f'ls {import_path}').stdout != '' + # Run the import asynchronously + task_id = target_sat.cli.ContentImport.version( + { + 'organization-id': function_import_org_with_manifest.id, + 'path': import_path, + 'async': True, + } + )['id'] + # Wait for the CV creation on import and make the import fail + wait_for( + lambda: target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + ) + ) + target_sat.cli.Service.restart() + sleep(30) + # Assert that the initial import task did not succeed and CVV was removed + assert ( + target_sat.api.ForemanTask() + .search( + query={'search': f'Actions::Katello::ContentViewVersion::Import and id = {task_id}'} + )[0] + .result + != 'success' + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + )['versions'] + assert len(importing_cvv) == 0 + # Rerun the import and let it finish + target_sat.cli.ContentImport.version( 
+ {'organization-id': function_import_org_with_manifest.id, 'path': import_path} + ) + importing_cvv = target_sat.cli.ContentView.info( + {'name': cv_name, 'organization-id': function_import_org_with_manifest.id} + )['versions'] + assert len(importing_cvv) == 1 + @pytest.mark.tier3 def test_postive_export_import_ansible_collection_repo( self, @@ -1390,6 +1459,73 @@ def test_postive_export_import_ansible_collection_repo( assert len(import_product['content']) == 1 assert import_product['content'][0]['content-type'] == "ansible_collection" + @pytest.mark.tier3 + def test_postive_export_import_repo_with_GPG( + self, + target_sat, + config_export_import_settings, + export_import_cleanup_function, + function_org, + function_synced_custom_repo, + function_import_org, + ): + """Test export and import of a repository with GPG key + + :id: a5b455aa-e87e-4ae5-a1c7-4c8e6c7f7af5 + + :setup: + 1. Product with synced custom repository. + + :steps: + 1. Create a GPG key and add it to the setup repository. + 2. Export the repository and import it into another organization. + + :expectedresults: + 1. Export and import succeeds without any errors. + 2. GPG key is imported to the importing org too. + + :CaseImportance: Medium + + :BZ: 2178645, 2090390 + + :customerscenario: true + """ + # Create a GPG key and add it to the setup repository. + gpg_key = target_sat.api.GPGKey( + organization=function_org, + content=DataFile.VALID_GPG_KEY_FILE.read_text(), + ).create() + target_sat.cli.Repository.update( + {'id': function_synced_custom_repo.id, 'gpg-key-id': gpg_key.id} + ) + # Export the repository and import it into another organization. 
+ export = target_sat.cli.ContentExport.completeRepository( + {'id': function_synced_custom_repo.id} + ) + import_path = target_sat.move_pulp_archive(function_org, export['message']) + target_sat.cli.ContentImport.repository( + { + 'organization-id': function_import_org.id, + 'path': import_path, + } + ) + # Check the imported repo has the GPG key assigned. + imported_repo = target_sat.cli.Repository.info( + { + 'name': function_synced_custom_repo.name, + 'product': function_synced_custom_repo.product.name, + 'organization-id': function_import_org.id, + } + ) + assert int(imported_repo['content-counts']['packages']) + assert imported_repo['gpg-key']['name'] == gpg_key.name + # Check the GPG key is imported to the importing org too. + imported_gpg = target_sat.cli.ContentCredential.info( + {'organization-id': function_import_org.id, 'name': gpg_key.name} + ) + assert imported_gpg + assert imported_gpg['content'] == gpg_key.content + @pytest.mark.tier3 @pytest.mark.parametrize( 'function_synced_rhel_repo', @@ -1480,13 +1616,15 @@ def test_positive_import_content_for_disconnected_sat_with_existing_content( 1. Product with synced custom repository, published in a CV. :steps: - 1. Run complete export of the CV. - 2. On Disconnected satellite, create a cv with same name as cv on 2 and with - 'import-only' selected. - 3. Run the import command. + 1. Run complete export of the CV from setup. + 2. On Disconnected satellite, create a CV with the same name as setup CV and with + 'import-only' set to False and run the import command. + 3. On Disconnected satellite, create a CV with the same name as setup CV and with + 'import-only' set to True and run the import command. :expectedresults: - 1. Import should run successfully + 1. Import should fail with correct message when existing CV has 'import-only' set False. + 2. Import should succeed when existing CV has 'import-only' set True. 
:bz: 2030101 @@ -1505,7 +1643,27 @@ def test_positive_import_content_for_disconnected_sat_with_existing_content( result = target_sat.execute(f'ls {import_path}') assert result.stdout != '' # Import section - # Create cv with 'import-only' set to true + # Create cv with 'import-only' set to False + cv = target_sat.cli_factory.make_content_view( + { + 'name': export_cv_name, + 'import-only': False, + 'organization-id': function_import_org.id, + } + ) + with pytest.raises(CLIReturnCodeError) as error: + target_sat.cli.ContentImport.version( + {'organization-id': function_import_org.id, 'path': import_path} + ) + assert ( + f"Unable to import in to Content View specified in the metadata - '{export_cv_name}'. " + "The 'import_only' attribute for the content view is set to false. To mark this " + "Content View as importable, have your system administrator run the following command " + f"on the server. \n foreman-rake katello:set_content_view_import_only ID={cv.id}" + ) in error.value.message + target_sat.cli.ContentView.remove({'id': cv.id, 'destroy-content-view': 'yes'}) + + # Create cv with 'import-only' set to True target_sat.cli_factory.make_content_view( {'name': export_cv_name, 'import-only': True, 'organization-id': function_import_org.id} ) From 72485feb6f4b046fc651a8f89ed2fd34d93e57d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 23:20:37 -0400 Subject: [PATCH 66/96] Bump dynaconf[vault] from 3.2.3 to 3.2.4 (#13021) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b7e16b82192..e401aea7641 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ betelgeuse==1.10.0 broker[docker]==0.4.1 cryptography==41.0.5 deepdiff==6.6.1 -dynaconf[vault]==3.2.3 +dynaconf[vault]==3.2.4 fauxfactory==3.1.0 jinja2==3.1.2 manifester==0.0.14 From 07c699950a68df54e66336c62e4428baa8dc7c41 Mon Sep 17 00:00:00 2001 From: 
Vladimir Sedmik Date: Mon, 30 Oct 2023 21:33:12 +0100 Subject: [PATCH 67/96] Automate incremental export import of a yum repo --- robottelo/host_helpers/satellite_mixins.py | 1 + tests/foreman/cli/test_satellitesync.py | 102 +++++++++++++++------ 2 files changed, 74 insertions(+), 29 deletions(-) diff --git a/robottelo/host_helpers/satellite_mixins.py b/robottelo/host_helpers/satellite_mixins.py index 263510f6783..c2042cc856d 100644 --- a/robottelo/host_helpers/satellite_mixins.py +++ b/robottelo/host_helpers/satellite_mixins.py @@ -200,6 +200,7 @@ def move_pulp_archive(self, org, export_message): sets ownership, returns import path """ self.execute( + f'rm -rf {PULP_IMPORT_DIR}/{org.name} &&' f'mv {PULP_EXPORT_DIR}/{org.name} {PULP_IMPORT_DIR} && ' f'chown -R pulp:pulp {PULP_IMPORT_DIR}' ) diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index a4c71f13d80..e23a807f6d1 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -1837,49 +1837,93 @@ def test_negative_export_repo_from_future_datetime(self): :CaseLevel: System """ - @pytest.mark.stubbed @pytest.mark.tier3 - def test_positive_export_redhat_incremental_yum_repo(self): - """Export Red Hat YUM repo in directory incrementally. + @pytest.mark.upgrade + def test_positive_export_import_incremental_yum_repo( + self, + target_sat, + export_import_cleanup_function, + config_export_import_settings, + function_org, + function_import_org, + function_synced_custom_repo, + ): + """Export and import custom YUM repo contents incrementally. - :id: be054636-629a-40a0-b414-da3964154bd1 + :id: 318560d7-71f5-4646-ab5c-12a2ec22d031 - :steps: + :setup: + 1. Enabled and synced custom yum repository. - 1. Export whole Red Hat YUM repo. - 2. Add some packages to the earlier exported yum repo. - 3. Incrementally export the yum repo from last exported date. + :steps: + 1. First, export and import whole custom YUM repo. + 2. 
Add some packages to the earlier exported YUM repo. + 3. Incrementally export the custom YUM repo. + 4. Import the exported YUM repo contents incrementally. - :expectedresults: Red Hat YUM repo contents have been exported - incrementally in separate directory. + :expectedresults: + 1. Complete export and import succeeds, product and repository is created + in the importing organization and content counts match. + 2. Incremental export and import succeeds, content counts match the updated counts. - :CaseAutomation: NotAutomated + :CaseAutomation: Automated :CaseLevel: System """ + export_cc = target_sat.cli.Repository.info({'id': function_synced_custom_repo.id})[ + 'content-counts' + ] - @pytest.mark.stubbed - @pytest.mark.tier3 - @pytest.mark.upgrade - def test_positive_export_import_redhat_incremental_yum_repo(self): - """Import the exported YUM repo contents incrementally. - - :id: 318560d7-71f5-4646-ab5c-12a2ec22d031 - - :steps: + # Verify export directory is empty + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + # Export complete and check the export directory + export = target_sat.cli.ContentExport.completeRepository( + {'id': function_synced_custom_repo['id']} + ) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) - 1. First, Export and Import whole Red Hat YUM repo. - 2. Add some packages to the earlier exported yum repo. - 3. Incrementally export the Red Hat YUM repo from last exported - date. - 4. Import the exported YUM repo contents incrementally. + # Run import and verify the product and repo is created + # in the importing org and the content counts match. 
+ import_path = target_sat.move_pulp_archive(function_org, export['message']) + target_sat.cli.ContentImport.repository( + {'organization-id': function_import_org.id, 'path': import_path} + ) + import_repo = target_sat.cli.Repository.info( + { + 'organization-id': function_import_org.id, + 'name': function_synced_custom_repo.name, + 'product': function_synced_custom_repo.product.name, + } + ) + assert import_repo['content-counts'] == export_cc, 'Import counts do not match the export.' - :expectedresults: YUM repo contents have been imported incrementally. + # Upload custom content into the repo + with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: + result = target_sat.api.Repository(id=function_synced_custom_repo.id).upload_content( + files={'content': handle} + ) + assert 'success' in result['status'] - :CaseAutomation: NotAutomated + # Export incremental and check the export directory + export = target_sat.cli.ContentExport.incrementalRepository( + {'id': function_synced_custom_repo['id']} + ) + assert '2.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) - :CaseLevel: System - """ + # Run the import and verify the content counts match the updated counts. + import_path = target_sat.move_pulp_archive(function_org, export['message']) + target_sat.cli.ContentImport.repository( + {'organization-id': function_import_org.id, 'path': import_path} + ) + import_repo = target_sat.cli.Repository.info( + { + 'organization-id': function_import_org.id, + 'name': function_synced_custom_repo.name, + 'product': function_synced_custom_repo.product.name, + } + ) + export_cc['packages'] = str(int(export_cc['packages']) + 1) + assert import_repo['content-counts'] == export_cc, 'Import counts do not match the export.' 
@pytest.mark.stubbed @pytest.mark.tier3 From ab52aabc2120a66659bfc62dbee07b2a4b981ad9 Mon Sep 17 00:00:00 2001 From: Vladimir Sedmik Date: Tue, 31 Oct 2023 17:41:28 +0100 Subject: [PATCH 68/96] Extend export of mixed CV by other content types --- tests/foreman/cli/test_satellitesync.py | 258 ++++++++++++------------ 1 file changed, 134 insertions(+), 124 deletions(-) diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index e23a807f6d1..1edde19862e 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -41,6 +41,7 @@ from robottelo.config import settings from robottelo.constants import ( CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, DEFAULT_ARCHITECTURE, DEFAULT_CV, EXPORT_LIBRARY_NAME, @@ -70,9 +71,8 @@ def config_export_import_settings(): def export_import_cleanup_function(target_sat, function_org): """Deletes export/import dirs of function org""" yield - # Deletes directories created for export/import test target_sat.execute( - f'rm -rf {PULP_EXPORT_DIR}/{function_org.name} {PULP_IMPORT_DIR}/{function_org.name}', + f'rm -rf {PULP_EXPORT_DIR}/{function_org.name} {PULP_IMPORT_DIR}/{function_org.name}' ) @@ -80,7 +80,6 @@ def export_import_cleanup_function(target_sat, function_org): def export_import_cleanup_module(target_sat, module_org): """Deletes export/import dirs of module_org""" yield - # Deletes directories created for export/import test target_sat.execute( f'rm -rf {PULP_EXPORT_DIR}/{module_org.name} {PULP_IMPORT_DIR}/{module_org.name}' ) @@ -101,23 +100,6 @@ def function_import_org_with_manifest(target_sat, function_import_org): yield function_import_org -@pytest.fixture(scope='class') -def docker_repo(module_target_sat, module_org): - product = make_product({'organization-id': module_org.id}) - repo = make_repository( - { - 'organization-id': module_org.id, - 'product-id': product['id'], - 'content-type': REPO_TYPE['docker'], - 'download-policy': 'immediate', - 
'url': 'https://quay.io', - 'docker-upstream-name': 'quay/busybox', - } - ) - Repository.synchronize({'id': repo['id']}) - yield repo - - @pytest.fixture(scope='module') def module_synced_custom_repo(module_target_sat, module_org, module_product): repo = module_target_sat.cli_factory.make_repository( @@ -183,6 +165,54 @@ def function_synced_rhel_repo(request, target_sat, function_sca_manifest_org): return repo +@pytest.fixture(scope='function') +def function_synced_file_repo(target_sat, function_org, function_product): + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, + 'product-id': function_product.id, + 'content-type': 'file', + 'url': settings.repos.file_type_repo.url, + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + yield repo + + +@pytest.fixture(scope='function') +def function_synced_docker_repo(target_sat, function_org): + product = target_sat.cli_factory.make_product({'organization-id': function_org.id}) + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, + 'product-id': product['id'], + 'content-type': REPO_TYPE['docker'], + 'download-policy': 'immediate', + 'url': CONTAINER_REGISTRY_HUB, + 'docker-upstream-name': CONTAINER_UPSTREAM_NAME, + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + yield repo + + +@pytest.fixture(scope='function') +def function_synced_AC_repo(target_sat, function_org, function_product): + repo = target_sat.cli_factory.make_repository( + { + 'organization-id': function_org.id, + 'product-id': function_product.id, + 'content-type': 'ansible_collection', + 'url': ANSIBLE_GALAXY, + 'ansible-collection-requirements': '{collections: [ \ + { name: theforeman.foreman, version: "2.1.0" }, \ + { name: theforeman.operations, version: "0.1.0"} ]}', + } + ) + target_sat.cli.Repository.synchronize({'id': repo['id']}) + yield repo + + @pytest.mark.run_in_one_thread class TestRepositoryExport: """Tests for exporting a repository 
via CLI""" @@ -241,7 +271,7 @@ def test_positive_export_version_custom_repo( target_sat.cli.ContentView.publish({'id': cv['id']}) cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 2 - cvv = cv['versions'][1] + cvv = max(cv['versions'], key=lambda x: int(x['id'])) target_sat.cli.ContentExport.incrementalVersion( {'id': cvv['id'], 'organization-id': module_org.id} ) @@ -344,7 +374,7 @@ def test_positive_export_complete_library_rh_repo( @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_export_repository_docker( - self, target_sat, export_import_cleanup_module, module_org, docker_repo + self, target_sat, export_import_cleanup_function, function_org, function_synced_docker_repo ): """Export docker repo via complete and incremental repository. @@ -366,18 +396,20 @@ def test_positive_export_repository_docker( :customerscenario: true """ # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export complete and check the export directory - target_sat.cli.ContentExport.completeRepository({'id': docker_repo['id']}) - assert '1.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + target_sat.cli.ContentExport.completeRepository({'id': function_synced_docker_repo['id']}) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) # Export incremental and check the export directory - target_sat.cli.ContentExport.incrementalRepository({'id': docker_repo['id']}) - assert '2.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + target_sat.cli.ContentExport.incrementalRepository( + {'id': function_synced_docker_repo['id']} + ) + assert '2.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) @pytest.mark.tier3 @pytest.mark.upgrade def test_positive_export_version_docker( - self, target_sat, export_import_cleanup_module, 
module_org, docker_repo + self, target_sat, export_import_cleanup_function, function_org, function_synced_docker_repo ): """Export CV with docker repo via complete and incremental version. @@ -402,12 +434,12 @@ def test_positive_export_version_docker( """ # Create CV and publish cv_name = gen_string('alpha') - cv = make_content_view({'name': cv_name, 'organization-id': module_org.id}) + cv = make_content_view({'name': cv_name, 'organization-id': function_org.id}) target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': docker_repo['id'], + 'organization-id': function_org.id, + 'repository-id': function_synced_docker_repo['id'], } ) target_sat.cli.ContentView.publish({'id': cv['id']}) @@ -415,21 +447,21 @@ def test_positive_export_version_docker( assert len(cv['versions']) == 1 cvv = cv['versions'][0] # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export complete and check the export directory target_sat.cli.ContentExport.completeVersion( - {'id': cvv['id'], 'organization-id': module_org.id} + {'id': cvv['id'], 'organization-id': function_org.id} ) - assert '1.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + assert '1.0' in target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) # Publish new CVV, export incremental and check the export directory target_sat.cli.ContentView.publish({'id': cv['id']}) cv = target_sat.cli.ContentView.info({'id': cv['id']}) assert len(cv['versions']) == 2 - cvv = cv['versions'][1] + cvv = max(cv['versions'], key=lambda x: int(x['id'])) target_sat.cli.ContentExport.incrementalVersion( - {'id': cvv['id'], 'organization-id': module_org.id} + {'id': cvv['id'], 'organization-id': function_org.id} ) - assert '2.0' in target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) + assert '2.0' in 
target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) @pytest.fixture(scope='class') @@ -1141,149 +1173,127 @@ def test_negative_import_invalid_path(self, module_org): @pytest.mark.tier3 def test_postive_export_cv_with_mixed_content_repos( - self, class_export_entities, export_import_cleanup_module, target_sat, module_org + self, + export_import_cleanup_function, + target_sat, + function_org, + function_synced_custom_repo, + function_synced_file_repo, + function_synced_docker_repo, + function_synced_AC_repo, ): """Exporting CV version having yum and non-yum(docker) is successful :id: ffcdbbc6-f787-4978-80a7-4b44c389bf49 - :steps: + :setup: + 1. Synced repositories of each content type: yum, file, docker, AC - 1. Create product with yum and non-yum(docker) repos - 2. Sync the repositories - 3. Create CV with above product and publish - 4. Export CV version contents to a directory + :steps: + 1. Create CV, add all setup repos and publish. + 2. Export CV version contents to a directory. :expectedresults: - 1. Export will succeed, however the export wont contain non-yum repo. - No warning is printed (see BZ 1775383) + 1. Export succeeds and content is exported. 
:BZ: 1726457 :customerscenario: true """ - product = make_product( - { - 'organization-id': module_org.id, - 'name': gen_string('alpha'), - } - ) - nonyum_repo = make_repository( - { - 'content-type': 'docker', - 'docker-upstream-name': 'quay/busybox', - 'organization-id': module_org.id, - 'product-id': product['id'], - 'url': CONTAINER_REGISTRY_HUB, - }, - ) - Repository.synchronize({'id': nonyum_repo['id']}) - yum_repo = make_repository( - { - 'name': gen_string('alpha'), - 'download-policy': 'immediate', - 'mirror-on-sync': 'no', - 'product-id': product['id'], - } - ) - Repository.synchronize({'id': yum_repo['id']}) - content_view = make_content_view({'organization-id': module_org.id}) - # Add docker and yum repo - ContentView.add_repository( - { - 'id': content_view['id'], - 'organization-id': module_org.id, - 'repository-id': nonyum_repo['id'], - } - ) - ContentView.add_repository( - { - 'id': content_view['id'], - 'organization-id': module_org.id, - 'repository-id': yum_repo['id'], - } + content_view = target_sat.cli_factory.make_content_view( + {'organization-id': function_org.id} + ) + repos = [ + function_synced_custom_repo, + function_synced_file_repo, + function_synced_docker_repo, + function_synced_AC_repo, + ] + for repo in repos: + target_sat.cli.ContentView.add_repository( + { + 'id': content_view['id'], + 'organization-id': function_org.id, + 'repository-id': repo['id'], + } + ) + target_sat.cli.ContentView.publish({'id': content_view['id']}) + exporting_cv = target_sat.cli.ContentView.info({'id': content_view['id']}) + assert len(exporting_cv['versions']) == 1 + exporting_cvv = target_sat.cli.ContentView.version_info( + {'id': exporting_cv['versions'][0]['id']} ) - ContentView.publish({'id': content_view['id']}) - exporting_cv_id = ContentView.info({'id': content_view['id']}) - assert len(exporting_cv_id['versions']) == 1 - exporting_cvv_id = exporting_cv_id['versions'][0] + assert len(exporting_cvv['repositories']) == len(repos) # check packages - 
exported_packages = Package.list({'content-view-version-id': exporting_cvv_id['id']}) + exported_packages = target_sat.cli.Package.list( + {'content-view-version-id': exporting_cvv['id']} + ) assert len(exported_packages) # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' # Export cv - ContentExport.completeVersion( - {'id': exporting_cvv_id['id'], 'organization-id': module_org.id} + target_sat.cli.ContentExport.completeVersion( + {'id': exporting_cvv['id'], 'organization-id': function_org.id} ) # Verify export directory is not empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) != '' + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != '' @pytest.mark.tier3 def test_postive_export_import_cv_with_file_content( self, target_sat, config_export_import_settings, - export_import_cleanup_module, - module_org, + export_import_cleanup_function, + function_org, + function_synced_file_repo, function_import_org, ): """Exporting and Importing cv with file content :id: d00739f0-dedf-4303-8929-889dc23260a4 + :setup: + 1. Product with synced file-type repository. + :steps: - 1. Create custom product and custom repo with file type - 2. Sync repo - 3. Create cv and add file repo created in step 1 and publish - 4. Export cv and import cv into another satellite. - 5. Check imported cv has files in it. + 1. Create CV, add the file repo and publish. + 2. Export the CV and import it into another organization. + 3. Check the imported CV has files in it. :expectedresults: - 1. Imported cv should have the files present in the cv of the imported system. + 1. Imported CV should have the files present. :BZ: 1995827 :customerscenario: true """ - # setup custom repo + # Create CV, add the file repo and publish. 
cv_name = import_cv_name = gen_string('alpha') - product = target_sat.cli_factory.make_product({'organization-id': module_org.id}) - file_repo = target_sat.cli_factory.make_repository( - { - 'organization-id': module_org.id, - 'product-id': product['id'], - 'content-type': 'file', - 'url': settings.repos.file_type_repo.url, - } - ) - target_sat.cli.Repository.synchronize({'id': file_repo['id']}) - # create cv and publish cv = target_sat.cli_factory.make_content_view( - {'name': cv_name, 'organization-id': module_org.id} + {'name': cv_name, 'organization-id': function_org.id} ) target_sat.cli.ContentView.add_repository( { 'id': cv['id'], - 'organization-id': module_org.id, - 'repository-id': file_repo['id'], + 'organization-id': function_org.id, + 'repository-id': function_synced_file_repo['id'], } ) target_sat.cli.ContentView.publish({'id': cv['id']}) - exporting_cv_id = target_sat.cli.ContentView.info({'id': cv['id']}) - assert len(exporting_cv_id['versions']) == 1 - exporting_cvv_id = exporting_cv_id['versions'][0]['id'] + exporting_cv = target_sat.cli.ContentView.info({'id': cv['id']}) + assert len(exporting_cv['versions']) == 1 + exporting_cvv_id = exporting_cv['versions'][0]['id'] # check files exported_files = target_sat.cli.File.list({'content-view-version-id': exporting_cvv_id}) assert len(exported_files) # Verify export directory is empty - assert target_sat.validate_pulp_filepath(module_org, PULP_EXPORT_DIR) == '' - # Export cv + assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == '' + # Export the CV export = target_sat.cli.ContentExport.completeVersion( - {'id': exporting_cvv_id, 'organization-id': module_org.id} + {'id': exporting_cvv_id, 'organization-id': function_org.id} ) - import_path = target_sat.move_pulp_archive(module_org, export['message']) + import_path = target_sat.move_pulp_archive(function_org, export['message']) # Check that files are present in import_path result = target_sat.execute(f'ls {import_path}') assert 
result.stdout != '' From c1d7b42971f61d831e4afd635ebf0c7a27bd643a Mon Sep 17 00:00:00 2001 From: dosas Date: Thu, 2 Nov 2023 08:59:08 +0100 Subject: [PATCH 69/96] Check if VAULT_ADDR env var exists before deleting it (#13016) Co-authored-by: dosas --- robottelo/utils/vault.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/robottelo/utils/vault.py b/robottelo/utils/vault.py index d3a40d1a706..d447331ac15 100644 --- a/robottelo/utils/vault.py +++ b/robottelo/utils/vault.py @@ -31,7 +31,8 @@ def setup(self): self.export_vault_addr() def teardown(self): - del os.environ['VAULT_ADDR'] + if os.environ.get('VAULT_ADDR') is not None: + del os.environ['VAULT_ADDR'] def export_vault_addr(self): vaulturl = re.findall('VAULT_URL_FOR_DYNACONF=(.*)', self.envdata)[0] From 416360946540a7ea8fd2ea1f5e605621d4eaa44b Mon Sep 17 00:00:00 2001 From: Omkar Khatavkar Date: Thu, 2 Nov 2023 13:51:03 +0530 Subject: [PATCH 70/96] PRT comment adding using curl more standard way (#12521) PRT comment adding using curl more standard way --- .github/workflows/auto_cherry_pick.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/auto_cherry_pick.yml b/.github/workflows/auto_cherry_pick.yml index 14a833977b3..170cb4cc2a9 100644 --- a/.github/workflows/auto_cherry_pick.yml +++ b/.github/workflows/auto_cherry_pick.yml @@ -77,11 +77,14 @@ jobs: - name: Add Parent PR's PRT comment to Auto_Cherry_Picked PR's id: add-parent-prt-comment if: ${{ always() && steps.cherrypick.outcome == 'success' }} - uses: mshick/add-pr-comment@v2 - with: - issue: ${{ steps.cherrypick.outputs.number }} - message: ${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} - repo-token: ${{ secrets.CHERRYPICK_PAT }} + run: | + ISSUE_NUMBER=${{ steps.cherrypick.outputs.number }} + COMMENT_BODY=${{ needs.find-the-parent-prt-comment.outputs.prt_comment }} + + curl -X POST -H "Authorization: token ${{ secrets.CHERRYPICK_PAT }}" \ + -H "Accept: 
application/vnd.github.v3+json" \ + "https://api.github.com/repos/${{ github.repository }}/issues/${ISSUE_NUMBER}/comments" \ + -d "{\"body\":\"$COMMENT_BODY\"}" - name: is autoMerging enabled for Auto CherryPicked PRs ? if: ${{ always() && steps.cherrypick.outcome == 'success' && contains(github.event.pull_request.labels.*.name, 'AutoMerge_Cherry_Picked') }} From 83ca28419c129aad304d49c89f758af36b9a74b7 Mon Sep 17 00:00:00 2001 From: Jake Callahan Date: Thu, 2 Nov 2023 10:06:44 -0400 Subject: [PATCH 71/96] Add Ruff pytest standards (#12796) This is a big one! Most of the changes are automatic, but a number of these are manual. I've deselected rules relating to fixture naming, but we can have a conversation later about whether we want to adopt the underscore naming conventions for fixtures. --- pyproject.toml | 6 ++++ pytest_fixtures/component/http_proxy.py | 2 +- .../component/katello_certs_check.py | 2 +- pytest_fixtures/component/lce.py | 4 +-- pytest_fixtures/component/maintain.py | 4 +-- pytest_fixtures/component/provision_azure.py | 8 ++--- pytest_fixtures/component/provision_gce.py | 10 +++--- .../component/provision_libvirt.py | 2 +- pytest_fixtures/component/puppet.py | 8 ++--- pytest_fixtures/component/repository.py | 6 ++-- pytest_fixtures/component/rh_cloud.py | 6 ++-- pytest_fixtures/component/satellite_auth.py | 14 ++++---- pytest_fixtures/component/settings.py | 2 +- pytest_fixtures/component/taxonomy.py | 8 ++--- pytest_fixtures/component/templatesync.py | 6 ++-- pytest_fixtures/core/contenthosts.py | 10 +++--- pytest_fixtures/core/sat_cap_factory.py | 12 +++---- pytest_fixtures/core/sys.py | 2 +- pytest_fixtures/core/upgrade.py | 4 +-- tests/foreman/api/test_activationkey.py | 2 +- tests/foreman/api/test_ansible.py | 4 +-- tests/foreman/api/test_bookmarks.py | 4 +-- tests/foreman/api/test_capsulecontent.py | 2 +- tests/foreman/api/test_classparameters.py | 10 +++--- .../api/test_computeresource_libvirt.py | 6 ++-- 
tests/foreman/api/test_contentview.py | 6 ++-- tests/foreman/api/test_discoveredhost.py | 2 +- tests/foreman/api/test_docker.py | 2 +- tests/foreman/api/test_foremantask.py | 4 +-- tests/foreman/api/test_http_proxy.py | 2 +- .../foreman/api/test_lifecycleenvironment.py | 2 +- tests/foreman/api/test_media.py | 2 +- tests/foreman/api/test_partitiontable.py | 4 +-- tests/foreman/api/test_permission.py | 13 ++++---- tests/foreman/api/test_repositories.py | 8 ++--- tests/foreman/api/test_repository.py | 3 +- tests/foreman/api/test_rhcloud_inventory.py | 3 +- tests/foreman/api/test_rhsm.py | 2 +- tests/foreman/api/test_role.py | 7 ++-- tests/foreman/api/test_subnet.py | 2 +- tests/foreman/api/test_user.py | 4 +-- tests/foreman/cli/test_activationkey.py | 6 ++-- tests/foreman/cli/test_computeresource_osp.py | 2 +- tests/foreman/cli/test_contentview.py | 2 +- tests/foreman/cli/test_discoveryrule.py | 2 +- tests/foreman/cli/test_errata.py | 16 ++++----- tests/foreman/cli/test_filter.py | 2 +- tests/foreman/cli/test_host.py | 33 +++++++++---------- tests/foreman/cli/test_ldapauthsource.py | 2 +- tests/foreman/cli/test_leapp_client.py | 2 +- tests/foreman/cli/test_model.py | 4 +-- tests/foreman/cli/test_partitiontable.py | 2 +- tests/foreman/cli/test_remoteexecution.py | 10 +++--- tests/foreman/cli/test_reporttemplates.py | 2 +- tests/foreman/cli/test_role.py | 13 ++++---- tests/foreman/cli/test_satellitesync.py | 32 +++++++++--------- tests/foreman/cli/test_subnet.py | 8 ++--- tests/foreman/cli/test_user.py | 2 +- tests/foreman/cli/test_usergroup.py | 18 +++++----- tests/foreman/cli/test_webhook.py | 2 +- .../destructive/test_capsule_loadbalancer.py | 2 +- tests/foreman/destructive/test_infoblox.py | 2 +- .../destructive/test_ldap_authentication.py | 18 +++++----- .../destructive/test_ldapauthsource.py | 14 +++----- tests/foreman/destructive/test_ping.py | 2 +- tests/foreman/maintain/test_health.py | 10 +++--- tests/foreman/sys/test_katello_certs_check.py | 4 +-- 
tests/foreman/ui/test_acs.py | 2 +- tests/foreman/ui/test_contenthost.py | 7 ++-- tests/foreman/ui/test_contentview_old.py | 24 ++++++++------ tests/foreman/ui/test_discoveryrule.py | 2 +- tests/foreman/ui/test_errata.py | 4 +-- tests/foreman/ui/test_host.py | 8 ++--- tests/foreman/ui/test_jobinvocation.py | 2 +- tests/foreman/ui/test_ldap_authentication.py | 16 ++++----- tests/foreman/ui/test_partitiontable.py | 2 +- tests/foreman/ui/test_provisioningtemplate.py | 2 +- tests/foreman/ui/test_repository.py | 11 ++++--- tests/foreman/ui/test_rhc.py | 2 +- tests/foreman/ui/test_settings.py | 12 +++---- tests/foreman/ui/test_subscription.py | 3 +- tests/foreman/virtwho/api/test_esx.py | 5 ++- tests/foreman/virtwho/api/test_esx_sca.py | 5 ++- tests/foreman/virtwho/api/test_nutanix.py | 5 ++- tests/foreman/virtwho/cli/test_esx.py | 9 +++-- tests/foreman/virtwho/cli/test_esx_sca.py | 9 +++-- tests/foreman/virtwho/cli/test_hyperv.py | 4 +-- tests/foreman/virtwho/cli/test_hyperv_sca.py | 4 +-- tests/foreman/virtwho/cli/test_kubevirt.py | 4 +-- .../foreman/virtwho/cli/test_kubevirt_sca.py | 4 +-- tests/foreman/virtwho/cli/test_libvirt.py | 4 +-- tests/foreman/virtwho/cli/test_libvirt_sca.py | 4 +-- tests/foreman/virtwho/cli/test_nutanix.py | 9 +++-- tests/foreman/virtwho/cli/test_nutanix_sca.py | 4 +-- tests/foreman/virtwho/conftest.py | 4 +-- tests/foreman/virtwho/ui/test_esx.py | 5 ++- tests/foreman/virtwho/ui/test_esx_sca.py | 5 ++- tests/foreman/virtwho/ui/test_nutanix.py | 5 ++- tests/foreman/virtwho/ui/test_nutanix_sca.py | 5 ++- tests/robottelo/conftest.py | 4 +-- tests/robottelo/test_cli.py | 2 +- tests/robottelo/test_datafactory.py | 2 +- tests/robottelo/test_decorators.py | 2 +- tests/robottelo/test_func_locker.py | 6 ++-- tests/robottelo/test_func_shared.py | 6 ++-- tests/robottelo/test_issue_handlers.py | 2 +- tests/upgrades/test_activation_key.py | 4 +-- tests/upgrades/test_classparameter.py | 9 ++--- tests/upgrades/test_client.py | 2 +- 
tests/upgrades/test_host.py | 2 +- 110 files changed, 330 insertions(+), 324 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b042ee54c72..7988aa2a58d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,12 +25,15 @@ select = [ "F", # flake8 "I", # isort # "Q", # flake8-quotes + "PT", # flake8-pytest "UP", # pyupgrade "W", # pycodestyle ] ignore = [ "E501", # line too long - handled by black + "PT004", # pytest underscore prefix for non-return fixtures + "PT005", # pytest no underscore prefix for return fixtures ] [tool.ruff.isort] known-first-party = [ ] combine-as-imports = true +[tool.ruff.flake8-pytest-style] +fixture-parentheses = false +mark-parentheses = false [tool.ruff.flake8-quotes] inline-quotes = "single" diff --git a/pytest_fixtures/component/http_proxy.py b/pytest_fixtures/component/http_proxy.py index ee8efc9c16a..8c6095092dd 100644 --- a/pytest_fixtures/component/http_proxy.py +++ b/pytest_fixtures/component/http_proxy.py @@ -3,7 +3,7 @@ from robottelo.config import settings -@pytest.fixture(scope='function') +@pytest.fixture def setup_http_proxy(request, module_manifest_org, target_sat): """Create a new HTTP proxy and set related settings based on proxy""" http_proxy = target_sat.api_factory.make_http_proxy(module_manifest_org, request.param) diff --git a/pytest_fixtures/component/katello_certs_check.py b/pytest_fixtures/component/katello_certs_check.py index f14299ebd52..1ee97acc565 100644 --- a/pytest_fixtures/component/katello_certs_check.py +++ b/pytest_fixtures/component/katello_certs_check.py @@ -13,7 +13,7 @@ def certs_data(sat_ready_rhel): cert_data['key_file_name'] = f'{sat_ready_rhel.hostname}/{sat_ready_rhel.hostname}.key' cert_data['cert_file_name'] = f'{sat_ready_rhel.hostname}/{sat_ready_rhel.hostname}.crt' sat_ready_rhel.custom_cert_generate(cert_data['capsule_hostname']) - yield cert_data + return cert_data @pytest.fixture diff --git a/pytest_fixtures/component/lce.py 
b/pytest_fixtures/component/lce.py index 368c1329131..2a3f113e9c8 100644 --- a/pytest_fixtures/component/lce.py +++ b/pytest_fixtures/component/lce.py @@ -16,7 +16,7 @@ def module_lce(module_org, module_target_sat): return module_target_sat.api.LifecycleEnvironment(organization=module_org).create() -@pytest.fixture(scope='function') +@pytest.fixture def function_lce(function_org, target_sat): return target_sat.api.LifecycleEnvironment(organization=function_org).create() @@ -31,7 +31,7 @@ def module_lce_library(module_org, module_target_sat): ) -@pytest.fixture(scope='function') +@pytest.fixture def function_lce_library(function_org, target_sat): """Returns the Library lifecycle environment from chosen organization""" return ( diff --git a/pytest_fixtures/component/maintain.py b/pytest_fixtures/component/maintain.py index 152656f8dc1..844f522de66 100644 --- a/pytest_fixtures/component/maintain.py +++ b/pytest_fixtures/component/maintain.py @@ -18,7 +18,7 @@ def module_stash(request): # Please refer the documentation for more details on stash # https://docs.pytest.org/en/latest/reference/reference.html#stash request.node.stash[synced_repos] = {} - yield request.node.stash + return request.node.stash @pytest.fixture(scope='module') @@ -98,7 +98,7 @@ def module_synced_repos(sat_maintain, module_capsule_configured, module_sca_mani sync_status = module_capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success' - yield { + return { 'custom': module_stash[synced_repos]['cust_repo'], 'rh': module_stash[synced_repos]['rh_repo'], } diff --git a/pytest_fixtures/component/provision_azure.py b/pytest_fixtures/component/provision_azure.py index 0483758b51a..27bcea16e1f 100644 --- a/pytest_fixtures/component/provision_azure.py +++ b/pytest_fixtures/component/provision_azure.py @@ -17,22 +17,22 @@ @pytest.fixture(scope='session') def sat_azure(request, session_puppet_enabled_sat, session_target_sat): hosts = {'sat': session_target_sat, 'puppet_sat': 
session_puppet_enabled_sat} - yield hosts[request.param] + return hosts[request.param] @pytest.fixture(scope='module') def sat_azure_org(sat_azure): - yield sat_azure.api.Organization().create() + return sat_azure.api.Organization().create() @pytest.fixture(scope='module') def sat_azure_loc(sat_azure): - yield sat_azure.api.Location().create() + return sat_azure.api.Location().create() @pytest.fixture(scope='module') def sat_azure_domain(sat_azure, sat_azure_loc, sat_azure_org): - yield sat_azure.api.Domain(location=[sat_azure_loc], organization=[sat_azure_org]).create() + return sat_azure.api.Domain(location=[sat_azure_loc], organization=[sat_azure_org]).create() @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/provision_gce.py b/pytest_fixtures/component/provision_gce.py index af260bdb38c..4c11ecb0918 100644 --- a/pytest_fixtures/component/provision_gce.py +++ b/pytest_fixtures/component/provision_gce.py @@ -20,22 +20,22 @@ @pytest.fixture(scope='session') def sat_gce(request, session_puppet_enabled_sat, session_target_sat): hosts = {'sat': session_target_sat, 'puppet_sat': session_puppet_enabled_sat} - yield hosts[getattr(request, 'param', 'sat')] + return hosts[getattr(request, 'param', 'sat')] @pytest.fixture(scope='module') def sat_gce_org(sat_gce): - yield sat_gce.api.Organization().create() + return sat_gce.api.Organization().create() @pytest.fixture(scope='module') def sat_gce_loc(sat_gce): - yield sat_gce.api.Location().create() + return sat_gce.api.Location().create() @pytest.fixture(scope='module') def sat_gce_domain(sat_gce, sat_gce_loc, sat_gce_org): - yield sat_gce.api.Domain(location=[sat_gce_loc], organization=[sat_gce_org]).create() + return sat_gce.api.Domain(location=[sat_gce_loc], organization=[sat_gce_org]).create() @pytest.fixture(scope='module') @@ -282,7 +282,7 @@ def module_gce_finishimg( return finish_image -@pytest.fixture() +@pytest.fixture def gce_setting_update(sat_gce): 
sat_gce.update_setting('destroy_vm_on_host_delete', True) yield diff --git a/pytest_fixtures/component/provision_libvirt.py b/pytest_fixtures/component/provision_libvirt.py index d8638d9a32a..b294ca27a29 100644 --- a/pytest_fixtures/component/provision_libvirt.py +++ b/pytest_fixtures/component/provision_libvirt.py @@ -18,4 +18,4 @@ def module_libvirt_image(module_target_sat, module_cr_libvirt): def module_libvirt_provisioning_sat(module_provisioning_sat): # Configure Libvirt CR for provisioning module_provisioning_sat.sat.configure_libvirt_cr() - yield module_provisioning_sat + return module_provisioning_sat diff --git a/pytest_fixtures/component/puppet.py b/pytest_fixtures/component/puppet.py index f088e05d0a8..0ff20685674 100644 --- a/pytest_fixtures/component/puppet.py +++ b/pytest_fixtures/component/puppet.py @@ -18,22 +18,22 @@ def session_puppet_enabled_sat(session_satellite_host): def session_puppet_enabled_capsule(session_capsule_host, session_puppet_enabled_sat): """Capsule with enabled puppet plugin""" session_capsule_host.capsule_setup(sat_host=session_puppet_enabled_sat) - yield session_capsule_host.enable_puppet_capsule(satellite=session_puppet_enabled_sat) + return session_capsule_host.enable_puppet_capsule(satellite=session_puppet_enabled_sat) @pytest.fixture(scope='module') def module_puppet_org(session_puppet_enabled_sat): - yield session_puppet_enabled_sat.api.Organization().create() + return session_puppet_enabled_sat.api.Organization().create() @pytest.fixture(scope='module') def module_puppet_loc(session_puppet_enabled_sat): - yield session_puppet_enabled_sat.api.Location().create() + return session_puppet_enabled_sat.api.Location().create() @pytest.fixture(scope='module') def module_puppet_domain(session_puppet_enabled_sat, module_puppet_loc, module_puppet_org): - yield session_puppet_enabled_sat.api.Domain( + return session_puppet_enabled_sat.api.Domain( location=[module_puppet_loc], organization=[module_puppet_org] ).create() diff --git 
a/pytest_fixtures/component/repository.py b/pytest_fixtures/component/repository.py index 209245ff649..74701c15203 100644 --- a/pytest_fixtures/component/repository.py +++ b/pytest_fixtures/component/repository.py @@ -22,7 +22,7 @@ def module_repo(module_repo_options, module_target_sat): return module_target_sat.api.Repository(**module_repo_options).create() -@pytest.fixture(scope='function') +@pytest.fixture def function_product(function_org): return entities.Product(organization=function_org).create() @@ -74,7 +74,7 @@ def module_rhst_repo(module_target_sat, module_org_with_manifest, module_promote return REPOS['rhst7']['id'] -@pytest.fixture(scope="function") +@pytest.fixture def repo_setup(): """ This fixture is used to create an organization, product, repository, and lifecycle environment @@ -86,7 +86,7 @@ def repo_setup(): repo = entities.Repository(name=repo_name, product=product).create() lce = entities.LifecycleEnvironment(organization=org).create() details = {'org': org, 'product': product, 'repo': repo, 'lce': lce} - yield details + return details @pytest.fixture(scope='module') diff --git a/pytest_fixtures/component/rh_cloud.py b/pytest_fixtures/component/rh_cloud.py index 617261c98c5..ead92f68db2 100644 --- a/pytest_fixtures/component/rh_cloud.py +++ b/pytest_fixtures/component/rh_cloud.py @@ -25,7 +25,7 @@ def rhcloud_activation_key(module_target_sat, rhcloud_manifest_org): purpose_role='test-role', auto_attach=False, ).create() - yield ak + return ak @pytest.fixture(scope='module') @@ -62,7 +62,7 @@ def rhel_insights_vm( module_target_sat.generate_inventory_report(rhcloud_manifest_org) # Sync inventory status module_target_sat.sync_inventory_status(rhcloud_manifest_org) - yield rhel_contenthost + return rhel_contenthost @pytest.fixture @@ -90,4 +90,4 @@ def rhcloud_capsule(module_capsule_host, module_target_sat, rhcloud_manifest_org 'location-ids': default_location.id, } ) - yield module_capsule_host + return module_capsule_host diff --git 
a/pytest_fixtures/component/satellite_auth.py b/pytest_fixtures/component/satellite_auth.py index 9c39ed3ade3..0a1031c9e25 100644 --- a/pytest_fixtures/component/satellite_auth.py +++ b/pytest_fixtures/component/satellite_auth.py @@ -35,7 +35,7 @@ def default_ipa_host(module_target_sat): return IPAHost(module_target_sat) -@pytest.fixture() +@pytest.fixture def ldap_cleanup(): """this is an extra step taken to clean any existing ldap source""" ldap_auth_sources = entities.AuthSourceLDAP().search() @@ -44,7 +44,7 @@ def ldap_cleanup(): for user in users: user.delete() ldap_auth.delete() - yield + return @pytest.fixture(scope='session') @@ -104,7 +104,7 @@ def open_ldap_data(): } -@pytest.fixture(scope='function') +@pytest.fixture def auth_source(ldap_cleanup, module_org, module_location, ad_data): ad_data = ad_data() return entities.AuthSourceLDAP( @@ -127,7 +127,7 @@ def auth_source(ldap_cleanup, module_org, module_location, ad_data): ).create() -@pytest.fixture(scope='function') +@pytest.fixture def auth_source_ipa(ldap_cleanup, default_ipa_host, module_org, module_location): return entities.AuthSourceLDAP( onthefly_register=True, @@ -149,7 +149,7 @@ def auth_source_ipa(ldap_cleanup, default_ipa_host, module_org, module_location) ).create() -@pytest.fixture(scope='function') +@pytest.fixture def auth_source_open_ldap(ldap_cleanup, module_org, module_location, open_ldap_data): return entities.AuthSourceLDAP( onthefly_register=True, @@ -259,7 +259,7 @@ def ldap_auth_source( else: ldap_data['server_type'] = LDAP_SERVER_TYPE['UI']['posix'] ldap_data['attr_login'] = LDAP_ATTR['login'] - yield ldap_data, auth_source + return ldap_data, auth_source @pytest.fixture @@ -459,7 +459,7 @@ def configure_hammer_no_negotiate(parametrized_enrolled_sat): @pytest.mark.external_auth -@pytest.fixture(scope='function') +@pytest.fixture def hammer_logout(parametrized_enrolled_sat): """Logout in Hammer.""" result = parametrized_enrolled_sat.cli.Auth.logout() diff --git 
a/pytest_fixtures/component/settings.py b/pytest_fixtures/component/settings.py index 937379bd6d6..b541e703733 100644 --- a/pytest_fixtures/component/settings.py +++ b/pytest_fixtures/component/settings.py @@ -2,7 +2,7 @@ import pytest -@pytest.fixture() +@pytest.fixture def setting_update(request, target_sat): """ This fixture is used to create an object of the provided settings parameter that we use in diff --git a/pytest_fixtures/component/taxonomy.py b/pytest_fixtures/component/taxonomy.py index c6140cebb63..ebfc9126eac 100644 --- a/pytest_fixtures/component/taxonomy.py +++ b/pytest_fixtures/component/taxonomy.py @@ -212,7 +212,7 @@ def class_sca_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture def function_entitlement_manifest(): """Yields a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml.""" @@ -220,7 +220,7 @@ def function_entitlement_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture def function_secondary_entitlement_manifest(): """Yields a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml. @@ -229,7 +229,7 @@ def function_secondary_entitlement_manifest(): yield manifest -@pytest.fixture(scope='function') +@pytest.fixture def function_sca_manifest(): """Yields a manifest in Simple Content Access mode with subscriptions determined by the `manifest_category.golden_ticket` setting in conf/manifest.yaml.""" @@ -245,7 +245,7 @@ def smart_proxy_location(module_org, module_target_sat, default_smart_proxy): return location -@pytest.fixture(scope='function') +@pytest.fixture def upgrade_entitlement_manifest(): """Returns a manifest in entitlement mode with subscriptions determined by the `manifest_category.entitlement` setting in conf/manifest.yaml. 
used only for diff --git a/pytest_fixtures/component/templatesync.py b/pytest_fixtures/component/templatesync.py index 3dee193cdd3..1e195ec838f 100644 --- a/pytest_fixtures/component/templatesync.py +++ b/pytest_fixtures/component/templatesync.py @@ -7,7 +7,7 @@ from robottelo.logging import logger -@pytest.fixture() +@pytest.fixture def create_import_export_local_dir(target_sat): """Creates a local directory inside root_dir on satellite from where the templates will be imported from or exported to. @@ -76,7 +76,7 @@ def git_pub_key(session_target_sat, git_port): res.raise_for_status() -@pytest.fixture(scope='function') +@pytest.fixture def git_repository(git_port, git_pub_key, request): """Creates a new repository on git provider for exporting templates. @@ -96,7 +96,7 @@ def git_repository(git_port, git_pub_key, request): res.raise_for_status() -@pytest.fixture() +@pytest.fixture def git_branch(git_repository): """Creates a new branch in the git repository for exporting templates. diff --git a/pytest_fixtures/core/contenthosts.py b/pytest_fixtures/core/contenthosts.py index 586fa95eb57..485591ed7d9 100644 --- a/pytest_fixtures/core/contenthosts.py +++ b/pytest_fixtures/core/contenthosts.py @@ -94,7 +94,7 @@ def rhel9_contenthost(request): yield host -@pytest.fixture() +@pytest.fixture def content_hosts(request): """A function-level fixture that provides two rhel content hosts object""" with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: @@ -110,7 +110,7 @@ def mod_content_hosts(request): yield hosts -@pytest.fixture() +@pytest.fixture def registered_hosts(request, target_sat, module_org, module_ak_with_cv): """Fixture that registers content hosts to Satellite, based on rh_cloud setup""" with Broker(**host_conf(request), host_class=ContentHost, _count=2) as hosts: @@ -134,7 +134,7 @@ def katello_host_tools_host(target_sat, module_org, rhel_contenthost): rhel_contenthost.register(module_org, None, ak.name, target_sat, repo=repo) 
rhel_contenthost.install_katello_host_tools() - yield rhel_contenthost + return rhel_contenthost @pytest.fixture @@ -149,7 +149,7 @@ def cockpit_host(class_target_sat, class_org, rhel_contenthost): rhel_contenthost.execute(f"hostnamectl set-hostname {rhel_contenthost.hostname} --static") rhel_contenthost.install_cockpit() rhel_contenthost.add_rex_key(satellite=class_target_sat) - yield rhel_contenthost + return rhel_contenthost @pytest.fixture @@ -174,7 +174,7 @@ def katello_host_tools_tracer_host(rex_contenthost, target_sat): **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']} ) rex_contenthost.install_tracer() - yield rex_contenthost + return rex_contenthost @pytest.fixture diff --git a/pytest_fixtures/core/sat_cap_factory.py b/pytest_fixtures/core/sat_cap_factory.py index f08e722cbbf..2fbb8cec7b6 100644 --- a/pytest_fixtures/core/sat_cap_factory.py +++ b/pytest_fixtures/core/sat_cap_factory.py @@ -150,28 +150,28 @@ def session_capsule_host(request, capsule_factory): def capsule_configured(capsule_host, target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" capsule_host.capsule_setup(sat_host=target_sat) - yield capsule_host + return capsule_host @pytest.fixture def large_capsule_configured(large_capsule_host, target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" large_capsule_host.capsule_setup(sat_host=target_sat) - yield large_capsule_host + return large_capsule_host @pytest.fixture(scope='module') def module_capsule_configured(module_capsule_host, module_target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" module_capsule_host.capsule_setup(sat_host=module_target_sat) - yield module_capsule_host + return module_capsule_host @pytest.fixture(scope='session') def session_capsule_configured(session_capsule_host, session_target_sat): """Configure the capsule instance with the satellite from settings.server.hostname""" 
session_capsule_host.capsule_setup(sat_host=session_target_sat) - yield session_capsule_host + return session_capsule_host @pytest.fixture(scope='module') @@ -186,7 +186,7 @@ def module_capsule_configured_mqtt(module_capsule_configured): result = module_capsule_configured.execute('firewall-cmd --permanent --add-port="1883/tcp"') assert result.status == 0, 'Failed to open mqtt port on capsule' module_capsule_configured.execute('firewall-cmd --reload') - yield module_capsule_configured + return module_capsule_configured @pytest.fixture(scope='module') @@ -218,7 +218,7 @@ def module_capsule_configured_async_ssh(module_capsule_configured): """Configure the capsule instance with the satellite from settings.server.hostname, enable MQTT broker""" module_capsule_configured.set_rex_script_mode_provider('ssh-async') - yield module_capsule_configured + return module_capsule_configured @pytest.fixture(scope='module', params=['IDM', 'AD']) diff --git a/pytest_fixtures/core/sys.py b/pytest_fixtures/core/sys.py index b9106eac7ad..88768508ee4 100644 --- a/pytest_fixtures/core/sys.py +++ b/pytest_fixtures/core/sys.py @@ -54,7 +54,7 @@ def puppet_proxy_port_range(session_puppet_enabled_sat): @pytest.fixture(scope='class') def class_cockpit_sat(class_subscribe_satellite): class_subscribe_satellite.install_cockpit() - yield class_subscribe_satellite + return class_subscribe_satellite @pytest.fixture(scope='module') diff --git a/pytest_fixtures/core/upgrade.py b/pytest_fixtures/core/upgrade.py index a79f4867979..caf4532713b 100644 --- a/pytest_fixtures/core/upgrade.py +++ b/pytest_fixtures/core/upgrade.py @@ -5,7 +5,7 @@ from robottelo.logging import logger -@pytest.fixture(scope="function") +@pytest.fixture def dependent_scenario_name(request): """ This fixture is used to collect the dependent test case name. 
@@ -15,7 +15,7 @@ def dependent_scenario_name(request): for mark in request.node.own_markers if 'depend_on' in mark.kwargs ][0] - yield depend_test_name + return depend_test_name @pytest.fixture(scope="session") diff --git a/tests/foreman/api/test_activationkey.py b/tests/foreman/api/test_activationkey.py index 96da8a8f4c8..ae5cb15125a 100644 --- a/tests/foreman/api/test_activationkey.py +++ b/tests/foreman/api/test_activationkey.py @@ -296,7 +296,7 @@ def test_positive_get_releases_content(): act_key = entities.ActivationKey().create() response = client.get(act_key.path('releases'), auth=get_credentials(), verify=False).json() assert 'results' in response.keys() - assert type(response['results']) == list + assert isinstance(response['results'], list) @pytest.mark.tier2 diff --git a/tests/foreman/api/test_ansible.py b/tests/foreman/api/test_ansible.py index b01cab6401a..a842ec5c3ca 100644 --- a/tests/foreman/api/test_ansible.py +++ b/tests/foreman/api/test_ansible.py @@ -257,7 +257,7 @@ def test_add_and_remove_ansible_role_hostgroup(target_sat): assert len(host_roles) == 0 -@pytest.fixture(scope='function') +@pytest.fixture def filtered_user(target_sat, module_org, module_location): """ :Steps: @@ -286,7 +286,7 @@ def filtered_user(target_sat, module_org, module_location): return user, password -@pytest.fixture(scope='function') +@pytest.fixture def rex_host_in_org_and_loc(target_sat, module_org, module_location, rex_contenthost): api = target_sat.api host = api.Host().search(query={'search': f'name={rex_contenthost.hostname}'})[0] diff --git a/tests/foreman/api/test_bookmarks.py b/tests/foreman/api/test_bookmarks.py index 71ae30391ab..ad48053079b 100644 --- a/tests/foreman/api/test_bookmarks.py +++ b/tests/foreman/api/test_bookmarks.py @@ -83,7 +83,7 @@ def test_positive_create_with_query(controller): @pytest.mark.tier1 -@pytest.mark.parametrize('public', (True, False)) +@pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', 
CONTROLLERS) def test_positive_create_public(controller, public): """Create a public bookmark @@ -368,7 +368,7 @@ def test_negative_update_empty_query(controller): @pytest.mark.tier1 -@pytest.mark.parametrize('public', (True, False)) +@pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', CONTROLLERS) def test_positive_update_public(controller, public): """Update a bookmark public state to private and vice versa diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index 900d0803aed..acf756d02df 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -1177,7 +1177,7 @@ def test_positive_sync_CV_to_multiple_LCEs( result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments() # there can and will be LCEs from other tests and orgs, but len() >= 2 assert len(result['results']) >= 2 - assert lce1.id and lce2.id in [capsule_lce['id'] for capsule_lce in result['results']] + assert {lce1.id, lce2.id}.issubset([capsule_lce['id'] for capsule_lce in result['results']]) # Create a Content View, add the repository and publish it. cv = target_sat.api.ContentView( diff --git a/tests/foreman/api/test_classparameters.py b/tests/foreman/api/test_classparameters.py index de34f81ae11..446bdf14182 100644 --- a/tests/foreman/api/test_classparameters.py +++ b/tests/foreman/api/test_classparameters.py @@ -137,10 +137,10 @@ def test_negative_update_parameter_type(self, test_data, module_puppet): 2. Error raised for invalid default value. 
""" sc_param = module_puppet['sc_params'].pop() + sc_param.override = True + sc_param.parameter_type = test_data['sc_type'] + sc_param.default_value = test_data['value'] with pytest.raises(HTTPError) as context: - sc_param.override = True - sc_param.parameter_type = test_data['sc_type'] - sc_param.default_value = test_data['value'] sc_param.update(['override', 'parameter_type', 'default_value']) assert sc_param.read().default_value != test_data['value'] assert 'Validation failed: Default value is invalid' in context.value.response.text @@ -370,9 +370,9 @@ def test_negative_validate_matcher_and_default_value( session_puppet_enabled_sat.api.OverrideValue( smart_class_parameter=sc_param, match='domain=example.com', value=gen_string('alpha') ).create() + sc_param.parameter_type = 'boolean' + sc_param.default_value = gen_string('alpha') with pytest.raises(HTTPError) as context: - sc_param.parameter_type = 'boolean' - sc_param.default_value = gen_string('alpha') sc_param.update(['parameter_type', 'default_value']) assert ( 'Validation failed: Default value is invalid, Lookup values is invalid' diff --git a/tests/foreman/api/test_computeresource_libvirt.py b/tests/foreman/api/test_computeresource_libvirt.py index e7e0112a0d8..a02d741e899 100644 --- a/tests/foreman/api/test_computeresource_libvirt.py +++ b/tests/foreman/api/test_computeresource_libvirt.py @@ -244,8 +244,8 @@ def test_negative_update_invalid_name( location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() request.addfinalizer(compresource.delete) + compresource.name = new_name with pytest.raises(HTTPError): - compresource.name = new_name compresource.update(['name']) assert compresource.read().name == name @@ -269,8 +269,8 @@ def test_negative_update_same_name(request, module_target_sat, module_org, modul new_compresource = module_target_sat.api.LibvirtComputeResource( location=[module_location], organization=[module_org], url=LIBVIRT_URL ).create() + new_compresource.name 
= name with pytest.raises(HTTPError): - new_compresource.name = name new_compresource.update(['name']) assert new_compresource.read().name != name @@ -299,7 +299,7 @@ def test_negative_update_url(url, request, module_target_sat, module_org, module location=[module_location], organization=[module_org], url=LIBVIRT_URL ).create() request.addfinalizer(compresource.delete) + compresource.url = url with pytest.raises(HTTPError): - compresource.url = url compresource.update(['url']) assert compresource.read().url != url diff --git a/tests/foreman/api/test_contentview.py b/tests/foreman/api/test_contentview.py index 0e1e94c88d4..4bf80287c3f 100644 --- a/tests/foreman/api/test_contentview.py +++ b/tests/foreman/api/test_contentview.py @@ -241,8 +241,8 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) yum_repo = entities.Repository(product=module_product).create() yum_repo.sync() assert len(content_view.repository) == 0 + content_view.repository = [yum_repo, yum_repo] with pytest.raises(HTTPError): - content_view.repository = [yum_repo, yum_repo] content_view.update(['repository']) assert len(content_view.read().repository) == 0 @@ -854,7 +854,7 @@ class TestContentViewUpdate: """Tests for updating content views.""" @pytest.mark.parametrize( - 'key, value', + ('key', 'value'), **(lambda x: {'argvalues': list(x.items()), 'ids': list(x.keys())})( {'description': gen_utf8(), 'name': gen_utf8()} ), @@ -908,8 +908,8 @@ def test_negative_update_name(self, module_cv, new_name): :CaseImportance: Critical """ + module_cv.name = new_name with pytest.raises(HTTPError): - module_cv.name = new_name module_cv.update(['name']) cv = module_cv.read() assert cv.name != new_name diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index e61bf069b0a..70beeba274e 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -157,7 +157,7 @@ def 
assert_discovered_host_provisioned(channel, ksrepo): raise AssertionError(f'Timed out waiting for {pattern} from VM') -@pytest.fixture() +@pytest.fixture def discovered_host_cleanup(target_sat): hosts = target_sat.api.DiscoveredHost().search() for host in hosts: diff --git a/tests/foreman/api/test_docker.py b/tests/foreman/api/test_docker.py index e87dac66fb6..51a709d7bdc 100644 --- a/tests/foreman/api/test_docker.py +++ b/tests/foreman/api/test_docker.py @@ -899,8 +899,8 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): cvv = content_view.read().version[0] lce = entities.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) + lce.registry_name_pattern = new_pattern with pytest.raises(HTTPError): - lce.registry_name_pattern = new_pattern lce.update(['registry_name_pattern']) diff --git a/tests/foreman/api/test_foremantask.py b/tests/foreman/api/test_foremantask.py index f7e8377e82f..23076dd5751 100644 --- a/tests/foreman/api/test_foremantask.py +++ b/tests/foreman/api/test_foremantask.py @@ -48,6 +48,6 @@ def test_positive_get_summary(): :CaseImportance: Critical """ summary = entities.ForemanTask().summary() - assert type(summary) is list + assert isinstance(summary, list) for item in summary: - assert type(item) is dict + assert isinstance(item, dict) diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index f9c3023c77e..b5ed102ed9d 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -293,7 +293,7 @@ def test_positive_sync_proxy_with_certificate(request, target_sat, module_org, m # Create and fetch new cerfiticate target_sat.custom_cert_generate(proxy_host) cacert = target_sat.execute(f'cat {cacert_path}').stdout - assert 'BEGIN CERTIFICATE' and 'END CERTIFICATE' in cacert + assert 'END CERTIFICATE' in cacert # Create http-proxy and repository http_proxy = target_sat.api.HTTPProxy( diff --git 
a/tests/foreman/api/test_lifecycleenvironment.py b/tests/foreman/api/test_lifecycleenvironment.py index b8e7b3ef347..a437d5b8a69 100644 --- a/tests/foreman/api/test_lifecycleenvironment.py +++ b/tests/foreman/api/test_lifecycleenvironment.py @@ -165,8 +165,8 @@ def test_negative_update_name(module_lce, new_name): :parametrized: yes """ + module_lce.name = new_name with pytest.raises(HTTPError): - module_lce.name = new_name module_lce.update(['name']) lce = module_lce.read() assert lce.name != new_name diff --git a/tests/foreman/api/test_media.py b/tests/foreman/api/test_media.py index e5524d914fd..89514610e72 100644 --- a/tests/foreman/api/test_media.py +++ b/tests/foreman/api/test_media.py @@ -41,7 +41,7 @@ def class_media(self, module_org): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) ) def test_positive_crud_with_name(self, module_org, name, new_name): diff --git a/tests/foreman/api/test_partitiontable.py b/tests/foreman/api/test_partitiontable.py index b1f64b8f5ac..6d78ba1135b 100644 --- a/tests/foreman/api/test_partitiontable.py +++ b/tests/foreman/api/test_partitiontable.py @@ -60,7 +60,7 @@ def test_positive_create_with_one_character_name(self, target_sat, name): @pytest.mark.tier1 @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized( list( zip( @@ -95,7 +95,7 @@ def test_positive_crud_with_name(self, target_sat, name, new_name): @pytest.mark.tier1 @pytest.mark.parametrize( - 'layout, new_layout', **parametrized(list(zip(valid_data_list(), valid_data_list()))) + ('layout', 'new_layout'), **parametrized(list(zip(valid_data_list(), valid_data_list()))) ) def test_positive_create_update_with_layout(self, target_sat, layout, new_layout): """Create new and update partition tables using different inputs as a diff --git a/tests/foreman/api/test_permission.py 
b/tests/foreman/api/test_permission.py index f8eb16a1500..5778682965c 100644 --- a/tests/foreman/api/test_permission.py +++ b/tests/foreman/api/test_permission.py @@ -366,13 +366,14 @@ def test_positive_check_update(self, entity_cls, class_org, class_location): new_entity = self.set_taxonomies(entity_cls(), class_org, class_location) new_entity = new_entity.create() name = new_entity.get_fields()['name'].gen_value() + if entity_cls is entities.ActivationKey: + update_entity = entity_cls( + self.cfg, id=new_entity.id, name=name, organization=class_org + ) + else: + update_entity = entity_cls(self.cfg, id=new_entity.id, name=name) with pytest.raises(HTTPError): - if entity_cls is entities.ActivationKey: - entity_cls(self.cfg, id=new_entity.id, name=name, organization=class_org).update( - ['name'] - ) - else: - entity_cls(self.cfg, id=new_entity.id, name=name).update(['name']) + update_entity.update(['name']) self.give_user_permission(_permission_name(entity_cls, 'update')) # update() calls read() under the hood, which triggers # permission error diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 1303de40c27..864468c8318 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -65,10 +65,10 @@ def test_negative_disable_repository_with_cv(module_entitlement_manifest_org, ta with pytest.raises(HTTPError) as error: reposet.disable(data=data) # assert error.value.response.status_code == 500 - assert ( - 'Repository cannot be deleted since it has already been ' - 'included in a published Content View' in error.value.response.text - ) + assert ( + 'Repository cannot be deleted since it has already been ' + 'included in a published Content View' in error.value.response.text + ) @pytest.mark.tier1 diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index e6f03383dc2..56f77e55ba4 100644 --- a/tests/foreman/api/test_repository.py +++ 
b/tests/foreman/api/test_repository.py @@ -1187,7 +1187,8 @@ def test_positive_recreate_pulp_repositories(self, module_entitlement_manifest_o f' --key /etc/foreman/client_key.pem' ) command_output = target_sat.execute('foreman-rake katello:correct_repositories COMMIT=true') - assert 'Recreating' in command_output.stdout and 'TaskError' not in command_output.stdout + assert 'Recreating' in command_output.stdout + assert 'TaskError' not in command_output.stdout @pytest.mark.tier2 def test_positive_mirroring_policy(self, target_sat): diff --git a/tests/foreman/api/test_rhcloud_inventory.py b/tests/foreman/api/test_rhcloud_inventory.py index de1fe67d8cb..67bd321c073 100644 --- a/tests/foreman/api/test_rhcloud_inventory.py +++ b/tests/foreman/api/test_rhcloud_inventory.py @@ -108,7 +108,8 @@ def test_rhcloud_inventory_api_e2e( infrastructure_type = [ host['system_profile']['infrastructure_type'] for host in json_data['hosts'] ] - assert 'physical' and 'virtual' in infrastructure_type + assert 'physical' in infrastructure_type + assert 'virtual' in infrastructure_type # Verify installed packages are present in report. 
all_host_profiles = [host['system_profile'] for host in json_data['hosts']] for host_profiles in all_host_profiles: diff --git a/tests/foreman/api/test_rhsm.py b/tests/foreman/api/test_rhsm.py index 2ebd5b517e0..2194ecdb147 100644 --- a/tests/foreman/api/test_rhsm.py +++ b/tests/foreman/api/test_rhsm.py @@ -49,4 +49,4 @@ def test_positive_path(): response = client.get(path, auth=get_credentials(), verify=False) assert response.status_code == http.client.OK assert 'application/json' in response.headers['content-type'] - assert type(response.json()) is list + assert isinstance(response.json(), list) diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index 4b42408114d..233e5ffea15 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -38,7 +38,7 @@ class TestRole: @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized(list(zip(generate_strings_list(), generate_strings_list()))), ) def test_positive_crud(self, name, new_name): @@ -1245,6 +1245,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( entities.User(id=user.id).delete() with pytest.raises(HTTPError): user_role.read() + with pytest.raises(HTTPError): user.read() try: entities.Domain().search( @@ -1318,9 +1319,9 @@ def test_negative_modify_roles_by_org_admin(self, role_taxonomies, target_sat): test_role = entities.Role().create() sc = self.user_config(user, target_sat) test_role = entities.Role(sc, id=test_role.id).read() + test_role.organization = [role_taxonomies['org']] + test_role.location = [role_taxonomies['loc']] with pytest.raises(HTTPError): - test_role.organization = [role_taxonomies['org']] - test_role.location = [role_taxonomies['loc']] test_role.update(['organization', 'location']) @pytest.mark.tier2 diff --git a/tests/foreman/api/test_subnet.py b/tests/foreman/api/test_subnet.py index c5188d12808..8c1fb2f7f8e 100644 --- 
a/tests/foreman/api/test_subnet.py +++ b/tests/foreman/api/test_subnet.py @@ -343,8 +343,8 @@ def test_negative_update_parameter(new_name): sub_param = entities.Parameter( name=gen_string('utf8'), subnet=subnet.id, value=gen_string('utf8') ).create() + sub_param.name = new_name with pytest.raises(HTTPError): - sub_param.name = new_name sub_param.update(['name']) diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index eff47fbba0f..626604be526 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -208,8 +208,8 @@ def test_negative_update_username(self, create_user, login): :CaseImportance: Critical """ + create_user.login = login with pytest.raises(HTTPError): - create_user.login = login create_user.update(['login']) @pytest.mark.tier1 @@ -284,8 +284,8 @@ def test_negative_update_email(self, create_user, mail): :CaseImportance: Critical """ + create_user.mail = mail with pytest.raises(HTTPError): - create_user.mail = mail create_user.update(['mail']) @pytest.mark.tier1 diff --git a/tests/foreman/cli/test_activationkey.py b/tests/foreman/cli/test_activationkey.py index 126b4144d3c..3a860981e0c 100644 --- a/tests/foreman/cli/test_activationkey.py +++ b/tests/foreman/cli/test_activationkey.py @@ -269,15 +269,15 @@ def test_negative_create_with_usage_limit_with_not_integers(module_org, limit): # invalid_values.append(0.5) with pytest.raises(CLIFactoryError) as raise_ctx: make_activation_key({'organization-id': module_org.id, 'max-hosts': limit}) - if type(limit) is int: + if isinstance(limit, int): if limit < 1: assert 'Max hosts cannot be less than one' in str(raise_ctx) - if type(limit) is str: + if isinstance(limit, str): assert 'Numeric value is required.' 
in str(raise_ctx) @pytest.mark.tier3 -@pytest.mark.parametrize('invalid_values', ('-1', '-500', 0)) +@pytest.mark.parametrize('invalid_values', ['-1', '-500', 0]) def test_negative_create_with_usage_limit_with_invalid_integers(module_org, invalid_values): """Create Activation key with invalid integers Usage Limit diff --git a/tests/foreman/cli/test_computeresource_osp.py b/tests/foreman/cli/test_computeresource_osp.py index c9018995dc2..acb74283973 100644 --- a/tests/foreman/cli/test_computeresource_osp.py +++ b/tests/foreman/cli/test_computeresource_osp.py @@ -46,7 +46,7 @@ def cr_cleanup(self, cr_id, id_type, target_sat): @pytest.fixture def osp_version(request): versions = {'osp16': settings.osp.api_url.osp16, 'osp17': settings.osp.api_url.osp17} - yield versions[getattr(request, 'param', 'osp16')] + return versions[getattr(request, 'param', 'osp16')] @pytest.mark.upgrade @pytest.mark.tier3 diff --git a/tests/foreman/cli/test_contentview.py b/tests/foreman/cli/test_contentview.py index 4effe29ecf4..2f22d4aefb1 100644 --- a/tests/foreman/cli/test_contentview.py +++ b/tests/foreman/cli/test_contentview.py @@ -2473,7 +2473,7 @@ def test_positive_sub_host_with_restricted_user_perm_at_default_loc( # role info (note: view_roles is not in the required permissions) with pytest.raises(CLIReturnCodeError) as context: Role.with_user(user_name, user_password).info({'id': role['id']}) - assert '403 Forbidden' in str(context) + assert '403 Forbidden' in str(context) # Create a lifecycle environment env = cli_factory.make_lifecycle_environment({'organization-id': org['id']}) # Create a product diff --git a/tests/foreman/cli/test_discoveryrule.py b/tests/foreman/cli/test_discoveryrule.py index efdc081d79f..8657e010b87 100644 --- a/tests/foreman/cli/test_discoveryrule.py +++ b/tests/foreman/cli/test_discoveryrule.py @@ -61,7 +61,7 @@ def gen_int32(min_value=1): class TestDiscoveryRule: """Implements Foreman discovery Rules tests in CLI.""" - @pytest.fixture(scope='function') + 
@pytest.fixture def discoveryrule_factory(self, class_org, class_location, class_hostgroup): def _create_discoveryrule(org, loc, hostgroup, options=None): """Makes a new discovery rule and asserts its success""" diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index f74c434b1ee..422860b64f2 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -185,7 +185,7 @@ def hosts(request): """Deploy hosts via broker.""" num_hosts = getattr(request, 'param', 2) with Broker(nick='rhel7', host_class=ContentHost, _count=num_hosts) as hosts: - if type(hosts) is not list or len(hosts) != num_hosts: + if not isinstance(hosts, list) or len(hosts) != num_hosts: pytest.fail('Failed to provision the expected number of hosts.') yield hosts @@ -398,9 +398,9 @@ def cv_filter_cleanup(sat, filter_id, cv, org, lce): @pytest.mark.tier3 -@pytest.mark.parametrize('filter_by_hc', ('id', 'name'), ids=('hc_id', 'hc_name')) +@pytest.mark.parametrize('filter_by_hc', ['id', 'name'], ids=('hc_id', 'hc_name')) @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'title'), ids=('org_id', 'org_name', 'org_title') + 'filter_by_org', ['id', 'name', 'title'], ids=('org_id', 'org_name', 'org_title') ) @pytest.mark.no_containers def test_positive_install_by_host_collection_and_org( @@ -1031,10 +1031,10 @@ def cleanup(): @pytest.mark.tier3 -@pytest.mark.parametrize('sort_by_date', ('issued', 'updated'), ids=('issued_date', 'updated_date')) +@pytest.mark.parametrize('sort_by_date', ['issued', 'updated'], ids=('issued_date', 'updated_date')) @pytest.mark.parametrize( 'filter_by_org', - ('id', 'name', 'label', None), + ['id', 'name', 'label', None], ids=('org_id', 'org_name', 'org_label', 'no_org_filter'), ) def test_positive_list_filter_by_org_sort_by_date( @@ -1081,9 +1081,9 @@ def test_positive_list_filter_by_product_id(products_with_repos): @pytest.mark.tier3 -@pytest.mark.parametrize('filter_by_product', ('id', 'name'), 
ids=('product_id', 'product_name')) +@pytest.mark.parametrize('filter_by_product', ['id', 'name'], ids=('product_id', 'product_name')) @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'label'), ids=('org_id', 'org_name', 'org_label') + 'filter_by_org', ['id', 'name', 'label'], ids=('org_id', 'org_name', 'org_label') ) def test_positive_list_filter_by_product_and_org( products_with_repos, filter_by_product, filter_by_org @@ -1147,7 +1147,7 @@ def test_negative_list_filter_by_product_name(products_with_repos): @pytest.mark.tier3 @pytest.mark.parametrize( - 'filter_by_org', ('id', 'name', 'label'), ids=('org_id', 'org_name', 'org_label') + 'filter_by_org', ['id', 'name', 'label'], ids=('org_id', 'org_name', 'org_label') ) def test_positive_list_filter_by_org(products_with_repos, filter_by_org): """Filter errata by org id, name, or label. diff --git a/tests/foreman/cli/test_filter.py b/tests/foreman/cli/test_filter.py index da5eccb86c2..6881d8b8b01 100644 --- a/tests/foreman/cli/test_filter.py +++ b/tests/foreman/cli/test_filter.py @@ -34,7 +34,7 @@ def module_perms(): return perms -@pytest.fixture(scope='function') +@pytest.fixture def function_role(): """Create a role that a filter would be assigned""" return make_role() diff --git a/tests/foreman/cli/test_host.py b/tests/foreman/cli/test_host.py index 5453de02b53..b7378829671 100644 --- a/tests/foreman/cli/test_host.py +++ b/tests/foreman/cli/test_host.py @@ -67,7 +67,7 @@ def module_default_proxy(module_target_sat): return module_target_sat.cli.Proxy.list({'search': f'url = {module_target_sat.url}:9090'})[0] -@pytest.fixture(scope="function") +@pytest.fixture def function_host(target_sat): host_template = target_sat.api.Host() host_template.create_missing() @@ -90,7 +90,7 @@ def function_host(target_sat): Host.delete({'id': host['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_user(target_sat, function_host): """ Returns dict with user object and with password to this user @@ 
-116,7 +116,7 @@ def function_user(target_sat, function_host): user.delete() -@pytest.fixture(scope='function') +@pytest.fixture def tracer_host(katello_host_tools_tracer_host): # create a custom, rhel version-specific mock-service repo rhelver = katello_host_tools_tracer_host.os_version.major @@ -132,7 +132,7 @@ def tracer_host(katello_host_tools_tracer_host): ) katello_host_tools_tracer_host.execute(f'systemctl start {settings.repos["MOCK_SERVICE_RPM"]}') - yield katello_host_tools_tracer_host + return katello_host_tools_tracer_host def update_smart_proxy(sat, location, smart_proxy): @@ -1494,18 +1494,13 @@ def test_positive_provision_baremetal_with_uefi_secureboot(): """ -@pytest.fixture(scope="function") +@pytest.fixture def setup_custom_repo(target_sat, module_org, katello_host_tools_host, request): """Create custom repository content""" - def restore_sca_setting(): - """Restore the original SCA setting for module_org""" - module_org.sca_enable() if sca_enabled else module_org.sca_disable() - - if module_org.sca_eligible().get('simple_content_access_eligible', False): + if sca_eligible := module_org.sca_eligible().get('simple_content_access_eligible', False): sca_enabled = module_org.simple_content_access module_org.sca_disable() - request.addfinalizer(restore_sca_setting) # get package details details = {} @@ -1551,10 +1546,14 @@ def restore_sca_setting(): ) # refresh repository metadata katello_host_tools_host.subscription_manager_list_repos() - return details + if sca_eligible: + yield + module_org.sca_enable() if sca_enabled else module_org.sca_disable() + else: + return details -@pytest.fixture(scope="function") +@pytest.fixture def yum_security_plugin(katello_host_tools_host): """Enable yum-security-plugin if the distro version requires it. Rhel6 yum version does not support updating of a specific advisory out of the box. 
@@ -1825,10 +1824,10 @@ def test_positive_install_package_via_rex( # -------------------------- HOST SUBSCRIPTION SUBCOMMAND FIXTURES -------------------------- @pytest.mark.skip_if_not_set('clients') -@pytest.fixture(scope="function") +@pytest.fixture def host_subscription_client(rhel7_contenthost, target_sat): rhel7_contenthost.install_katello_ca(target_sat) - yield rhel7_contenthost + return rhel7_contenthost @pytest.fixture @@ -2503,14 +2502,14 @@ def test_positive_host_with_puppet( session_puppet_enabled_sat.cli.Host.delete({'id': host['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_proxy(session_puppet_enabled_sat, puppet_proxy_port_range): proxy = session_puppet_enabled_sat.cli_factory.make_proxy() yield proxy session_puppet_enabled_sat.cli.Proxy.delete({'id': proxy['id']}) -@pytest.fixture(scope="function") +@pytest.fixture def function_host_content_source( session_puppet_enabled_sat, session_puppet_enabled_proxy, diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index 3feed9bad43..be218a95796 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -34,7 +34,7 @@ from robottelo.utils.datafactory import generate_strings_list, parametrized -@pytest.fixture() +@pytest.fixture def ldap_tear_down(): """Teardown the all ldap settings user, usergroup and ldap delete""" yield diff --git a/tests/foreman/cli/test_leapp_client.py b/tests/foreman/cli/test_leapp_client.py index 37d1784926c..a6d547f5314 100644 --- a/tests/foreman/cli/test_leapp_client.py +++ b/tests/foreman/cli/test_leapp_client.py @@ -78,7 +78,7 @@ def module_stash(request): # Please refer the documentation for more details on stash # https://docs.pytest.org/en/latest/reference/reference.html#stash request.node.stash[synced_repos] = {} - yield request.node.stash + return request.node.stash @pytest.fixture(scope='module') diff --git a/tests/foreman/cli/test_model.py 
b/tests/foreman/cli/test_model.py index fe8050d13e9..ddfd4dfe19c 100644 --- a/tests/foreman/cli/test_model.py +++ b/tests/foreman/cli/test_model.py @@ -33,7 +33,7 @@ class TestModel: """Test class for Model CLI""" - @pytest.fixture() + @pytest.fixture def class_model(self): """Shared model for tests""" return make_model() @@ -41,7 +41,7 @@ def class_model(self): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) ) def test_positive_crud_with_name(self, name, new_name): diff --git a/tests/foreman/cli/test_partitiontable.py b/tests/foreman/cli/test_partitiontable.py index 6540a5d755b..fa8af557d36 100644 --- a/tests/foreman/cli/test_partitiontable.py +++ b/tests/foreman/cli/test_partitiontable.py @@ -51,7 +51,7 @@ def test_positive_create_with_one_character_name(self, name): @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized( list( zip( diff --git a/tests/foreman/cli/test_remoteexecution.py b/tests/foreman/cli/test_remoteexecution.py index d360f1be8f7..f9ba13f1068 100644 --- a/tests/foreman/cli/test_remoteexecution.py +++ b/tests/foreman/cli/test_remoteexecution.py @@ -65,7 +65,7 @@ def valid_feature_names(): ] -@pytest.fixture() +@pytest.fixture def fixture_sca_vmsetup(request, module_sca_manifest_org, target_sat): """Create VM and register content host to Simple Content Access organization""" if '_count' in request.param.keys(): @@ -83,10 +83,10 @@ def fixture_sca_vmsetup(request, module_sca_manifest_org, target_sat): yield client -@pytest.fixture() +@pytest.fixture def infra_host(request, target_sat, module_capsule_configured): infra_hosts = {'target_sat': target_sat, 'module_capsule_configured': module_capsule_configured} - yield infra_hosts[request.param] + return infra_hosts[request.param] def assert_job_invocation_result(invocation_command_id, 
client_hostname, expected_result='success'): @@ -924,7 +924,7 @@ def class_rexmanager_user(self, module_org): rexmanager = gen_string('alpha') make_user({'login': rexmanager, 'password': password, 'organization-ids': module_org.id}) User.add_role({'login': rexmanager, 'role': 'Remote Execution Manager'}) - yield (rexmanager, password) + return (rexmanager, password) @pytest.fixture(scope='class') def class_rexinfra_user(self, module_org): @@ -950,7 +950,7 @@ def class_rexinfra_user(self, module_org): make_filter({'role-id': role['id'], 'permissions': permissions}) User.add_role({'login': rexinfra, 'role': role['name']}) User.add_role({'login': rexinfra, 'role': 'Remote Execution Manager'}) - yield (rexinfra, password) + return (rexinfra, password) @pytest.mark.tier3 @pytest.mark.upgrade diff --git a/tests/foreman/cli/test_reporttemplates.py b/tests/foreman/cli/test_reporttemplates.py index 7f1b939f1e0..a4beba443b8 100644 --- a/tests/foreman/cli/test_reporttemplates.py +++ b/tests/foreman/cli/test_reporttemplates.py @@ -987,7 +987,7 @@ def test_negative_generate_hostpkgcompare_nonexistent_host(): 'inputs': 'Host 1 = nonexistent1, ' 'Host 2 = nonexistent2', } ) - assert "At least one of the hosts couldn't be found" in cm.exception.stderr + assert "At least one of the hosts couldn't be found" in cm.exception.stderr @pytest.mark.rhel_ver_list([7, 8, 9]) diff --git a/tests/foreman/cli/test_role.py b/tests/foreman/cli/test_role.py index feeb1c928be..21900c30523 100644 --- a/tests/foreman/cli/test_role.py +++ b/tests/foreman/cli/test_role.py @@ -44,7 +44,7 @@ class TestRole: @pytest.mark.tier1 @pytest.mark.parametrize( - 'name, new_name', + ('name', 'new_name'), **parametrized( list(zip(generate_strings_list(length=10), generate_strings_list(length=10))) ), @@ -147,14 +147,13 @@ def test_negative_list_filters_without_parameters(self): :BZ: 1296782 """ - with pytest.raises(CLIReturnCodeError) as err: - try: - Role.filters() - except CLIDataBaseError as err: - 
pytest.fail(err) + with pytest.raises((CLIReturnCodeError, CLIDataBaseError)) as err: + Role.filters() + if issubclass(err.type, CLIDataBaseError): + pytest.fail(err) assert re.search('At least one of options .* is required', err.value.msg) - @pytest.fixture() + @pytest.fixture def make_role_with_permissions(self): """Create new role with a filter""" role = make_role() diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index 1edde19862e..d667429de5a 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -67,7 +67,7 @@ def config_export_import_settings(): Settings.set({'name': 'subscription_connection_enabled', 'value': subs_conn_enabled_value}) -@pytest.fixture(scope='function') +@pytest.fixture def export_import_cleanup_function(target_sat, function_org): """Deletes export/import dirs of function org""" yield @@ -76,7 +76,7 @@ def export_import_cleanup_function(target_sat, function_org): ) -@pytest.fixture(scope='function') # perform the cleanup after each testcase of a module +@pytest.fixture # perform the cleanup after each testcase of a module def export_import_cleanup_module(target_sat, module_org): """Deletes export/import dirs of module_org""" yield @@ -85,19 +85,19 @@ def export_import_cleanup_module(target_sat, module_org): ) -@pytest.fixture(scope='function') +@pytest.fixture def function_import_org(target_sat): """Creates an Organization for content import.""" org = target_sat.api.Organization().create() - yield org + return org -@pytest.fixture(scope='function') +@pytest.fixture def function_import_org_with_manifest(target_sat, function_import_org): """Creates and sets an Organization with a brand-new manifest for content import.""" with Manifester(manifest_category=settings.manifest.golden_ticket) as manifest: target_sat.upload_manifest(function_import_org.id, manifest) - yield function_import_org + return function_import_org @pytest.fixture(scope='module') @@ 
-111,10 +111,10 @@ def module_synced_custom_repo(module_target_sat, module_org, module_product): } ) module_target_sat.cli.Repository.synchronize({'id': repo['id']}) - yield repo + return repo -@pytest.fixture(scope='function') +@pytest.fixture def function_synced_custom_repo(target_sat, function_org, function_product): repo = target_sat.cli_factory.make_repository( { @@ -125,10 +125,10 @@ def function_synced_custom_repo(target_sat, function_org, function_product): } ) target_sat.cli.Repository.synchronize({'id': repo['id']}) - yield repo + return repo -@pytest.fixture(scope='function') +@pytest.fixture def function_synced_rhel_repo(request, target_sat, function_sca_manifest_org): """Enable and synchronize rhel content with immediate policy""" repo_dict = ( @@ -165,7 +165,7 @@ def function_synced_rhel_repo(request, target_sat, function_sca_manifest_org): return repo -@pytest.fixture(scope='function') +@pytest.fixture def function_synced_file_repo(target_sat, function_org, function_product): repo = target_sat.cli_factory.make_repository( { @@ -176,10 +176,10 @@ def function_synced_file_repo(target_sat, function_org, function_product): } ) target_sat.cli.Repository.synchronize({'id': repo['id']}) - yield repo + return repo -@pytest.fixture(scope='function') +@pytest.fixture def function_synced_docker_repo(target_sat, function_org): product = target_sat.cli_factory.make_product({'organization-id': function_org.id}) repo = target_sat.cli_factory.make_repository( @@ -193,10 +193,10 @@ def function_synced_docker_repo(target_sat, function_org): } ) target_sat.cli.Repository.synchronize({'id': repo['id']}) - yield repo + return repo -@pytest.fixture(scope='function') +@pytest.fixture def function_synced_AC_repo(target_sat, function_org, function_product): repo = target_sat.cli_factory.make_repository( { @@ -210,7 +210,7 @@ def function_synced_AC_repo(target_sat, function_org, function_product): } ) target_sat.cli.Repository.synchronize({'id': repo['id']}) - yield repo + 
return repo @pytest.mark.run_in_one_thread diff --git a/tests/foreman/cli/test_subnet.py b/tests/foreman/cli/test_subnet.py index 6dc36a640f7..5eefb2c24ad 100644 --- a/tests/foreman/cli/test_subnet.py +++ b/tests/foreman/cli/test_subnet.py @@ -210,10 +210,10 @@ def test_negative_update_attributes(options): options['id'] = subnet['id'] with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): Subnet.update(options) - # check - subnet is not updated - result = Subnet.info({'id': subnet['id']}) - for key in options.keys(): - assert subnet[key] == result[key] + # check - subnet is not updated + result = Subnet.info({'id': subnet['id']}) + for key in options.keys(): + assert subnet[key] == result[key] @pytest.mark.tier2 diff --git a/tests/foreman/cli/test_user.py b/tests/foreman/cli/test_user.py index 9f3537b139a..c9bbd81d145 100644 --- a/tests/foreman/cli/test_user.py +++ b/tests/foreman/cli/test_user.py @@ -72,7 +72,7 @@ def roles_helper(): yield make_role({'name': role_name}) stubbed_roles = {role['id']: role for role in roles_helper()} - yield stubbed_roles + return stubbed_roles @pytest.mark.parametrize('email', **parametrized(valid_emails_list())) @pytest.mark.tier2 diff --git a/tests/foreman/cli/test_usergroup.py b/tests/foreman/cli/test_usergroup.py index 57e3c8b9b17..61feba18d2f 100644 --- a/tests/foreman/cli/test_usergroup.py +++ b/tests/foreman/cli/test_usergroup.py @@ -34,11 +34,11 @@ from robottelo.utils.datafactory import valid_usernames_list -@pytest.fixture(scope='function') +@pytest.fixture def function_user_group(): """Create new usergroup per each test""" user_group = make_usergroup() - yield user_group + return user_group @pytest.mark.tier1 @@ -241,10 +241,10 @@ def test_negative_automate_bz1437578(ldap_auth_source, function_user_group): 'name': 'Domain Users', } ) - assert ( - 'Could not create external user group: ' - 'Name is not found in the authentication source' - 'Name Domain Users is a special group in AD.' 
- ' Unfortunately, we cannot obtain membership information' - ' from a LDAP search and therefore sync it.' == result - ) + assert ( + 'Could not create external user group: ' + 'Name is not found in the authentication source' + 'Name Domain Users is a special group in AD.' + ' Unfortunately, we cannot obtain membership information' + ' from a LDAP search and therefore sync it.' == result + ) diff --git a/tests/foreman/cli/test_webhook.py b/tests/foreman/cli/test_webhook.py index 3ca83dbe70f..29c08189449 100644 --- a/tests/foreman/cli/test_webhook.py +++ b/tests/foreman/cli/test_webhook.py @@ -28,7 +28,7 @@ from robottelo.constants import WEBHOOK_EVENTS, WEBHOOK_METHODS -@pytest.fixture(scope='function') +@pytest.fixture def webhook_factory(request, class_org, class_location): def _create_webhook(org, loc, options=None): """Function for creating a new Webhook diff --git a/tests/foreman/destructive/test_capsule_loadbalancer.py b/tests/foreman/destructive/test_capsule_loadbalancer.py index 04ba4614438..77adccd1fcf 100644 --- a/tests/foreman/destructive/test_capsule_loadbalancer.py +++ b/tests/foreman/destructive/test_capsule_loadbalancer.py @@ -88,7 +88,7 @@ def setup_capsules( {'id': capsule_id, 'organization-id': module_org.id} ) - yield { + return { 'capsule_1': module_lb_capsule[0], 'capsule_2': module_lb_capsule[1], } diff --git a/tests/foreman/destructive/test_infoblox.py b/tests/foreman/destructive/test_infoblox.py index a9de1690305..1230507f8e0 100644 --- a/tests/foreman/destructive/test_infoblox.py +++ b/tests/foreman/destructive/test_infoblox.py @@ -91,7 +91,7 @@ @pytest.mark.tier4 @pytest.mark.parametrize( - 'command_args,command_opts,rpm_command', + ('command_args', 'command_opts', 'rpm_command'), params, ids=['isc_dhcp', 'infoblox_dhcp', 'infoblox_dns'], ) diff --git a/tests/foreman/destructive/test_ldap_authentication.py b/tests/foreman/destructive/test_ldap_authentication.py index 39f5a6b4f65..65ef31aa2cf 100644 --- 
a/tests/foreman/destructive/test_ldap_authentication.py +++ b/tests/foreman/destructive/test_ldap_authentication.py @@ -80,7 +80,7 @@ def set_certificate_in_satellite(server_type, sat, hostname=None): raise AssertionError(f'Failed to restart the httpd after applying {server_type} cert') -@pytest.fixture() +@pytest.fixture def ldap_tear_down(module_target_sat): """Teardown the all ldap settings user, usergroup and ldap delete""" yield @@ -92,14 +92,14 @@ def ldap_tear_down(module_target_sat): ldap_auth.delete() -@pytest.fixture() +@pytest.fixture def external_user_count(module_target_sat): """return the external auth source user count""" users = module_target_sat.api.User().search() - yield len([user for user in users if user.auth_source_name == 'External']) + return len([user for user in users if user.auth_source_name == 'External']) -@pytest.fixture() +@pytest.fixture def groups_teardown(module_target_sat): """teardown for groups created for external/remote groups""" yield @@ -112,7 +112,7 @@ def groups_teardown(module_target_sat): user_groups[0].delete() -@pytest.fixture() +@pytest.fixture def rhsso_groups_teardown(module_target_sat, default_sso_host): """Teardown the rhsso groups""" yield @@ -384,6 +384,7 @@ def test_external_new_user_login_and_check_count_rhsso( with module_target_sat.ui_session(login=False) as rhsso_session: with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.rhsso_login.login(login_details) + with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.task.read_all() assert error.typename == 'NavigationTriesExceeded' @@ -430,6 +431,7 @@ def test_login_failure_rhsso_user_if_internal_user_exist( with module_target_sat.ui_session(login=False) as rhsso_session: with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.rhsso_login.login(login_details) + with pytest.raises(NavigationTriesExceeded) as error: rhsso_session.task.read_all() assert error.typename == 'NavigationTriesExceeded' @@ -875,7 +877,7 @@ def 
test_positive_negotiate_logout( @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], @@ -931,7 +933,7 @@ def test_positive_autonegotiate( @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], @@ -1013,7 +1015,7 @@ def test_positive_negotiate_manual_with_autonegotiation_disabled( ids=['sessions_enabled', 'sessions_disabled'], ) @pytest.mark.parametrize( - 'parametrized_enrolled_sat,user_not_exists', + ('parametrized_enrolled_sat', 'user_not_exists'), [('IDM', settings.ipa.user), ('AD', f'{settings.ldap.username}@{settings.ldap.realm.lower()}')], indirect=True, ids=['IDM', 'AD'], diff --git a/tests/foreman/destructive/test_ldapauthsource.py b/tests/foreman/destructive/test_ldapauthsource.py index fdbc2da11ba..58e3ceb3e84 100644 --- a/tests/foreman/destructive/test_ldapauthsource.py +++ b/tests/foreman/destructive/test_ldapauthsource.py @@ -34,20 +34,16 @@ def configure_hammer_session(sat, enable=True): sat.execute(f"echo ' :use_sessions: {'true' if enable else 'false'}' >> {HAMMER_CONFIG}") -@pytest.fixture() +@pytest.fixture def rh_sso_hammer_auth_setup(module_target_sat, default_sso_host, request): """rh_sso hammer setup before running the auth login tests""" configure_hammer_session(module_target_sat) client_config = {'publicClient': 'true'} default_sso_host.update_client_configuration(client_config) - - def rh_sso_hammer_auth_cleanup(): - """restore the hammer config backup file and rhsso client settings""" - module_target_sat.execute(f'mv {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') - client_config = {'publicClient': 'false'} - 
default_sso_host.update_client_configuration(client_config) - - request.addfinalizer(rh_sso_hammer_auth_cleanup) + yield + module_target_sat.execute(f'mv {HAMMER_CONFIG}.backup {HAMMER_CONFIG}') + client_config = {'publicClient': 'false'} + default_sso_host.update_client_configuration(client_config) def test_rhsso_login_using_hammer( diff --git a/tests/foreman/destructive/test_ping.py b/tests/foreman/destructive/test_ping.py index 67818b6a74e..e683a84a2b6 100644 --- a/tests/foreman/destructive/test_ping.py +++ b/tests/foreman/destructive/test_ping.py @@ -29,7 +29,7 @@ def tomcat_service_teardown(request, module_target_sat): def _finalize(): assert module_target_sat.cli.Service.start(options={'only': 'tomcat.service'}).status == 0 - yield module_target_sat + return module_target_sat def test_negative_cli_ping_fail_status(tomcat_service_teardown): diff --git a/tests/foreman/maintain/test_health.py b/tests/foreman/maintain/test_health.py index 0dd4320d633..3793dad8b01 100644 --- a/tests/foreman/maintain/test_health.py +++ b/tests/foreman/maintain/test_health.py @@ -589,9 +589,8 @@ def test_positive_health_check_foreman_proxy_verify_dhcp_config_syntax(sat_maint result = sat_maintain.cli.Health.check( options={'label': 'foreman-proxy-verify-dhcp-config-syntax'} ) - assert ( - 'No scenario matching label' and 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout - ) + assert 'No scenario matching label' in result.stdout + assert 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout # Enable DHCP installer = sat_maintain.install( InstallerCommand('enable-foreman-proxy-plugin-dhcp-remote-isc', 'foreman-proxy-dhcp true') ) @@ -617,9 +616,8 @@ def test_positive_health_check_foreman_proxy_verify_dhcp_config_syntax(sat_maint result = sat_maintain.cli.Health.check( options={'label': 'foreman-proxy-verify-dhcp-config-syntax'} ) - assert ( - 'No scenario matching label' and 'foreman-proxy-verify-dhcp-config-syntax' in result.stdout - ) + assert 'No scenario matching label' in result.stdout + assert 
'foreman-proxy-verify-dhcp-config-syntax' in result.stdout def test_positive_remove_job_file(sat_maintain): diff --git a/tests/foreman/sys/test_katello_certs_check.py b/tests/foreman/sys/test_katello_certs_check.py index f058328e2e4..3ce28168e46 100644 --- a/tests/foreman/sys/test_katello_certs_check.py +++ b/tests/foreman/sys/test_katello_certs_check.py @@ -188,7 +188,7 @@ def test_katello_certs_check_output_wildcard_inputs(self, cert_setup_teardown): result = target_sat.execute(command) self.validate_output(result, cert_data) - @pytest.mark.parametrize('error, cert_file, key_file, ca_file', invalid_inputs) + @pytest.mark.parametrize(('error', 'cert_file', 'key_file', 'ca_file'), invalid_inputs) @pytest.mark.tier1 def test_katello_certs_check_output_invalid_input( self, @@ -264,7 +264,7 @@ def test_negative_check_expiration_of_certificate(self, cert_setup_teardown): assert message == check break else: - assert False, f'Failed, Unable to find message "{message}" in result' + pytest.fail(f'Failed, Unable to find message "{message}" in result') target_sat.execute("date -s 'last year'") @pytest.mark.stubbed diff --git a/tests/foreman/ui/test_acs.py b/tests/foreman/ui/test_acs.py index e1c40911c6e..8abe9e70c7d 100644 --- a/tests/foreman/ui/test_acs.py +++ b/tests/foreman/ui/test_acs.py @@ -459,5 +459,5 @@ def test_acs_positive_end_to_end(self, session, acs_setup): # Delete ACS and check if trying to read it afterwards fails session.acs.delete_acs(acs_name='testAcsToBeDeleted') - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 - TODO Ladislav find better exception session.acs.get_row_drawer_content(acs_name='testAcsToBeDeleted') diff --git a/tests/foreman/ui/test_contenthost.py b/tests/foreman/ui/test_contenthost.py index 97cdef261fd..e6d6540ee6b 100644 --- a/tests/foreman/ui/test_contenthost.py +++ b/tests/foreman/ui/test_contenthost.py @@ -76,7 +76,7 @@ def vm(module_repos_collection_with_manifest, rhel7_contenthost, target_sat): 
module_repos_collection_with_manifest.setup_virtual_machine(rhel7_contenthost) rhel7_contenthost.add_rex_key(target_sat) rhel7_contenthost.run(r'subscription-manager repos --enable \*') - yield rhel7_contenthost + return rhel7_contenthost @pytest.fixture @@ -84,7 +84,7 @@ def vm_module_streams(module_repos_collection_with_manifest, rhel8_contenthost, """Virtual machine registered in satellite""" module_repos_collection_with_manifest.setup_virtual_machine(rhel8_contenthost) rhel8_contenthost.add_rex_key(satellite=target_sat) - yield rhel8_contenthost + return rhel8_contenthost def set_ignore_facts_for_os(value=False): @@ -1006,7 +1006,8 @@ def test_module_stream_actions_on_content_host(session, default_location, vm_mod ) assert module_stream[0]['Name'] == FAKE_2_CUSTOM_PACKAGE_NAME assert module_stream[0]['Stream'] == stream_version - assert 'Enabled' and 'Installed' in module_stream[0]['Status'] + assert 'Enabled' in module_stream[0]['Status'] + assert 'Installed' in module_stream[0]['Status'] # remove Module Stream result = session.contenthost.execute_module_stream_action( diff --git a/tests/foreman/ui/test_contentview_old.py b/tests/foreman/ui/test_contentview_old.py index 173243835e9..cfc135c3c9c 100644 --- a/tests/foreman/ui/test_contentview_old.py +++ b/tests/foreman/ui/test_contentview_old.py @@ -2063,7 +2063,8 @@ def test_positive_update_inclusive_filter_package_version(session, module_org, t cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '0.71' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '0.71' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '5.21') ) @@ -2084,7 +2085,8 @@ def test_positive_update_inclusive_filter_package_version(session, module_org, t cv.name, new_version, 'name = "{}" and version = 
"{}"'.format(package_name, '5.21') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '5.21' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '5.21' @pytest.mark.skip_if_open('BZ:2086957') @@ -2125,7 +2127,8 @@ def test_positive_update_exclusive_filter_package_version(session, module_org, t cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '5.21') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '5.21' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '5.21' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) @@ -2146,7 +2149,8 @@ def test_positive_update_exclusive_filter_package_version(session, module_org, t cv.name, new_version, 'name = "{}" and version = "{}"'.format(package_name, '0.71') ) assert len(packages) == 1 - assert packages[0]['Name'] == package_name and packages[0]['Version'] == '0.71' + assert packages[0]['Name'] == package_name + assert packages[0]['Version'] == '0.71' @pytest.mark.skip_if_open('BZ:2086957') @@ -2516,7 +2520,8 @@ def test_positive_update_filter_affected_repos(session, module_org, target_sat): cv.name, VERSION, 'name = "{}" and version = "{}"'.format(repo1_package_name, '4.2.8') ) assert len(packages) == 1 - assert packages[0]['Name'] == repo1_package_name and packages[0]['Version'] == '4.2.8' + assert packages[0]['Name'] == repo1_package_name + assert packages[0]['Version'] == '4.2.8' packages = session.contentview.search_version_package( cv.name, VERSION, 'name = "{}" and version = "{}"'.format(repo1_package_name, '4.2.9') ) @@ -2529,7 +2534,8 @@ def test_positive_update_filter_affected_repos(session, module_org, target_sat): 'name = "{}" and version = "{}"'.format(repo2_package_name, '3.10.232'), ) assert len(packages) == 1 - assert packages[0]['Name'] == 
repo2_package_name and packages[0]['Version'] == '3.10.232' + assert packages[0]['Name'] == repo2_package_name + assert packages[0]['Version'] == '3.10.232' @pytest.mark.tier3 @@ -3040,10 +3046,8 @@ def test_positive_search_module_streams_in_content_view(session, module_org, tar f'name = "{module_stream}" and stream = "{module_version}"', ) assert len(module_streams) == 1 - assert ( - module_streams[0]['Name'] == module_stream - and module_streams[0]['Stream'] == module_version - ) + assert module_streams[0]['Name'] == module_stream + assert module_streams[0]['Stream'] == module_version @pytest.mark.tier2 diff --git a/tests/foreman/ui/test_discoveryrule.py b/tests/foreman/ui/test_discoveryrule.py index e529578efca..d61d5067f85 100644 --- a/tests/foreman/ui/test_discoveryrule.py +++ b/tests/foreman/ui/test_discoveryrule.py @@ -155,7 +155,7 @@ def test_negative_delete_rule_with_non_admin_user( location=[module_location], ).create() with Session(user=reader_user.login, password=reader_user.password) as session: - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 - TODO Adarsh determine better exception session.discoveryrule.delete(dr.name) dr_val = session.discoveryrule.read_all() assert dr.name in [rule['Name'] for rule in dr_val] diff --git a/tests/foreman/ui/test_errata.py b/tests/foreman/ui/test_errata.py index 755a69172dc..632fcefad32 100644 --- a/tests/foreman/ui/test_errata.py +++ b/tests/foreman/ui/test_errata.py @@ -155,12 +155,12 @@ def errata_status_installable(): _set_setting_value(errata_status_installable, original_value) -@pytest.fixture(scope='function') +@pytest.fixture def vm(module_repos_collection_with_setup, rhel7_contenthost, target_sat): """Virtual machine registered in satellite""" module_repos_collection_with_setup.setup_virtual_machine(rhel7_contenthost) rhel7_contenthost.add_rex_key(satellite=target_sat) - yield rhel7_contenthost + return rhel7_contenthost @pytest.mark.e2e diff --git 
a/tests/foreman/ui/test_host.py b/tests/foreman/ui/test_host.py index b76273263bf..ac44d35bf2d 100644 --- a/tests/foreman/ui/test_host.py +++ b/tests/foreman/ui/test_host.py @@ -64,7 +64,7 @@ def ui_user(ui_user, smart_proxy_location, module_target_sat): id=ui_user.id, default_location=smart_proxy_location, ).update(['default_location']) - yield ui_user + return ui_user @pytest.fixture @@ -133,7 +133,7 @@ def tracer_install_host(rex_contenthost, target_sat): rex_contenthost.create_custom_repos( **{f'rhel{rhelver}_os': settings.repos[f'rhel{rhelver}_os']} ) - yield rex_contenthost + return rex_contenthost @pytest.mark.e2e @@ -750,9 +750,9 @@ def test_positive_check_permissions_affect_create_procedure( ] with Session(test_name, user=user.login, password=user_password) as session: for host_field in host_fields: + values = {host_field['name']: host_field['unexpected_value']} + values.update(host_field.get('other_fields_values', {})) with pytest.raises(NoSuchElementException) as context: - values = {host_field['name']: host_field['unexpected_value']} - values.update(host_field.get('other_fields_values', {})) session.host.helper.read_create_view(values) error_message = str(context.value) assert host_field['unexpected_value'] in error_message diff --git a/tests/foreman/ui/test_jobinvocation.py b/tests/foreman/ui/test_jobinvocation.py index 62ab4ed5ef4..bb773841ef6 100644 --- a/tests/foreman/ui/test_jobinvocation.py +++ b/tests/foreman/ui/test_jobinvocation.py @@ -29,7 +29,7 @@ def module_rhel_client_by_ip(module_org, smart_proxy_location, rhel7_contenthost target_sat.api_factory.update_vm_host_location( rhel7_contenthost, location_id=smart_proxy_location.id ) - yield rhel7_contenthost + return rhel7_contenthost @pytest.mark.tier4 diff --git a/tests/foreman/ui/test_ldap_authentication.py b/tests/foreman/ui/test_ldap_authentication.py index e1983767e77..b72d1bbcd8f 100644 --- a/tests/foreman/ui/test_ldap_authentication.py +++ b/tests/foreman/ui/test_ldap_authentication.py 
@@ -59,7 +59,7 @@ def set_certificate_in_satellite(server_type, target_sat, hostname=None): raise AssertionError(f'Failed to restart the httpd after applying {server_type} cert') -@pytest.fixture() +@pytest.fixture def ldap_usergroup_name(): """Return some random usergroup name, and attempt to delete such usergroup when test finishes. @@ -71,7 +71,7 @@ def ldap_usergroup_name(): user_groups[0].delete() -@pytest.fixture() +@pytest.fixture def ldap_tear_down(): """Teardown the all ldap settings user, usergroup and ldap delete""" yield @@ -83,14 +83,14 @@ def ldap_tear_down(): ldap_auth.delete() -@pytest.fixture() +@pytest.fixture def external_user_count(): """return the external auth source user count""" users = entities.User().search() - yield len([user for user in users if user.auth_source_name == 'External']) + return len([user for user in users if user.auth_source_name == 'External']) -@pytest.fixture() +@pytest.fixture def groups_teardown(): """teardown for groups created for external/remote groups""" yield @@ -101,7 +101,7 @@ def groups_teardown(): user_groups[0].delete() -@pytest.fixture() +@pytest.fixture def rhsso_groups_teardown(default_sso_host): """Teardown the rhsso groups""" yield @@ -109,7 +109,7 @@ def rhsso_groups_teardown(default_sso_host): default_sso_host.delete_rhsso_group(group_name) -@pytest.fixture() +@pytest.fixture def multigroup_setting_cleanup(default_ipa_host): """Adding and removing the user to/from ipa group""" sat_users = settings.ipa.groups @@ -119,7 +119,7 @@ def multigroup_setting_cleanup(default_ipa_host): default_ipa_host.remove_user_from_usergroup(idm_users[1], sat_users[0]) -@pytest.fixture() +@pytest.fixture def ipa_add_user(default_ipa_host): """Create an IPA user and delete it""" test_user = gen_string('alpha') diff --git a/tests/foreman/ui/test_partitiontable.py b/tests/foreman/ui/test_partitiontable.py index 393a2ca634f..e36d448698d 100644 --- a/tests/foreman/ui/test_partitiontable.py +++ 
b/tests/foreman/ui/test_partitiontable.py @@ -167,7 +167,7 @@ def test_positive_delete_with_lock_and_unlock(session): ) assert session.partitiontable.search(name)[0]['Name'] == name session.partitiontable.lock(name) - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 - TODO determine better exception session.partitiontable.delete(name) session.partitiontable.unlock(name) session.partitiontable.delete(name) diff --git a/tests/foreman/ui/test_provisioningtemplate.py b/tests/foreman/ui/test_provisioningtemplate.py index 579ca711f7b..62b9169a729 100644 --- a/tests/foreman/ui/test_provisioningtemplate.py +++ b/tests/foreman/ui/test_provisioningtemplate.py @@ -27,7 +27,7 @@ def template_data(): return DataFile.OS_TEMPLATE_DATA_FILE.read_text() -@pytest.fixture() +@pytest.fixture def clone_setup(target_sat, module_org, module_location): name = gen_string('alpha') content = gen_string('alpha') diff --git a/tests/foreman/ui/test_repository.py b/tests/foreman/ui/test_repository.py index 63b7df93577..8395d096967 100644 --- a/tests/foreman/ui/test_repository.py +++ b/tests/foreman/ui/test_repository.py @@ -227,8 +227,8 @@ def test_positive_create_as_non_admin_user_with_cv_published(module_org, test_na with Session(test_name, user_login, user_password) as session: # ensure that the created user is not a global admin user # check administer->users page + pswd = gen_string('alphanumeric') with pytest.raises(NavigationTriesExceeded): - pswd = gen_string('alphanumeric') session.user.create( { 'user.login': gen_string('alphanumeric'), @@ -774,7 +774,8 @@ def test_positive_reposet_disable(session, target_sat, function_entitlement_mani ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' 
for result in results]) session.redhatrepository.disable(repository_name) assert not session.redhatrepository.search( f'name = "{repository_name}"', category='Enabled' @@ -825,7 +826,8 @@ def test_positive_reposet_disable_after_manifest_deleted( ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' for result in results]) # Delete manifest sub.delete_manifest(data={'organization_id': org.id}) # Verify that the displayed repository name is correct @@ -904,7 +906,8 @@ def test_positive_delete_rhel_repo(session, module_entitlement_manifest_org, tar ) ] ) - assert results and all([result == 'Syncing Complete.' for result in results]) + assert results + assert all([result == 'Syncing Complete.' for result in results]) session.repository.delete(product_name, repository_name) assert not session.redhatrepository.search( f'name = "{repository_name}"', category='Enabled' diff --git a/tests/foreman/ui/test_rhc.py b/tests/foreman/ui/test_rhc.py index 6fa17a18589..2ffe7b11e12 100644 --- a/tests/foreman/ui/test_rhc.py +++ b/tests/foreman/ui/test_rhc.py @@ -64,7 +64,7 @@ def module_rhc_org(module_target_sat): return org -@pytest.fixture() +@pytest.fixture def fixture_setup_rhc_satellite( request, module_target_sat, diff --git a/tests/foreman/ui/test_settings.py b/tests/foreman/ui/test_settings.py index ade4cd9afbe..923d542443d 100644 --- a/tests/foreman/ui/test_settings.py +++ b/tests/foreman/ui/test_settings.py @@ -83,10 +83,10 @@ def test_positive_update_restrict_composite_view(session, setting_update, repo_s session.contentview.promote( composite_cv.name, 'Version 1.0', repo_setup['lce'].name ) - assert ( - 'Administrator -> Settings -> Content page using the ' - 'restrict_composite_view flag.' in str(context.value) - ) + assert ( + 'Administrator -> Settings -> Content page using the ' + 'restrict_composite_view flag.' 
in str(context.value) + ) else: result = session.contentview.promote( composite_cv.name, 'Version 1.0', repo_setup['lce'].name @@ -139,7 +139,7 @@ def test_negative_validate_foreman_url_error_message(session, setting_update): invalid_value = [invalid_value for invalid_value in invalid_settings_values()][0] with pytest.raises(AssertionError) as context: session.settings.update(f'name = {property_name}', invalid_value) - assert 'Value is invalid: must be integer' in str(context.value) + assert 'Value is invalid: must be integer' in str(context.value) @pytest.mark.tier2 @@ -509,7 +509,7 @@ def test_negative_update_hostname_with_empty_fact(session, setting_update): with session: with pytest.raises(AssertionError) as context: session.settings.update(property_name, new_hostname) - assert 'can\'t be blank' in str(context.value) + assert 'can\'t be blank' in str(context.value) @pytest.mark.run_in_one_thread diff --git a/tests/foreman/ui/test_subscription.py b/tests/foreman/ui/test_subscription.py index 2196eabb60b..13878f24ab9 100644 --- a/tests/foreman/ui/test_subscription.py +++ b/tests/foreman/ui/test_subscription.py @@ -397,7 +397,8 @@ def test_positive_view_vdc_guest_subscription_products( f'subscription_name = "{VDC_SUBSCRIPTION_NAME}" ' f'and name = "{virt_who_hypervisor_host["name"]}"' ) - assert content_hosts and content_hosts[0]['Name'] == virt_who_hypervisor_host['name'] + assert content_hosts + assert content_hosts[0]['Name'] == virt_who_hypervisor_host['name'] # ensure that hypervisor guests subscription provided products list is not empty and # that the product is in provided products. 
provided_products = session.subscription.provided_products( diff --git a/tests/foreman/virtwho/api/test_esx.py b/tests/foreman/virtwho/api/test_esx.py index 1e0396d4ce5..e7544047d7e 100644 --- a/tests/foreman/virtwho/api/test_esx.py +++ b/tests/foreman/virtwho/api/test_esx.py @@ -383,10 +383,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/api/test_esx_sca.py b/tests/foreman/virtwho/api/test_esx_sca.py index 3a967cbded1..3ac780254f0 100644 --- a/tests/foreman/virtwho/api/test_esx_sca.py +++ b/tests/foreman/virtwho/api/test_esx_sca.py @@ -431,10 +431,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/api/test_nutanix.py b/tests/foreman/virtwho/api/test_nutanix.py index d9ddc34938f..ae2726cf610 100644 --- a/tests/foreman/virtwho/api/test_nutanix.py +++ b/tests/foreman/virtwho/api/test_nutanix.py @@ -250,10 
+250,9 @@ def test_positive_ahv_internal_debug_option( config_file = get_configure_file(virtwho_config_api.id) option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message diff --git a/tests/foreman/virtwho/cli/test_esx.py b/tests/foreman/virtwho/cli/test_esx.py index fe2b1827bf6..c604fc6f3f2 100644 --- a/tests/foreman/virtwho/cli/test_esx.py +++ b/tests/foreman/virtwho/cli/test_esx.py @@ -38,7 +38,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, default_org): form = { 'name': gen_string('alpha'), @@ -56,7 +56,7 @@ def form_data(target_sat, default_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config @@ -446,10 +446,9 @@ def test_positive_remove_env_option(self, default_org, form_data, virtwho_config env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git 
a/tests/foreman/virtwho/cli/test_esx_sca.py b/tests/foreman/virtwho/cli/test_esx_sca.py index df15cce8105..d8ec7c71d9d 100644 --- a/tests/foreman/virtwho/cli/test_esx_sca.py +++ b/tests/foreman/virtwho/cli/test_esx_sca.py @@ -36,7 +36,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, module_sca_manifest_org): form = { 'name': gen_string('alpha'), @@ -54,7 +54,7 @@ def form_data(target_sat, module_sca_manifest_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config @@ -545,10 +545,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/cli/test_hyperv.py b/tests/foreman/virtwho/cli/test_hyperv.py index 1fa31db9f00..35e90d61eb1 100644 --- a/tests/foreman/virtwho/cli/test_hyperv.py +++ b/tests/foreman/virtwho/cli/test_hyperv.py @@ -29,7 +29,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, default_org): form = { 'name': gen_string('alpha'), @@ -47,7 +47,7 @@ def form_data(target_sat, default_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_hyperv_sca.py b/tests/foreman/virtwho/cli/test_hyperv_sca.py index 
e3909489e21..6d60e285c9d 100644 --- a/tests/foreman/virtwho/cli/test_hyperv_sca.py +++ b/tests/foreman/virtwho/cli/test_hyperv_sca.py @@ -29,7 +29,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, module_sca_manifest_org): form = { 'name': gen_string('alpha'), @@ -47,7 +47,7 @@ def form_data(target_sat, module_sca_manifest_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_kubevirt.py b/tests/foreman/virtwho/cli/test_kubevirt.py index c6d38de60dd..e003294de87 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt.py +++ b/tests/foreman/virtwho/cli/test_kubevirt.py @@ -29,7 +29,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, default_org): form = { 'name': gen_string('alpha'), @@ -45,7 +45,7 @@ def form_data(target_sat, default_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_kubevirt_sca.py b/tests/foreman/virtwho/cli/test_kubevirt_sca.py index 99e4335c9f6..9746d80c34b 100644 --- a/tests/foreman/virtwho/cli/test_kubevirt_sca.py +++ b/tests/foreman/virtwho/cli/test_kubevirt_sca.py @@ -27,7 +27,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, module_sca_manifest_org): form = { 'name': gen_string('alpha'), @@ -43,7 +43,7 @@ def form_data(target_sat, module_sca_manifest_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_libvirt.py b/tests/foreman/virtwho/cli/test_libvirt.py index 1f5b034b473..5cd4280e4f3 100644 --- 
a/tests/foreman/virtwho/cli/test_libvirt.py +++ b/tests/foreman/virtwho/cli/test_libvirt.py @@ -29,7 +29,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, default_org): form = { 'name': gen_string('alpha'), @@ -46,7 +46,7 @@ def form_data(target_sat, default_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_libvirt_sca.py b/tests/foreman/virtwho/cli/test_libvirt_sca.py index b29ffaf667f..b1a359c0095 100644 --- a/tests/foreman/virtwho/cli/test_libvirt_sca.py +++ b/tests/foreman/virtwho/cli/test_libvirt_sca.py @@ -27,7 +27,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, module_sca_manifest_org): form = { 'name': gen_string('alpha'), @@ -44,7 +44,7 @@ def form_data(target_sat, module_sca_manifest_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/cli/test_nutanix.py b/tests/foreman/virtwho/cli/test_nutanix.py index 9b9437d0386..ed9ad20ce49 100644 --- a/tests/foreman/virtwho/cli/test_nutanix.py +++ b/tests/foreman/virtwho/cli/test_nutanix.py @@ -31,7 +31,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, default_org): form = { 'name': gen_string('alpha'), @@ -51,7 +51,7 @@ def form_data(target_sat, default_org): return form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config @@ -260,10 +260,9 @@ def test_positive_ahv_internal_debug_option( config_file = get_configure_file(virtwho_config['id']) option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in 
{config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message diff --git a/tests/foreman/virtwho/cli/test_nutanix_sca.py b/tests/foreman/virtwho/cli/test_nutanix_sca.py index 59ba58f6f60..44876700c23 100644 --- a/tests/foreman/virtwho/cli/test_nutanix_sca.py +++ b/tests/foreman/virtwho/cli/test_nutanix_sca.py @@ -29,7 +29,7 @@ ) -@pytest.fixture() +@pytest.fixture def form_data(target_sat, module_sca_manifest_org): sca_form = { 'name': gen_string('alpha'), @@ -48,7 +48,7 @@ def form_data(target_sat, module_sca_manifest_org): return sca_form -@pytest.fixture() +@pytest.fixture def virtwho_config(form_data, target_sat): virtwho_config = target_sat.cli.VirtWhoConfig.create(form_data)['general-information'] yield virtwho_config diff --git a/tests/foreman/virtwho/conftest.py b/tests/foreman/virtwho/conftest.py index 0bd09647bf6..8e422b22940 100644 --- a/tests/foreman/virtwho/conftest.py +++ b/tests/foreman/virtwho/conftest.py @@ -36,7 +36,7 @@ def module_user(request, module_target_sat, default_org, default_location): logger.warning('Unable to delete session user: %s', str(err)) -@pytest.fixture() +@pytest.fixture def session(test_name, module_user): """Session fixture which automatically initializes (but does not start!) airgun UI session and correctly passes current test name to it. 
Uses shared @@ -84,7 +84,7 @@ def module_user_sca(request, module_target_sat, module_org, module_location): logger.warning('Unable to delete session user: %s', str(err)) -@pytest.fixture() +@pytest.fixture def session_sca(test_name, module_user_sca): """Session fixture which automatically initializes (but does not start!) airgun UI session and correctly passes current test name to it. Uses shared diff --git a/tests/foreman/virtwho/ui/test_esx.py b/tests/foreman/virtwho/ui/test_esx.py index 2715febb34f..75652da9bd6 100644 --- a/tests/foreman/virtwho/ui/test_esx.py +++ b/tests/foreman/virtwho/ui/test_esx.py @@ -752,10 +752,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option \"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/ui/test_esx_sca.py b/tests/foreman/virtwho/ui/test_esx_sca.py index f8eb03fea98..63c4c55a16b 100644 --- a/tests/foreman/virtwho/ui/test_esx_sca.py +++ b/tests/foreman/virtwho/ui/test_esx_sca.py @@ -327,10 +327,9 @@ def test_positive_remove_env_option( env_error = ( f"option {{\'{option}\'}} is not exist or not be enabled in {{\'{config_file}\'}}" ) - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option({option}, {config_file}) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # Check /var/log/messages should not display warning message env_warning = f"Ignoring unknown configuration option 
\"{option}\"" result = target_sat.execute(f'grep "{env_warning}" /var/log/messages') diff --git a/tests/foreman/virtwho/ui/test_nutanix.py b/tests/foreman/virtwho/ui/test_nutanix.py index 8bd1a3b23c7..652f3354559 100644 --- a/tests/foreman/virtwho/ui/test_nutanix.py +++ b/tests/foreman/virtwho/ui/test_nutanix.py @@ -236,10 +236,9 @@ def test_positive_ahv_internal_debug_option( # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message diff --git a/tests/foreman/virtwho/ui/test_nutanix_sca.py b/tests/foreman/virtwho/ui/test_nutanix_sca.py index 42de668055e..eb2d7d889ad 100644 --- a/tests/foreman/virtwho/ui/test_nutanix_sca.py +++ b/tests/foreman/virtwho/ui/test_nutanix_sca.py @@ -205,10 +205,9 @@ def test_positive_ahv_internal_debug_option( # ahv_internal_debug does not set in virt-who-config-X.conf option = 'ahv_internal_debug' env_error = f"option {option} is not exist or not be enabled in {config_file}" - try: + with pytest.raises(Exception) as exc_info: # noqa: PT011 - TODO determine better exception get_configure_option("ahv_internal_debug", config_file) - except Exception as VirtWhoError: - assert env_error == str(VirtWhoError) + assert str(exc_info.value) == env_error # check message exist in log file /var/log/rhsm/rhsm.log message = 'Value for "ahv_internal_debug" not set, using default: False' assert check_message_in_rhsm_log(message) == message diff --git a/tests/robottelo/conftest.py 
b/tests/robottelo/conftest.py index df6419a4f24..d8ffb1ad75b 100644 --- a/tests/robottelo/conftest.py +++ b/tests/robottelo/conftest.py @@ -13,13 +13,13 @@ def align_to_satellite(): pass -@pytest.fixture(scope='function') +@pytest.fixture def dummy_test(request): """This should be indirectly parametrized to provide dynamic dummy_tests to exec_test""" return request.param -@pytest.fixture(scope='function') +@pytest.fixture def exec_test(request, dummy_test): """Create a temporary file with the string provided by dummy_test, and run it with pytest.main diff --git a/tests/robottelo/test_cli.py b/tests/robottelo/test_cli.py index 78b0f6f0cf8..706a1d47785 100644 --- a/tests/robottelo/test_cli.py +++ b/tests/robottelo/test_cli.py @@ -301,7 +301,7 @@ def test_exists_with_option_and_no_empty_return(self, lst_method): assert 1 == response @mock.patch('robottelo.cli.base.Base.command_requires_org') - def test_info_requires_organization_id(self, _): + def test_info_requires_organization_id(self, _): # noqa: PT019 - not a fixture """Check info raises CLIError with organization-id is not present in options """ diff --git a/tests/robottelo/test_datafactory.py b/tests/robottelo/test_datafactory.py index 04e081a9b39..b93d401ed89 100644 --- a/tests/robottelo/test_datafactory.py +++ b/tests/robottelo/test_datafactory.py @@ -13,7 +13,7 @@ class TestFilteredDataPoint: """Tests for :meth:`robottelo.utils.datafactory.filtered_datapoint` decorator""" - @pytest.fixture(scope="function") + @pytest.fixture def run_one_datapoint(self, request): # Modify run_one_datapoint on settings singleton based on the indirect param # default to false when not parametrized diff --git a/tests/robottelo/test_decorators.py b/tests/robottelo/test_decorators.py index ff333fbb7e1..0f8ac441ce9 100644 --- a/tests/robottelo/test_decorators.py +++ b/tests/robottelo/test_decorators.py @@ -9,7 +9,7 @@ class TestCacheable: """Tests for :func:`robottelo.utils.decorators.cacheable`.""" - 
@pytest.fixture(scope="function") + @pytest.fixture def make_foo(self): mocked_object_cache_patcher = mock.patch.dict('robottelo.utils.decorators.OBJECT_CACHE') mocked_object_cache_patcher.start() diff --git a/tests/robottelo/test_func_locker.py b/tests/robottelo/test_func_locker.py index ed010e56076..ad4ab5c74aa 100644 --- a/tests/robottelo/test_func_locker.py +++ b/tests/robottelo/test_func_locker.py @@ -193,7 +193,7 @@ def simple_function_not_locked(): class TestFuncLocker: - @pytest.fixture(scope="function", autouse=True) + @pytest.fixture(autouse=True) def count_and_pool(self): global counter_file counter_file.write('0') @@ -371,7 +371,9 @@ def test_recursive_lock_function(self, count_and_pool, recursive_function): """Ensure that recursive calls to locked function is detected using lock_function decorator""" res = count_and_pool.apply_async(recursive_function, ()) - with pytest.raises(func_locker.FunctionLockerError, match=r'.*recursion detected.*'): + with pytest.raises( # noqa: PT012 + func_locker.FunctionLockerError, match=r'.*recursion detected.*' + ): try: res.get(timeout=5) except multiprocessing.TimeoutError: diff --git a/tests/robottelo/test_func_shared.py b/tests/robottelo/test_func_shared.py index 8ab9fb2f06d..7cc635e1aa0 100644 --- a/tests/robottelo/test_func_shared.py +++ b/tests/robottelo/test_func_shared.py @@ -163,13 +163,13 @@ def scope(self): # generate a new namespace scope = gen_string('alpha', 10) set_default_scope(scope) - yield scope + return scope - @pytest.fixture(scope='function', autouse=True) + @pytest.fixture(autouse=True) def enable(self): enable_shared_function(True) - @pytest.fixture(scope='function') + @pytest.fixture def pool(self): pool = multiprocessing.Pool(DEFAULT_POOL_SIZE) yield pool diff --git a/tests/robottelo/test_issue_handlers.py b/tests/robottelo/test_issue_handlers.py index 090e031b6f7..4da0b6df011 100644 --- a/tests/robottelo/test_issue_handlers.py +++ b/tests/robottelo/test_issue_handlers.py @@ -342,8 +342,8 @@ 
def test_bz_should_not_deselect(self): @pytest.mark.parametrize('issue', ["BZ123456", "XX:123456", "KK:89456", "123456", 999999]) def test_invalid_handler(self, issue): """Assert is_open w/ invalid handlers raise AttributeError""" + issue_deselect = should_deselect(issue) with pytest.raises(AttributeError): - issue_deselect = should_deselect(issue) is_open(issue) assert issue_deselect is None diff --git a/tests/upgrades/test_activation_key.py b/tests/upgrades/test_activation_key.py index 7e7cdf9de21..e58c06fee13 100644 --- a/tests/upgrades/test_activation_key.py +++ b/tests/upgrades/test_activation_key.py @@ -26,7 +26,7 @@ class TestActivationKey: operated/modified. """ - @pytest.fixture(scope='function') + @pytest.fixture def activation_key_setup(self, request, target_sat): """ The purpose of this fixture is to setup the activation key based on the provided @@ -45,7 +45,7 @@ def activation_key_setup(self, request, target_sat): content_view=cv, organization=org, name=f"{request.param}_ak" ).create() ak_details = {'org': org, "cv": cv, 'ak': ak, 'custom_repo': custom_repo} - yield ak_details + return ak_details @pytest.mark.pre_upgrade @pytest.mark.parametrize( diff --git a/tests/upgrades/test_classparameter.py b/tests/upgrades/test_classparameter.py index c22d39c48b1..f113f7ab235 100644 --- a/tests/upgrades/test_classparameter.py +++ b/tests/upgrades/test_classparameter.py @@ -51,7 +51,7 @@ class TestScenarioPositivePuppetParameterAndDatatypeIntact: """ @pytest.fixture(scope="class") - def _setup_scenario(self, class_target_sat): + def setup_scenario(self, class_target_sat): """Import some parametrized puppet classes. This is required to make sure that we have smart class variable available. 
Read all available smart class parameters for imported puppet class to @@ -100,7 +100,7 @@ def _validate_value(self, data, sc_param): @pytest.mark.pre_upgrade @pytest.mark.parametrize('count', list(range(1, 10))) def test_pre_puppet_class_parameter_data_and_type( - self, class_target_sat, count, _setup_scenario, save_test_data + self, class_target_sat, count, setup_scenario, save_test_data ): """Puppet Class parameters with different data type are created @@ -116,7 +116,7 @@ def test_pre_puppet_class_parameter_data_and_type( :expectedresults: The parameters are updated with different data types """ - save_test_data(_setup_scenario) + save_test_data(setup_scenario) data = _valid_sc_parameters_data()[count - 1] sc_param = class_target_sat.api.SmartClassParameters().search( query={'search': f'parameter="api_classparameters_scp_00{count}"'} @@ -130,9 +130,10 @@ def test_pre_puppet_class_parameter_data_and_type( self._validate_value(data, sc_param) @pytest.mark.post_upgrade(depend_on=test_pre_puppet_class_parameter_data_and_type) + @pytest.mark.usefixtures('_clean_scenario') @pytest.mark.parametrize('count', list(range(1, 10))) def test_post_puppet_class_parameter_data_and_type( - self, count, _clean_scenario, class_pre_upgrade_data, class_target_sat + self, count, class_pre_upgrade_data, class_target_sat ): """Puppet Class Parameters value and type is intact post upgrade diff --git a/tests/upgrades/test_client.py b/tests/upgrades/test_client.py index 7cb4ce6fe7c..4e1f1720d57 100644 --- a/tests/upgrades/test_client.py +++ b/tests/upgrades/test_client.py @@ -29,7 +29,7 @@ @pytest.fixture def client_for_upgrade(module_target_sat, rex_contenthost, module_org): rex_contenthost.create_custom_repos(fake_yum=settings.repos.yum_1.url) - yield rex_contenthost + return rex_contenthost class TestScenarioUpgradeOldClientAndPackageInstallation: diff --git a/tests/upgrades/test_host.py b/tests/upgrades/test_host.py index 72a4465eea9..a53a36814ff 100644 --- 
a/tests/upgrades/test_host.py +++ b/tests/upgrades/test_host.py @@ -90,7 +90,7 @@ def class_host( image=module_gce_finishimg, root_pass=gen_string('alphanumeric'), ).create() - yield host + return host def google_host(self, googleclient): """Returns the Google Client Host object to perform the assertions""" From ee1ee6a3e56bcc7a3e30083b2760c916438a2bca Mon Sep 17 00:00:00 2001 From: Devendra Date: Fri, 3 Nov 2023 13:27:43 +0530 Subject: [PATCH 72/96] Adding the image config details in the docker config (#12958) --- conf/docker.yaml.template | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/conf/docker.yaml.template b/conf/docker.yaml.template index 81fc6a84dd0..4176b38ee8a 100644 --- a/conf/docker.yaml.template +++ b/conf/docker.yaml.template @@ -9,3 +9,23 @@ DOCKER: PRIVATE_REGISTRY_USERNAME: # Private docker registry password PRIVATE_REGISTRY_PASSWORD: + # Image Pass Registry + IMAGE_REGISTRY: + # image repository URL + URL: + # Pull a non-namespace image using the image pass registry proxy + NON_NAMESPACE: + # Proxy for the non-namespace image + PROXY: + # Username for the non-namespace image pass registry proxy + USERNAME: + # Password for the non-namespace image pass registry proxy + PASSWORD: + # Pull a namespace image using the image pass registry proxy + NAMESPACE: + # proxy for the namespace image + PROXY: + # Username for the namespace image pass registry proxy + USERNAME: + # Password for the namespace image pass registry proxy + PASSWORD: From 17d6f86bdd79d88dc9614a788831e5d984456bd8 Mon Sep 17 00:00:00 2001 From: vijay sawant Date: Fri, 3 Nov 2023 16:46:29 +0530 Subject: [PATCH 73/96] update markdown file name (#13044) * add GitHub issue template (.yaml) and Pull request template (.md) files * final changes after review * changing PR Template file name --- .github/{new_pull_request.md => PULL_REQUEST_TEMPLATE.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{new_pull_request.md => PULL_REQUEST_TEMPLATE.md} 
(100%) diff --git a/.github/new_pull_request.md b/.github/PULL_REQUEST_TEMPLATE.md similarity index 100% rename from .github/new_pull_request.md rename to .github/PULL_REQUEST_TEMPLATE.md From 0a0966ba4a895bf91635eac4b0a7e44b2aaed92d Mon Sep 17 00:00:00 2001 From: Griffin Sullivan <48397354+Griffin-Sullivan@users.noreply.github.com> Date: Mon, 6 Nov 2023 12:08:54 -0500 Subject: [PATCH 74/96] Add monitoring install params and sections (#13046) --- tests/foreman/installer/test_installer.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/foreman/installer/test_installer.py b/tests/foreman/installer/test_installer.py index 178707a8f9c..e9d954847e1 100644 --- a/tests/foreman/installer/test_installer.py +++ b/tests/foreman/installer/test_installer.py @@ -29,6 +29,7 @@ '-', '--[no-]colors', '--[no-]enable-certs', + '--[no-]enable-apache-mod-status', '--[no-]enable-foreman', '--[no-]enable-foreman-cli', '--[no-]enable-foreman-cli-ansible', @@ -76,6 +77,9 @@ '--[no-]lock-package-versions', '--[no-]parser-cache', '--[no-]verbose', + '--apache-mod-status-extended-status', + '--apache-mod-status-requires', + '--apache-mod-status-status-path', '--certs-ca-common-name', '--certs-ca-expiration', '--certs-city', @@ -240,6 +244,7 @@ '--foreman-proxy-content-pulpcore-telemetry', '--foreman-proxy-content-pulpcore-worker-count', '--foreman-proxy-content-reverse-proxy', + '--foreman-proxy-content-reverse-proxy-backend-protocol', '--foreman-proxy-content-reverse-proxy-port', '--foreman-proxy-dhcp', '--foreman-proxy-dhcp-additional-interfaces', @@ -499,6 +504,7 @@ '--puppet-ca-crl-filepath', '--puppet-ca-port', '--puppet-ca-server', + '--puppet-certificate-revocation', '--puppet-classfile', '--puppet-client-certname', '--puppet-client-package', @@ -662,6 +668,9 @@ '--puppet-vardir', '--puppet-version', '--register-with-insights', + '--reset-apache-mod-status-extended-status', + '--reset-apache-mod-status-requires', + '--reset-apache-mod-status-status-path', 
'--reset-certs-ca-common-name', '--reset-certs-ca-expiration', '--reset-certs-city', @@ -815,6 +824,7 @@ '--reset-foreman-proxy-content-pulpcore-telemetry', '--reset-foreman-proxy-content-pulpcore-worker-count', '--reset-foreman-proxy-content-reverse-proxy', + '--reset-foreman-proxy-content-reverse-proxy-backend-protocol', '--reset-foreman-proxy-content-reverse-proxy-port', '--reset-foreman-proxy-dhcp', '--reset-foreman-proxy-dhcp-additional-interfaces', @@ -1065,6 +1075,7 @@ '--reset-puppet-ca-crl-filepath', '--reset-puppet-ca-port', '--reset-puppet-ca-server', + '--reset-puppet-certificate-revocation', '--reset-puppet-classfile', '--reset-puppet-client-certname', '--reset-puppet-client-package', @@ -1244,6 +1255,7 @@ LAST_SAVED_SECTIONS = { '= Generic:', + '= Module apache_mod_status:', '= Module certs:', '= Module foreman:', '= Module foreman_cli:', From c0971e5c8baf29baebf640d37d881fe76d9c594e Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Tue, 7 Nov 2023 12:06:18 +0530 Subject: [PATCH 75/96] Fix convert2rhel tests by CVV sort and other fixes (#12959) Fix convert2rhel tests by CVV sort Signed-off-by: Gaurav Talreja --- tests/foreman/api/test_convert2rhel.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/foreman/api/test_convert2rhel.py b/tests/foreman/api/test_convert2rhel.py index ce2e78d85b2..026acb467bc 100644 --- a/tests/foreman/api/test_convert2rhel.py +++ b/tests/foreman/api/test_convert2rhel.py @@ -61,6 +61,7 @@ def update_cv(sat, cv, lce, repos): cv = sat.api.ContentView(id=cv.id, repository=repos).update(["repository"]) cv.publish() cv = cv.read() + cv.version.sort(key=lambda version: version.id) cv.version[-1].promote(data={'environment_ids': lce.id, 'force': False}) return cv From 01aeddd9b6c2568aa187ef3dde4f60e54d245ac1 Mon Sep 17 00:00:00 2001 From: omkarkhatavkar Date: Wed, 18 Oct 2023 16:14:10 +0530 Subject: [PATCH 76/96] removing the import entities from nailgun and moving target_sat --- tests/foreman/api/test_activationkey.py | 
108 +++-- tests/foreman/api/test_architecture.py | 17 +- tests/foreman/api/test_audit.py | 89 ++-- tests/foreman/api/test_bookmarks.py | 65 ++- tests/foreman/api/test_capsulecontent.py | 99 ++-- tests/foreman/api/test_computeprofile.py | 31 +- tests/foreman/api/test_contentcredentials.py | 49 +- tests/foreman/api/test_contentview.py | 201 ++++---- tests/foreman/api/test_contentviewfilter.py | 226 +++++---- tests/foreman/api/test_contentviewversion.py | 209 +++++--- tests/foreman/api/test_discoveryrule.py | 40 +- tests/foreman/api/test_docker.py | 305 +++++++----- tests/foreman/api/test_errata.py | 73 +-- tests/foreman/api/test_filter.py | 19 +- tests/foreman/api/test_foremantask.py | 9 +- tests/foreman/api/test_host.py | 246 +++++----- tests/foreman/api/test_hostcollection.py | 115 +++-- tests/foreman/api/test_hostgroup.py | 76 +-- tests/foreman/api/test_http_proxy.py | 23 +- tests/foreman/api/test_ldapauthsource.py | 7 +- .../foreman/api/test_lifecycleenvironment.py | 43 +- tests/foreman/api/test_media.py | 61 +-- tests/foreman/api/test_multiple_paths.py | 38 +- tests/foreman/api/test_organization.py | 58 +-- .../foreman/api/test_oscap_tailoringfiles.py | 21 +- tests/foreman/api/test_oscappolicy.py | 19 +- tests/foreman/api/test_permission.py | 26 +- tests/foreman/api/test_product.py | 97 ++-- tests/foreman/api/test_reporttemplates.py | 85 ++-- tests/foreman/api/test_repositories.py | 3 +- tests/foreman/api/test_repository.py | 202 ++++---- tests/foreman/api/test_repository_set.py | 9 +- tests/foreman/api/test_role.py | 455 ++++++++++-------- tests/foreman/api/test_settings.py | 9 +- tests/foreman/api/test_subnet.py | 75 ++- tests/foreman/api/test_subscription.py | 80 +-- tests/foreman/api/test_syncplan.py | 130 ++--- tests/foreman/api/test_templatesync.py | 131 ++--- tests/foreman/api/test_user.py | 205 ++++---- tests/foreman/api/test_usergroup.py | 89 ++-- tests/foreman/api/test_webhook.py | 29 +- 41 files changed, 2118 insertions(+), 1754 deletions(-) diff 
--git a/tests/foreman/api/test_activationkey.py b/tests/foreman/api/test_activationkey.py index ae5cb15125a..16e968ed7e8 100644 --- a/tests/foreman/api/test_activationkey.py +++ b/tests/foreman/api/test_activationkey.py @@ -19,7 +19,7 @@ import http from fauxfactory import gen_integer, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -46,7 +46,7 @@ def _bad_max_hosts(): @pytest.mark.tier1 -def test_positive_create_unlimited_hosts(): +def test_positive_create_unlimited_hosts(target_sat): """Create a plain vanilla activation key. :id: 1d73b8cc-a754-4637-8bae-d9d2aaf89003 @@ -56,12 +56,12 @@ def test_positive_create_unlimited_hosts(): :CaseImportance: Critical """ - assert entities.ActivationKey().create().unlimited_hosts is True + assert target_sat.api.ActivationKey().create().unlimited_hosts is True @pytest.mark.tier1 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_create_limited_hosts(max_host): +def test_positive_create_limited_hosts(max_host, target_sat): """Create an activation key with limited hosts. :id: 9bbba620-fd98-4139-a44b-af8ce330c7a4 @@ -73,14 +73,14 @@ def test_positive_create_limited_hosts(max_host): :parametrized: yes """ - act_key = entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + act_key = target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() assert act_key.max_hosts == max_host assert act_key.unlimited_hosts is False @pytest.mark.tier1 @pytest.mark.parametrize('key_name', **parametrized(valid_data_list())) -def test_positive_create_with_name(key_name): +def test_positive_create_with_name(key_name, target_sat): """Create an activation key providing the initial name. 
:id: 749e0d28-640e-41e5-89d6-b92411ce73a3 @@ -91,13 +91,13 @@ def test_positive_create_with_name(key_name): :parametrized: yes """ - act_key = entities.ActivationKey(name=key_name).create() + act_key = target_sat.api.ActivationKey(name=key_name).create() assert key_name == act_key.name @pytest.mark.tier2 @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create an activation key and provide a description. :id: 64d93726-6f96-4a2e-ab29-eb5bfa2ff8ff @@ -106,12 +106,12 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - act_key = entities.ActivationKey(description=desc).create() + act_key = target_sat.api.ActivationKey(description=desc).create() assert desc == act_key.description @pytest.mark.tier2 -def test_negative_create_with_no_host_limit(): +def test_negative_create_with_no_host_limit(target_sat): """Create activation key without providing limitation for hosts number :id: a9e756e1-886d-4f0d-b685-36ce4247517d @@ -121,12 +121,12 @@ def test_negative_create_with_no_host_limit(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ActivationKey(unlimited_hosts=False).create() + target_sat.api.ActivationKey(unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_create_with_invalid_host_limit(max_host): +def test_negative_create_with_invalid_host_limit(max_host, target_sat): """Create activation key with invalid limit values for hosts number. 
:id: c018b177-2074-4f1a-a7e0-9f38d6c9a1a6 @@ -138,12 +138,12 @@ def test_negative_create_with_invalid_host_limit(max_host): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create activation key providing an invalid name. :id: 5f7051be-0320-4d37-9085-6904025ad909 @@ -155,12 +155,12 @@ def test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(name=name).create() + target_sat.api.ActivationKey(name=name).create() @pytest.mark.tier2 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_update_limited_host(max_host): +def test_positive_update_limited_host(max_host, target_sat): """Create activation key then update it to limited hosts. :id: 34ca8303-8135-4694-9cf7-b20f8b4b0a1e @@ -170,7 +170,7 @@ def test_positive_update_limited_host(max_host): :parametrized: yes """ # unlimited_hosts defaults to True. - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': max_host, 'unlimited_hosts': False} for key, value in want.items(): setattr(act_key, key, value) @@ -181,7 +181,7 @@ def test_positive_update_limited_host(max_host): @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat): """Create activation key providing the initial name, then update its name to another valid name. 
@@ -192,14 +192,14 @@ def test_positive_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() - updated = entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) + act_key = target_sat.api.ActivationKey().create() + updated = target_sat.api.ActivationKey(id=act_key.id, name=new_name).update(['name']) assert new_name == updated.name @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_update_limit(max_host): +def test_negative_update_limit(max_host, target_sat): """Create activation key then update its limit to invalid value. :id: 0f857d2f-81ed-4b8b-b26e-34b4f294edbc @@ -214,7 +214,7 @@ def test_negative_update_limit(max_host): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': act_key.max_hosts, 'unlimited_hosts': act_key.unlimited_hosts} act_key.max_hosts = max_host act_key.unlimited_hosts = False @@ -227,7 +227,7 @@ def test_negative_update_limit(max_host): @pytest.mark.tier3 @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat): """Create activation key then update its name to an invalid name. 
:id: da85a32c-942b-4ab8-a133-36b028208c4d @@ -239,16 +239,16 @@ def test_negative_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) - new_key = entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id, name=new_name).update(['name']) + new_key = target_sat.api.ActivationKey(id=act_key.id).read() assert new_key.name != new_name assert new_key.name == act_key.name @pytest.mark.tier3 -def test_negative_update_max_hosts(): +def test_negative_update_max_hosts(target_sat): """Create an activation key with ``max_hosts == 1``, then update that field with a string value. @@ -258,14 +258,14 @@ def test_negative_update_max_hosts(): :CaseImportance: Low """ - act_key = entities.ActivationKey(max_hosts=1).create() + act_key = target_sat.api.ActivationKey(max_hosts=1).create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, max_hosts='foo').update(['max_hosts']) + target_sat.api.ActivationKey(id=act_key.id, max_hosts='foo').update(['max_hosts']) assert act_key.read().max_hosts == 1 @pytest.mark.tier2 -def test_positive_get_releases_status_code(): +def test_positive_get_releases_status_code(target_sat): """Get an activation key's releases. Check response format. :id: e1ea4797-8d92-4bec-ae6b-7a26599825ab @@ -275,7 +275,7 @@ def test_positive_get_releases_status_code(): :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() path = act_key.path('releases') response = client.get(path, auth=get_credentials(), verify=False) status_code = http.client.OK @@ -284,7 +284,7 @@ def test_positive_get_releases_status_code(): @pytest.mark.tier2 -def test_positive_get_releases_content(): +def test_positive_get_releases_content(target_sat): """Get an activation key's releases. 
Check response contents. :id: 2fec3d71-33e9-40e5-b934-90b03afc26a1 @@ -293,14 +293,14 @@ def test_positive_get_releases_content(): :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() response = client.get(act_key.path('releases'), auth=get_credentials(), verify=False).json() assert 'results' in response.keys() assert isinstance(response['results'], list) @pytest.mark.tier2 -def test_positive_add_host_collections(module_org): +def test_positive_add_host_collections(module_org, module_target_sat): """Associate an activation key with several host collections. :id: 1538808c-621e-4cf9-9b9b-840c5dd54644 @@ -318,23 +318,27 @@ def test_positive_add_host_collections(module_org): :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 # Give activation key one host collection. - act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 1 # Give activation key second host collection. 
- act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 2 @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_host_collection(module_org): +def test_positive_remove_host_collection(module_org, module_target_sat): """Disassociate host collection from the activation key :id: 31992ac4-fe55-45bb-bd17-a191928ec2ab @@ -353,10 +357,10 @@ def test_positive_remove_host_collection(module_org): :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() # Associate host collection with activation key. act_key.add_host_collection(data={'host_collection_ids': [host_collection.id]}) @@ -368,7 +372,7 @@ def test_positive_remove_host_collection(module_org): @pytest.mark.tier1 -def test_positive_update_auto_attach(): +def test_positive_update_auto_attach(target_sat): """Create an activation key, then update the auto_attach field with the inverse boolean value. 
@@ -378,17 +382,17 @@ def test_positive_update_auto_attach(): :CaseImportance: Critical """ - act_key = entities.ActivationKey().create() - act_key_2 = entities.ActivationKey(id=act_key.id, auto_attach=(not act_key.auto_attach)).update( - ['auto_attach'] - ) + act_key = target_sat.api.ActivationKey().create() + act_key_2 = target_sat.api.ActivationKey( + id=act_key.id, auto_attach=(not act_key.auto_attach) + ).update(['auto_attach']) assert act_key.auto_attach != act_key_2.auto_attach @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete(name): +def test_positive_delete(name, target_sat): """Create activation key and then delete it. :id: aa28d8fb-e07d-45fa-b43a-fc90c706d633 @@ -399,10 +403,10 @@ def test_positive_delete(name): :parametrized: yes """ - act_key = entities.ActivationKey(name=name).create() + act_key = target_sat.api.ActivationKey(name=name).create() act_key.delete() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id).read() @pytest.mark.tier2 @@ -503,7 +507,7 @@ def test_positive_add_future_subscription(): @pytest.mark.tier1 -def test_positive_search_by_org(): +def test_positive_search_by_org(target_sat): """Search for all activation keys in an organization. 
:id: aedba598-2e47-44a8-826c-4dc304ba00be @@ -513,8 +517,8 @@ def test_positive_search_by_org(): :CaseImportance: Critical """ - org = entities.Organization().create() - act_key = entities.ActivationKey(organization=org).create() - keys = entities.ActivationKey(organization=org).search() + org = target_sat.api.Organization().create() + act_key = target_sat.api.ActivationKey(organization=org).create() + keys = target_sat.api.ActivationKey(organization=org).search() assert len(keys) == 1 assert act_key.id == keys[0].id diff --git a/tests/foreman/api/test_architecture.py b/tests/foreman/api/test_architecture.py index f88c8adba18..f0b7c428b0f 100644 --- a/tests/foreman/api/test_architecture.py +++ b/tests/foreman/api/test_architecture.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_choice -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -29,7 +28,7 @@ @pytest.mark.tier1 -def test_positive_CRUD(default_os): +def test_positive_CRUD(default_os, target_sat): """Create a new Architecture with several attributes, update the name and delete the Architecture itself. 
@@ -43,13 +42,13 @@ def test_positive_CRUD(default_os): # Create name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(name=name, operatingsystem=[default_os]).create() + arch = target_sat.api.Architecture(name=name, operatingsystem=[default_os]).create() assert {default_os.id} == {os.id for os in arch.operatingsystem} assert name == arch.name # Update name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(id=arch.id, name=name).update(['name']) + arch = target_sat.api.Architecture(id=arch.id, name=name).update(['name']) assert name == arch.name # Delete @@ -60,7 +59,7 @@ def test_positive_CRUD(default_os): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create architecture providing an invalid initial name. :id: 0fa6377d-063a-4e24-b606-b342e0d9108b @@ -74,12 +73,12 @@ def test_negative_create_with_invalid_name(name): :BZ: 1401519 """ with pytest.raises(HTTPError): - entities.Architecture(name=name).create() + target_sat.api.Architecture(name=name).create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_update_with_invalid_name(name, module_architecture): +def test_negative_update_with_invalid_name(name, module_architecture, module_target_sat): """Update architecture's name to an invalid name. 
:id: cb27b69b-14e0-42d0-9e44-e09d68324803 @@ -91,6 +90,6 @@ def test_negative_update_with_invalid_name(name, module_architecture): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Architecture(id=module_architecture.id, name=name).update(['name']) - arch = entities.Architecture(id=module_architecture.id).read() + module_target_sat.api.Architecture(id=module_architecture.id, name=name).update(['name']) + arch = module_target_sat.api.Architecture(id=module_architecture.id).read() assert arch.name != name diff --git a/tests/foreman/api/test_audit.py b/tests/foreman/api/test_audit.py index 59b19d0806c..1aa9c28cc41 100644 --- a/tests/foreman/api/test_audit.py +++ b/tests/foreman/api/test_audit.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from robottelo.utils.datafactory import gen_string @@ -24,7 +23,7 @@ @pytest.mark.e2e @pytest.mark.tier1 -def test_positive_create_by_type(): +def test_positive_create_by_type(target_sat): """Create entities of different types and check audit logs for these events using entity type as search criteria @@ -40,45 +39,49 @@ def test_positive_create_by_type(): :CaseImportance: Medium """ for entity_item in [ - {'entity': entities.Architecture()}, + {'entity': target_sat.api.Architecture()}, { - 'entity': entities.AuthSourceLDAP(), + 'entity': target_sat.api.AuthSourceLDAP(), 'entity_type': 'auth_source', 'value_template': 'LDAP-{entity.name}', }, - {'entity': entities.ComputeProfile(), 'entity_type': 'compute_profile'}, + {'entity': target_sat.api.ComputeProfile(), 'entity_type': 'compute_profile'}, { - 'entity': entities.LibvirtComputeResource(), + 'entity': target_sat.api.LibvirtComputeResource(), 'entity_type': 'compute_resource', 'value_template': '{entity.name} (Libvirt)', }, - {'entity': entities.Domain()}, - {'entity': entities.Host()}, - {'entity': entities.HostGroup()}, - {'entity': entities.Image(compute_resource=entities.LibvirtComputeResource().create())}, - {'entity': 
entities.Location()}, - {'entity': entities.Media(), 'entity_type': 'medium'}, + {'entity': target_sat.api.Domain()}, + {'entity': target_sat.api.Host()}, + {'entity': target_sat.api.HostGroup()}, { - 'entity': entities.OperatingSystem(), + 'entity': target_sat.api.Image( + compute_resource=target_sat.api.LibvirtComputeResource().create() + ) + }, + {'entity': target_sat.api.Location()}, + {'entity': target_sat.api.Media(), 'entity_type': 'medium'}, + { + 'entity': target_sat.api.OperatingSystem(), 'entity_type': 'os', 'value_template': '{entity.name} {entity.major}', }, - {'entity': entities.PartitionTable(), 'entity_type': 'ptable'}, - {'entity': entities.Role()}, + {'entity': target_sat.api.PartitionTable(), 'entity_type': 'ptable'}, + {'entity': target_sat.api.Role()}, { - 'entity': entities.Subnet(), + 'entity': target_sat.api.Subnet(), 'value_template': '{entity.name} ({entity.network}/{entity.cidr})', }, - {'entity': entities.ProvisioningTemplate(), 'entity_type': 'provisioning_template'}, - {'entity': entities.User(), 'value_template': '{entity.login}'}, - {'entity': entities.UserGroup()}, - {'entity': entities.ContentView(), 'entity_type': 'katello/content_view'}, - {'entity': entities.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, - {'entity': entities.ActivationKey(), 'entity_type': 'katello/activation_key'}, - {'entity': entities.HostCollection(), 'entity_type': 'katello/host_collection'}, - {'entity': entities.Product(), 'entity_type': 'katello/product'}, + {'entity': target_sat.api.ProvisioningTemplate(), 'entity_type': 'provisioning_template'}, + {'entity': target_sat.api.User(), 'value_template': '{entity.login}'}, + {'entity': target_sat.api.UserGroup()}, + {'entity': target_sat.api.ContentView(), 'entity_type': 'katello/content_view'}, + {'entity': target_sat.api.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, + {'entity': target_sat.api.ActivationKey(), 'entity_type': 'katello/activation_key'}, + {'entity': 
target_sat.api.HostCollection(), 'entity_type': 'katello/host_collection'}, + {'entity': target_sat.api.Product(), 'entity_type': 'katello/product'}, { - 'entity': entities.SyncPlan(organization=entities.Organization(id=1)), + 'entity': target_sat.api.SyncPlan(organization=target_sat.api.Organization(id=1)), 'entity_type': 'katello/sync_plan', }, ]: @@ -86,7 +89,7 @@ def test_positive_create_by_type(): entity_type = entity_item.get('entity_type', created_entity.__class__.__name__.lower()) value_template = entity_item.get('value_template', '{entity.name}') entity_value = value_template.format(entity=created_entity) - audits = entities.Audit().search(query={'search': f'type={entity_type}'}) + audits = target_sat.api.Audit().search(query={'search': f'type={entity_type}'}) entity_audits = [entry for entry in audits if entry.auditable_name == entity_value] assert entity_audits, ( f'audit not found by name "{entity_value}" for entity: ' @@ -99,7 +102,7 @@ def test_positive_create_by_type(): @pytest.mark.tier1 -def test_positive_update_by_type(): +def test_positive_update_by_type(target_sat): """Update some entities of different types and check audit logs for these events using entity type as search criteria @@ -111,19 +114,19 @@ def test_positive_update_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + target_sat.api.Architecture(), + target_sat.api.Domain(), + target_sat.api.HostGroup(), + target_sat.api.Location(), + target_sat.api.Role(), + target_sat.api.UserGroup(), ]: created_entity = entity.create() name = created_entity.name new_name = gen_string('alpha') created_entity.name = new_name created_entity = created_entity.update(['name']) - audits = entities.Audit().search( + audits = target_sat.api.Audit().search( query={'search': f'type={created_entity.__class__.__name__.lower()}'} ) entity_audits = [entry for entry in 
audits if entry.auditable_name == name] @@ -136,7 +139,7 @@ def test_positive_update_by_type(): @pytest.mark.tier1 -def test_positive_delete_by_type(): +def test_positive_delete_by_type(target_sat): """Delete some entities of different types and check audit logs for these events using entity type as search criteria @@ -148,17 +151,17 @@ def test_positive_delete_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.Host(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + target_sat.api.Architecture(), + target_sat.api.Domain(), + target_sat.api.Host(), + target_sat.api.HostGroup(), + target_sat.api.Location(), + target_sat.api.Role(), + target_sat.api.UserGroup(), ]: created_entity = entity.create() created_entity.delete() - audits = entities.Audit().search( + audits = target_sat.api.Audit().search( query={'search': f'type={created_entity.__class__.__name__.lower()}'} ) entity_audits = [entry for entry in audits if entry.auditable_name == created_entity.name] diff --git a/tests/foreman/api/test_bookmarks.py b/tests/foreman/api/test_bookmarks.py index ad48053079b..694d0a6654a 100644 --- a/tests/foreman/api/test_bookmarks.py +++ b/tests/foreman/api/test_bookmarks.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -32,7 +31,7 @@ @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_name(controller): +def test_positive_create_with_name(controller, target_sat): """Create a bookmark :id: aeef0944-379a-4a27-902d-aa5969dbd441 @@ -51,14 +50,14 @@ def test_positive_create_with_name(controller): :CaseImportance: Critical """ name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, name=name, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, name=name, 
public=False).create() assert bm.controller == controller assert bm.name == name @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_query(controller): +def test_positive_create_with_query(controller, target_sat): """Create a bookmark :id: 9fb6d485-92b5-43ea-b776-012c13734100 @@ -77,7 +76,7 @@ def test_positive_create_with_query(controller): :CaseImportance: Critical """ query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, query=query).create() + bm = target_sat.api.Bookmark(controller=controller, query=query).create() assert bm.controller == controller assert bm.query == query @@ -85,7 +84,7 @@ def test_positive_create_with_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_public(controller, public): +def test_positive_create_public(controller, public, target_sat): """Create a public bookmark :id: 511b9bcf-0661-4e44-b1bc-475a1c207aa9 @@ -103,14 +102,14 @@ def test_positive_create_public(controller, public): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller, public=public).create() + bm = target_sat.api.Bookmark(controller=controller, public=public).create() assert bm.controller == controller assert bm.public == public @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_with_invalid_name(controller): +def test_negative_create_with_invalid_name(controller, target_sat): """Create a bookmark with invalid name :id: 9a79c561-8225-43fc-8ec7-b6858e9665e2 @@ -131,14 +130,14 @@ def test_negative_create_with_invalid_name(controller): """ name = random.choice(invalid_values_list()) with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=False).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + 
target_sat.api.Bookmark(controller=controller, name=name, public=False).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_empty_query(controller): +def test_negative_create_empty_query(controller, target_sat): """Create a bookmark with empty query :id: 674d569f-6f86-43ba-b9cc-f43e05e8ab1c @@ -159,14 +158,14 @@ def test_negative_create_empty_query(controller): """ name = gen_string('alpha') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, query='').create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, query='').create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_same_name(controller): +def test_negative_create_same_name(controller, target_sat): """Create bookmarks with the same names :id: f78f6e97-da77-4a61-95c2-622c439d325d @@ -187,16 +186,16 @@ def test_negative_create_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() + target_sat.api.Bookmark(controller=controller, name=name).create() with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 1 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_null_public(controller): +def test_negative_create_null_public(controller, target_sat): """Create a bookmark omitting 
the public parameter :id: 0a4cb5ea-912b-445e-a874-b345e43d3eac @@ -220,14 +219,14 @@ def test_negative_create_null_public(controller): """ name = gen_string('alphanumeric') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=None).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, public=None).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_name(controller): +def test_positive_update_name(controller, target_sat): """Update a bookmark :id: 1cde270a-26fb-4cff-bdff-89fef17a7624 @@ -246,7 +245,7 @@ def test_positive_update_name(controller): :CaseImportance: Critical """ new_name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name bm = bm.update(['name']) assert bm.name == new_name @@ -254,7 +253,7 @@ def test_positive_update_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_same_name(controller): +def test_negative_update_same_name(controller, target_sat): """Update a bookmark with name already taken :id: 6becf121-2bea-4f7e-98f4-338bd88b8f4b @@ -274,8 +273,8 @@ def test_negative_update_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() - bm = entities.Bookmark(controller=controller).create() + target_sat.api.Bookmark(controller=controller, name=name).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.name = name with pytest.raises(HTTPError): bm.update(['name']) @@ -285,7 +284,7 @@ def test_negative_update_same_name(controller): 
@pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_invalid_name(controller): +def test_negative_update_invalid_name(controller, target_sat): """Update a bookmark with an invalid name :id: 479795bb-aeed-45b3-a7e3-d3449c808087 @@ -304,7 +303,7 @@ def test_negative_update_invalid_name(controller): :CaseImportance: Critical """ new_name = random.choice(invalid_values_list()) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name with pytest.raises(HTTPError): bm.update(['name']) @@ -314,7 +313,7 @@ def test_negative_update_invalid_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_query(controller): +def test_positive_update_query(controller, target_sat): """Update a bookmark query :id: 92a31de2-bebf-4396-94f5-adf59f8d66a5 @@ -333,7 +332,7 @@ def test_positive_update_query(controller): :CaseImportance: Critical """ new_query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = new_query bm = bm.update(['query']) assert bm.query == new_query @@ -341,7 +340,7 @@ def test_positive_update_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_empty_query(controller): +def test_negative_update_empty_query(controller, target_sat): """Update a bookmark with an empty query :id: 948602d3-532a-47fe-b313-91e3fab809bf @@ -359,7 +358,7 @@ def test_negative_update_empty_query(controller): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = '' with pytest.raises(HTTPError): bm.update(['query']) @@ -370,7 +369,7 @@ def test_negative_update_empty_query(controller): 
@pytest.mark.tier1 @pytest.mark.parametrize('public', [True, False]) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_public(controller, public): +def test_positive_update_public(controller, public, target_sat): """Update a bookmark public state to private and vice versa :id: 2717360d-37c4-4bb9-bce1-b1edabdf11b3 @@ -389,7 +388,7 @@ def test_positive_update_public(controller, public): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller, public=not public).create() + bm = target_sat.api.Bookmark(controller=controller, public=not public).create() assert bm.public != public bm.public = public bm = bm.update(['public']) diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index acf756d02df..2e72bc8998a 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -21,7 +21,7 @@ import re from time import sleep -from nailgun import client, entities +from nailgun import client from nailgun.entity_mixins import call_entity_method_with_timeout import pytest @@ -43,9 +43,13 @@ class TestCapsuleContentManagement: interactions and use capsule. 
""" - def update_capsule_download_policy(self, module_capsule_configured, download_policy): + def update_capsule_download_policy( + self, module_capsule_configured, download_policy, module_target_sat + ): """Updates capsule's download policy to desired value""" - proxy = entities.SmartProxy(id=module_capsule_configured.nailgun_capsule.id).read() + proxy = module_target_sat.api.SmartProxy( + id=module_capsule_configured.nailgun_capsule.id + ).read() proxy.download_policy = download_policy proxy.update(['download_policy']) @@ -78,7 +82,12 @@ def test_positive_insights_puppet_package_availability(self, module_capsule_conf @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_uploaded_content_library_sync( - self, module_capsule_configured, function_org, function_product, function_lce_library + self, + module_capsule_configured, + function_org, + function_product, + function_lce_library, + target_sat, ): """Ensure custom repo with no upstream url and manually uploaded content after publishing to Library is synchronized to capsule @@ -92,7 +101,7 @@ def test_positive_uploaded_content_library_sync( :expectedresults: custom content is present on external capsule """ - repo = entities.Repository(product=function_product, url=None).create() + repo = target_sat.api.Repository(product=function_product, url=None).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce_library.id} @@ -103,7 +112,7 @@ def test_positive_uploaded_content_library_sync( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Upload custom content into the repo with 
open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -134,7 +143,7 @@ def test_positive_uploaded_content_library_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_checksum_sync( - self, module_capsule_configured, function_org, function_product, function_lce + self, module_capsule_configured, function_org, function_product, function_lce, target_sat ): """Synchronize repository to capsule, update repository's checksum type, trigger capsule sync and make sure checksum type was updated on @@ -152,7 +161,7 @@ def test_positive_checksum_sync( :CaseImportance: Critical """ # Create repository with sha256 checksum type - repo = entities.Repository( + repo = target_sat.api.Repository( product=function_product, checksum_type='sha256', mirroring_policy='additive', @@ -168,7 +177,7 @@ def test_positive_checksum_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync, publish and promote a repo - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() repo.sync() repo = repo.read() cv.publish() @@ -228,7 +237,12 @@ def test_positive_checksum_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_updated_repo( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Sync a custom repo with no upstream url but uploaded content to the Capsule via promoted CV, update content of the repo, publish and promote the CV again, resync @@ -256,7 +270,7 @@ def test_positive_sync_updated_repo( :BZ: 2025494 """ - repo = entities.Repository(url=None, product=function_product).create() + repo = target_sat.api.Repository(url=None, product=function_product).create() # Associate the lifecycle environment with the 
capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -274,7 +288,7 @@ def test_positive_sync_updated_repo( assert repo.read().content_counts['rpm'] == 1 # Create, publish and promote CV with the repository to the Capsule's LCE - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 @@ -329,7 +343,12 @@ def test_positive_sync_updated_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_capsule_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create repository, add it to lifecycle environment, assign lifecycle environment with a capsule, sync repository, sync it once again, update @@ -353,7 +372,7 @@ def test_positive_capsule_sync( capsule """ repo_url = settings.repos.yum_1.url - repo = entities.Repository(product=function_product, url=repo_url).create() + repo = target_sat.api.Repository(product=function_product, url=repo_url).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce.id} @@ -364,7 +383,7 @@ def test_positive_capsule_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -491,12 +510,12 @@ def test_positive_iso_library_sync( reposet=constants.REPOSET['rhsc7_iso'], releasever=None, ) - rh_repo = 
entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization - lce = entities.LifecycleEnvironment(organization=module_entitlement_manifest_org).search( - query={'search': f'name={constants.ENVIRONMENT}'} - )[0] + lce = module_target_sat.api.LifecycleEnvironment( + organization=module_entitlement_manifest_org + ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -508,7 +527,7 @@ def test_positive_iso_library_sync( assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView( + cv = module_target_sat.api.ContentView( organization=module_entitlement_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view @@ -536,7 +555,12 @@ def test_positive_iso_library_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_on_demand_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with 'on_demand' policy, add it to a CV, promote to an 'on_demand' Capsule's LCE, download a published package, @@ -555,7 +579,7 @@ def test_positive_on_demand_sync( repo_url = settings.repos.yum_3.url packages_count = constants.FAKE_3_YUM_REPOS_COUNT package = constants.FAKE_3_YUM_REPO_RPMS[0] - repo = entities.Repository( + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, @@ -574,7 +598,7 @@ def test_positive_on_demand_sync( 
self.update_capsule_download_policy(module_capsule_configured, 'on_demand') # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -616,7 +640,12 @@ def test_positive_on_demand_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_update_with_immediate_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with on_demand download policy, associate it with capsule, sync repo, update download policy to immediate, sync once @@ -633,7 +662,7 @@ def test_positive_update_with_immediate_sync( """ repo_url = settings.repos.yum_1.url packages_count = constants.FAKE_1_YUM_REPOS_COUNT - repo = entities.Repository( + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, @@ -651,7 +680,7 @@ def test_positive_update_with_immediate_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -765,8 +794,10 @@ def test_positive_sync_kickstart_repo( repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - repo = entities.Repository(id=repo_id).read() - lce = entities.LifecycleEnvironment(organization=function_entitlement_manifest_org).create() + repo = target_sat.api.Repository(id=repo_id).read() + lce = 
target_sat.api.LifecycleEnvironment( + organization=function_entitlement_manifest_org + ).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -780,7 +811,7 @@ def test_positive_sync_kickstart_repo( self.update_capsule_download_policy(module_capsule_configured, 'on_demand') # Create a content view with the repository - cv = entities.ContentView( + cv = target_sat.api.ContentView( organization=function_entitlement_manifest_org, repository=[repo] ).create() # Sync repository @@ -868,7 +899,7 @@ def test_positive_sync_container_repo_end_to_end( repos = [] for ups_name in upstream_names: - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='docker', docker_upstream_name=ups_name, product=function_product, @@ -886,7 +917,7 @@ def test_positive_sync_container_repo_end_to_end( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories - cv = entities.ContentView(organization=function_org, repository=repos).create() + cv = target_sat.api.ContentView(organization=function_org, repository=repos).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 @@ -995,7 +1026,7 @@ def test_positive_sync_collection_repo( - name: theforeman.operations version: "0.1.0" ''' - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='ansible_collection', ansible_collection_requirements=requirements, product=function_product, @@ -1066,7 +1097,7 @@ def test_positive_sync_file_repo( :BZ: 1985122 """ - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='file', product=function_product, url=constants.FAKE_FILE_LARGE_URL, @@ -1086,7 +1117,7 @@ def test_positive_sync_file_repo( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories 
- cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 diff --git a/tests/foreman/api/test_computeprofile.py b/tests/foreman/api/test_computeprofile.py index d0ee003c7a3..dffe47620b4 100644 --- a/tests/foreman/api/test_computeprofile.py +++ b/tests/foreman/api/test_computeprofile.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -29,7 +28,7 @@ @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create new Compute Profile using different names :id: 97d04911-9368-4674-92c7-1e3ff114bc18 @@ -42,13 +41,13 @@ def test_positive_create_with_name(name): :parametrized: yes """ - profile = entities.ComputeProfile(name=name).create() + profile = target_sat.api.ComputeProfile(name=name).create() assert name == profile.name @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create(name): +def test_negative_create(name, target_sat): """Attempt to create Compute Profile using invalid names only :id: 2d34a1fd-70a5-4e59-b2e2-86fbfe8e31ab @@ -62,12 +61,12 @@ def test_negative_create(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ComputeProfile(name=name).create() + target_sat.api.ComputeProfile(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat): """Update selected Compute Profile entity using proper names :id: c79193d7-2e0f-4ed9-b947-05feeddabfda @@ -80,15 +79,15 @@ def test_positive_update_name(new_name): :parametrized: yes """ - profile = 
entities.ComputeProfile().create() - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + profile = target_sat.api.ComputeProfile().create() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = target_sat.api.ComputeProfile(id=profile.id).read() assert new_name == updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat): """Attempt to update Compute Profile entity using invalid names only :id: 042b40d5-a78b-4e65-b5cb-5b270b800b37 @@ -101,16 +100,16 @@ def test_negative_update_name(new_name): :parametrized: yes """ - profile = entities.ComputeProfile().create() + profile = target_sat.api.ComputeProfile().create() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = target_sat.api.ComputeProfile(id=profile.id).read() assert new_name != updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_delete(new_name): +def test_positive_delete(new_name, target_sat): """Delete Compute Profile entity :id: 0a620e23-7ba6-4178-af7a-fd1e332f478f @@ -123,7 +122,7 @@ def test_positive_delete(new_name): :parametrized: yes """ - profile = entities.ComputeProfile(name=new_name).create() + profile = target_sat.api.ComputeProfile(name=new_name).create() profile.delete() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id).read() diff --git a/tests/foreman/api/test_contentcredentials.py b/tests/foreman/api/test_contentcredentials.py index 
81602c953e4..aaad0322976 100644 --- a/tests/foreman/api/test_contentcredentials.py +++ b/tests/foreman/api/test_contentcredentials.py @@ -19,7 +19,6 @@ from copy import copy from fauxfactory import gen_string -from nailgun import entities import pytest from requests import HTTPError @@ -35,7 +34,7 @@ @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a GPG key with valid name. :id: 741d969b-28ef-481f-bcf7-ed4cd920b030 @@ -46,12 +45,12 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, name=name).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert name == gpg_key.name @pytest.mark.tier1 -def test_positive_create_with_content(module_org): +def test_positive_create_with_content(module_org, module_target_sat): """Create a GPG key with valid name and valid gpg key text. :id: cfa6690e-fed7-49cf-94f9-fd2deed941c0 @@ -60,13 +59,13 @@ def test_positive_create_with_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() assert key_content == gpg_key.content @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_name(module_org, name): +def test_negative_create_name(module_org, name, module_target_sat): """Attempt to create GPG key with invalid names only. 
:id: 904a3ed0-7d50-495e-a700-b4f1ae913599 @@ -78,13 +77,13 @@ def test_negative_create_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(module_org, module_target_sat): """Attempt to create a GPG key providing a name of already existent entity @@ -95,15 +94,15 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_content(module_org): +def test_negative_create_with_content(module_org, module_target_sat): """Attempt to create GPG key with empty content. 
:id: fc79c840-6bcb-4d97-9145-c0008d5b028d @@ -113,14 +112,14 @@ def test_negative_create_with_content(module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, content='').create() + module_target_sat.api.GPGKey(organization=module_org, content='').create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Update GPG key name to another valid name. :id: 9868025d-5346-42c9-b850-916ce37a9541 @@ -131,14 +130,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name gpg_key = gpg_key.update(['name']) assert new_name == gpg_key.name @pytest.mark.tier1 -def test_positive_update_content(module_org): +def test_positive_update_content(module_org, module_target_sat): """Update GPG key content text to another valid one. 
:id: 62fdaf55-c931-4be6-9857-68cc816046ad @@ -147,7 +146,7 @@ def test_positive_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -158,7 +157,7 @@ def test_positive_update_content(module_org): @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, new_name): +def test_negative_update_name(module_org, new_name, module_target_sat): """Attempt to update GPG key name to invalid one :id: 1a43f610-8969-4f08-967f-fb6af0fca31b @@ -169,7 +168,7 @@ def test_negative_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name with pytest.raises(HTTPError) as error: gpg_key.update(['name']) @@ -178,7 +177,7 @@ def test_negative_update_name(module_org, new_name): @pytest.mark.tier1 -def test_negative_update_same_name(module_org): +def test_negative_update_same_name(module_org, module_target_sat): """Attempt to update GPG key name to the name of existing GPG key entity @@ -189,8 +188,8 @@ def test_negative_update_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alpha') - entities.GPGKey(organization=module_org, name=name).create() - new_gpg_key = entities.GPGKey(organization=module_org).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() + new_gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() new_gpg_key.name = name with pytest.raises(HTTPError) as error: new_gpg_key.update(['name']) @@ -199,7 +198,7 @@ def test_negative_update_same_name(module_org): @pytest.mark.tier1 -def test_negative_update_content(module_org): +def test_negative_update_content(module_org, module_target_sat): 
"""Attempt to update GPG key content to invalid one :id: fee30ef8-370a-4fdd-9e45-e7ab95dade8b @@ -208,7 +207,7 @@ def test_negative_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() gpg_key.content = '' with pytest.raises(HTTPError) as error: gpg_key.update(['content']) @@ -218,7 +217,7 @@ def test_negative_update_content(module_org): @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Create a GPG key with different names and then delete it. :id: b06d211f-2827-40f7-b627-8b1fbaee2eb4 @@ -227,7 +226,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.delete() with pytest.raises(HTTPError): gpg_key.read() diff --git a/tests/foreman/api/test_contentview.py b/tests/foreman/api/test_contentview.py index 4bf80287c3f..f2015bd11cf 100644 --- a/tests/foreman/api/test_contentview.py +++ b/tests/foreman/api/test_contentview.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_integer, gen_string, gen_utf8 -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -48,8 +47,8 @@ @pytest.fixture(scope='class') -def class_cv(module_org): - return entities.ContentView(organization=module_org).create() +def class_cv(module_org, class_target_sat): + return class_target_sat.api.ContentView(organization=module_org).create() @pytest.fixture(scope='class') @@ -65,22 +64,22 @@ def class_promoted_cv(class_published_cv, module_lce): @pytest.fixture(scope='class') -def class_cloned_cv(class_cv): - copied_cv_id = entities.ContentView(id=class_cv.id).copy( +def class_cloned_cv(class_cv, class_target_sat): + copied_cv_id = 
class_target_sat.api.ContentView(id=class_cv.id).copy( data={'name': gen_string('alpha', gen_integer(3, 30))} )['id'] - return entities.ContentView(id=copied_cv_id).read() + return class_target_sat.api.ContentView(id=copied_cv_id).read() @pytest.fixture(scope='class') -def class_published_cloned_cv(class_cloned_cv): +def class_published_cloned_cv(class_cloned_cv, class_target_sat): class_cloned_cv.publish() - return entities.ContentView(id=class_cloned_cv.id).read() + return class_target_sat.api.ContentView(id=class_cloned_cv.id).read() @pytest.fixture -def content_view(module_org): - return entities.ContentView(organization=module_org).create() +def content_view(module_org, module_target_sat): + return module_target_sat.api.ContentView(organization=module_org).create() def apply_package_filter(content_view, repo, package, target_sat, inclusion=True): @@ -93,7 +92,7 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True :return list : list of content view versions """ - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[repo] ).create() cv_filter_rule = target_sat.api.ContentViewFilterRule( @@ -109,7 +108,9 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True class TestContentView: @pytest.mark.upgrade @pytest.mark.tier3 - def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, module_org): + def test_positive_subscribe_host( + self, class_cv, class_promoted_cv, module_lce, module_org, module_target_sat + ): """Subscribe a host to a content view :id: b5a08369-bf92-48ab-b9aa-10f5b9774b79 @@ -130,7 +131,7 @@ def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, # Check that no host associated to just created content view assert class_cv.content_host_count == 0 assert len(class_promoted_cv.version) == 1 - host = entities.Host( + host = 
module_target_sat.api.Host( content_facet_attributes={ 'content_view_id': class_cv.id, 'lifecycle_environment_id': module_lce.id, @@ -160,7 +161,9 @@ def test_positive_clone_within_same_env(self, class_published_cloned_cv, module_ @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_clone_with_diff_env(self, module_org, class_published_cloned_cv): + def test_positive_clone_with_diff_env( + self, module_org, class_published_cloned_cv, module_target_sat + ): """attempt to create, publish and promote new content view based on existing view but promoted to a different environment @@ -174,11 +177,11 @@ def test_positive_clone_with_diff_env(self, module_org, class_published_cloned_c :CaseImportance: Medium """ - le_clone = entities.LifecycleEnvironment(organization=module_org).create() + le_clone = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() class_published_cloned_cv.read().version[0].promote(data={'environment_ids': le_clone.id}) @pytest.mark.tier2 - def test_positive_add_custom_content(self, module_product, module_org): + def test_positive_add_custom_content(self, module_product, module_org, module_target_sat): """Associate custom content in a view :id: db452e0c-0c17-40f2-bab4-8467e7a875f1 @@ -189,9 +192,9 @@ def test_positive_add_custom_content(self, module_product, module_org): :CaseImportance: Critical """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() - content_view = entities.ContentView(organization=module_org.id).create() + content_view = module_target_sat.api.ContentView(organization=module_org.id).create() assert len(content_view.repository) == 0 content_view.repository = [yum_repo] content_view = content_view.update(['repository']) @@ -202,7 +205,9 @@ def test_positive_add_custom_content(self, module_product, module_org): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing 
repos_hosting_url' ) - def test_positive_add_custom_module_streams(self, content_view, module_product, module_org): + def test_positive_add_custom_module_streams( + self, content_view, module_product, module_org, module_target_sat + ): """Associate custom content (module streams) in a view :id: 9e4821cb-293a-4d84-bd1f-bb9fff36b143 @@ -213,7 +218,7 @@ def test_positive_add_custom_module_streams(self, content_view, module_product, :CaseImportance: High """ - yum_repo = entities.Repository( + yum_repo = module_target_sat.api.Repository( product=module_product, url=settings.repos.module_stream_1.url ).create() yum_repo.sync() @@ -226,7 +231,9 @@ def test_positive_add_custom_module_streams(self, content_view, module_product, assert repo.content_counts['module_stream'] == 7 @pytest.mark.tier2 - def test_negative_add_dupe_repos(self, content_view, module_product, module_org): + def test_negative_add_dupe_repos( + self, content_view, module_product, module_org, module_target_sat + ): """Attempt to associate the same repo multiple times within a content view @@ -238,7 +245,7 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) :CaseImportance: Low """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() assert len(content_view.repository) == 0 content_view.repository = [yum_repo, yum_repo] @@ -251,7 +258,7 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_add_sha512_rpm(self, content_view, module_org): + def test_positive_add_sha512_rpm(self, content_view, module_org, module_target_sat): """Associate sha512 RPM content in a view :id: 1f473b02-5e2b-41ff-a706-c0635abc2476 @@ -270,8 +277,10 @@ def test_positive_add_sha512_rpm(self, content_view, module_org): :BZ: 1639406 """ - product = 
entities.Product(organization=module_org).create() - yum_sha512_repo = entities.Repository(product=product, url=CUSTOM_RPM_SHA_512).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_sha512_repo = module_target_sat.api.Repository( + product=product, url=CUSTOM_RPM_SHA_512 + ).create() yum_sha512_repo.sync() repo_content = yum_sha512_repo.read() # Assert that the repository content was properly synced @@ -295,7 +304,7 @@ class TestContentViewCreate: @pytest.mark.parametrize('composite', [True, False]) @pytest.mark.tier1 - def test_positive_create_composite(self, composite): + def test_positive_create_composite(self, composite, target_sat): """Create composite and non-composite content views. :id: 4a3b616d-53ab-4396-9a50-916d6c42a401 @@ -307,11 +316,11 @@ def test_positive_create_composite(self, composite): :CaseImportance: Critical """ - assert entities.ContentView(composite=composite).create().composite == composite + assert target_sat.api.ContentView(composite=composite).create().composite == composite @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, name, target_sat): """Create empty content-view with random names. :id: 80d36498-2e71-4aa9-b696-f0a45e86267f @@ -322,11 +331,11 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - assert entities.ContentView(name=name).create().name == name + assert target_sat.api.ContentView(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_description(self, desc): + def test_positive_create_with_description(self, desc, target_sat): """Create empty content view with random description. 
:id: 068e3e7c-34ac-47cb-a1bb-904d12c74cc7 @@ -337,10 +346,10 @@ def test_positive_create_with_description(self, desc): :CaseImportance: High """ - assert entities.ContentView(description=desc).create().description == desc + assert target_sat.api.ContentView(description=desc).create().description == desc @pytest.mark.tier1 - def test_positive_clone(self, content_view, module_org): + def test_positive_clone(self, content_view, module_org, module_target_sat): """Create a content view by copying an existing one :id: ee03dc63-e2b0-4a89-a828-2910405279ff @@ -349,7 +358,7 @@ def test_positive_clone(self, content_view, module_org): :CaseImportance: Critical """ - cloned_cv = entities.ContentView( + cloned_cv = module_target_sat.api.ContentView( id=content_view.copy(data={'name': gen_string('alpha', gen_integer(3, 30))})['id'] ).read_json() cv_origin = content_view.read_json() @@ -361,7 +370,7 @@ def test_positive_clone(self, content_view, module_org): @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Create content view providing an invalid name. 
:id: 261376ca-7d12-41b6-9c36-5f284865243e @@ -373,7 +382,7 @@ def test_negative_create_with_invalid_name(self, name): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.ContentView(name=name).create() + target_sat.api.ContentView(name=name).create() class TestContentViewPublishPromote: @@ -383,16 +392,18 @@ class TestContentViewPublishPromote: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.fixture(scope='class', autouse=True) - def class_setup(self, request, module_product): + def class_setup(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.yum_repo = entities.Repository(product=module_product).create() + request.cls.yum_repo = class_target_sat.api.Repository(product=module_product).create() self.yum_repo.sync() - request.cls.swid_repo = entities.Repository( + request.cls.swid_repo = class_target_sat.api.Repository( product=module_product, url=settings.repos.swid_tag.url ).create() self.swid_repo.sync() - def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): + def add_content_views_to_composite( + self, module_target_sat, composite_cv, module_org, cv_amount=1 + ): """Add necessary number of content views to the composite one :param composite_cv: Composite content view object @@ -400,7 +411,7 @@ def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): """ cv_versions = [] for _ in range(cv_amount): - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() cv_versions.append(content_view.read().version[0]) composite_cv.component = cv_versions @@ -435,7 +446,7 @@ def test_positive_publish_with_content_multiple(self, content_view, module_org): assert cvv.read_json()['package_count'] > 0 @pytest.mark.tier2 - def 
test_positive_publish_composite_multiple_content_once(self, module_org): + def test_positive_publish_composite_multiple_content_once(self, module_org, module_target_sat): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view once. @@ -449,16 +460,20 @@ def test_positive_publish_composite_multiple_content_once(self, module_org): :CaseImportance: Critical """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() - self.add_content_views_to_composite(composite_cv, module_org, random.randint(2, 3)) + self.add_content_views_to_composite( + module_target_sat, composite_cv, module_org, random.randint(2, 3) + ) composite_cv.publish() assert len(composite_cv.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_composite_multiple_content_multiple(self, module_org): + def test_positive_publish_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view several times. @@ -472,7 +487,7 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -483,7 +498,7 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): assert len(composite_cv.read().version) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_yum_multiple(self, content_view, module_org): + def test_positive_promote_with_yum_multiple(self, content_view, module_org, module_target_sat): """Give a content view a yum repo, publish it once and promote the content view version ``REPEAT + 1`` times. 
@@ -504,7 +519,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): # Promote the content view version. for _ in range(REPEAT): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id}) # Everything's done - check some content view attributes... @@ -518,7 +533,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): assert cvv_attrs['package_count'] > 0 @pytest.mark.tier2 - def test_positive_add_to_composite(self, content_view, module_org): + def test_positive_add_to_composite(self, content_view, module_org, module_target_sat): """Create normal content view, publish and add it to a new composite content view @@ -536,7 +551,7 @@ def test_positive_add_to_composite(self, content_view, module_org): content_view.publish() content_view = content_view.read() - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -549,7 +564,9 @@ def test_positive_add_to_composite(self, content_view, module_org): assert composite_cv.component[0].read().content_view.id == content_view.id @pytest.mark.tier2 - def test_negative_add_components_to_composite(self, content_view, module_org): + def test_negative_add_components_to_composite( + self, content_view, module_org, module_target_sat + ): """Attempt to associate components in a non-composite content view @@ -565,7 +582,7 @@ def test_negative_add_components_to_composite(self, content_view, module_org): content_view.update(['repository']) content_view.publish() content_view = content_view.read() - non_composite_cv = entities.ContentView( + non_composite_cv = module_target_sat.api.ContentView( composite=False, organization=module_org, ).create() @@ -576,7 +593,9 @@ def test_negative_add_components_to_composite(self, content_view, 
module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_once(self, module_lce, module_org): + def test_positive_promote_composite_multiple_content_once( + self, module_lce, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view once. @@ -590,7 +609,7 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -603,7 +622,9 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_multiple(self, module_org): + def test_positive_promote_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view ``Library + random`` times. 
@@ -617,7 +638,7 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -627,7 +648,7 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): envs_amount = random.randint(2, 3) for _ in range(envs_amount): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() composite_cv.version[0].promote(data={'environment_ids': lce.id}) composite_cv = composite_cv.read() assert len(composite_cv.version) == 1 @@ -671,7 +692,7 @@ def test_positive_promote_out_of_sequence(self, content_view, module_org): @pytest.mark.tier3 @pytest.mark.pit_server - def test_positive_publish_multiple_repos(self, content_view, module_org): + def test_positive_publish_multiple_repos(self, content_view, module_org, module_target_sat): """Attempt to publish a content view with multiple YUM repos. 
:id: 5557a33b-7a6f-45f5-9fe4-23a704ed9e21 @@ -690,9 +711,9 @@ def test_positive_publish_multiple_repos(self, content_view, module_org): :BZ: 1651930 """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(10): - repo = entities.Repository(product=product).create() + repo = module_target_sat.api.Repository(product=product).create() repo.sync() content_view.repository.append(repo) content_view = content_view.update(['repository']) @@ -721,13 +742,13 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): :CaseImportance: Medium """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository( content_type='yum', product=product, url=settings.repos.module_stream_1.url ).create() repo.sync() - content_view_1 = entities.ContentView(organization=module_org).create() - content_view_2 = entities.ContentView(organization=module_org).create() + content_view_1 = target_sat.api.ContentView(organization=module_org).create() + content_view_2 = target_sat.api.ContentView(organization=module_org).create() # create content views with same repo and different filter for content_view, package in [(content_view_1, 'camel'), (content_view_2, 'cow')]: @@ -737,7 +758,7 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): assert content_view_info.package_count == 35 # create composite content view with these two published content views - comp_content_view = entities.ContentView( + comp_content_view = target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -877,7 +898,7 @@ def test_positive_update_attributes(self, module_cv, key, value): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_update_name(self, module_cv, 
new_name): + def test_positive_update_name(self, module_cv, new_name, module_target_sat): """Create content view providing the initial name, then update its name to another valid name. @@ -891,12 +912,12 @@ def test_positive_update_name(self, module_cv, new_name): """ module_cv.name = new_name module_cv.update(['name']) - updated = entities.ContentView(id=module_cv.id).read() + updated = module_target_sat.api.ContentView(id=module_cv.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_update_name(self, module_cv, new_name): + def test_negative_update_name(self, module_cv, new_name, module_target_sat): """Create content view then update its name to an invalid name. @@ -920,7 +941,7 @@ class TestContentViewDelete: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_delete(self, content_view, name): + def test_positive_delete(self, content_view, name, target_sat): """Create content view and then delete it. 
:id: d582f1b3-8118-4e78-a639-237c6f9d27c6 @@ -933,7 +954,7 @@ def test_positive_delete(self, content_view, name): """ content_view.delete() with pytest.raises(HTTPError): - entities.ContentView(id=content_view.id).read() + target_sat.api.ContentView(id=content_view.id).read() @pytest.mark.run_in_one_thread @@ -942,11 +963,11 @@ class TestContentViewRedHatContent: @pytest.fixture(scope='class', autouse=True) def initiate_testclass( - self, request, module_cv, module_entitlement_manifest_org, module_target_sat + self, request, module_cv, module_entitlement_manifest_org, class_target_sat ): """Set up organization, product and repositories for tests.""" - repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + repo_id = class_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_entitlement_manifest_org.id, product=PRDS['rhel'], @@ -954,7 +975,7 @@ def initiate_testclass( reposet=REPOSET['rhst7'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() module_cv.repository = [self.repo] module_cv.update(['repository']) @@ -976,7 +997,7 @@ def test_positive_add_rh(self): assert self.yumcv.repository[0].read().name == REPOS['rhst7']['name'] @pytest.mark.tier2 - def test_positive_add_rh_custom_spin(self): + def test_positive_add_rh_custom_spin(self, target_sat): """Associate Red Hat content in a view and filter it using rule :id: 30c3103d-9503-4501-8117-1f2d25353215 @@ -989,7 +1010,7 @@ def test_positive_add_rh_custom_spin(self): :CaseImportance: High """ # content_view ← cv_filter - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=self.yumcv, inclusion='true', name=gen_string('alphanumeric'), @@ -997,13 +1018,13 @@ def test_positive_add_rh_custom_spin(self): assert self.yumcv.id == cv_filter.content_view.id # content_view ← cv_filter ← cv_filter_rule - cv_filter_rule = 
entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, name=gen_string('alphanumeric'), version='1.0' ).create() assert cv_filter.id == cv_filter_rule.content_view_filter.id @pytest.mark.tier2 - def test_positive_update_rh_custom_spin(self): + def test_positive_update_rh_custom_spin(self, target_sat): """Edit content views for a custom rh spin. For example, modify a filter @@ -1016,12 +1037,12 @@ def test_positive_update_rh_custom_spin(self): :CaseImportance: High """ - cvf = entities.ErratumContentViewFilter( + cvf = target_sat.api.ErratumContentViewFilter( content_view=self.yumcv, ).create() assert self.yumcv.id == cvf.content_view.id - cv_filter_rule = entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cvf, types=[FILTER_ERRATA_TYPE['enhancement']] ).create() assert cv_filter_rule.types == [FILTER_ERRATA_TYPE['enhancement']] @@ -1048,7 +1069,7 @@ def test_positive_publish_rh(self, module_org, content_view): assert len(content_view.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_rh_custom_spin(self, module_org, content_view): + def test_positive_publish_rh_custom_spin(self, module_org, content_view, module_target_sat): """Attempt to publish a content view containing Red Hat spin - i.e., contains filters. 
@@ -1062,7 +1083,7 @@ def test_positive_publish_rh_custom_spin(self, module_org, content_view): """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() @@ -1093,7 +1114,7 @@ def test_positive_promote_rh(self, module_org, content_view, module_lce): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_rh_custom_spin(self, content_view, module_lce): + def test_positive_promote_rh_custom_spin(self, content_view, module_lce, module_target_sat): """Attempt to promote a content view containing Red Hat spin - i.e., contains filters. @@ -1107,7 +1128,7 @@ def test_positive_promote_rh_custom_spin(self, content_view, module_lce): """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() @@ -1357,7 +1378,7 @@ def test_negative_readonly_user_actions( # create a role with content views read only permissions target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( filters={'name': 'view_content_views'}, query={'search': 'resource_type="Katello::ContentView"'}, ), @@ -1366,7 +1387,7 @@ def test_negative_readonly_user_actions( # create environment permissions and assign it to our role target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( query={'search': 'resource_type="Katello::KTEnvironment"'} ), role=function_role, @@ -1471,9 +1492,9 @@ class TestOstreeContentView: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) 
@pytest.fixture(scope='class', autouse=True) - def initiate_testclass(self, request, module_product): + def initiate_testclass(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.ostree_repo = entities.Repository( + request.cls.ostree_repo = class_target_sat.api.Repository( product=module_product, content_type='ostree', url=FEDORA_OSTREE_REPO, @@ -1481,13 +1502,13 @@ def initiate_testclass(self, request, module_product): ).create() self.ostree_repo.sync() # Create new yum repository - request.cls.yum_repo = entities.Repository( + request.cls.yum_repo = class_target_sat.api.Repository( url=settings.repos.yum_1.url, product=module_product, ).create() self.yum_repo.sync() # Create new docker repository - request.cls.docker_repo = entities.Repository( + request.cls.docker_repo = class_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product, @@ -1595,7 +1616,7 @@ def initiate_testclass(self, request, module_entitlement_manifest_org, class_tar reposet=REPOSET['rhaht'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() @pytest.mark.tier2 @@ -1681,7 +1702,7 @@ def test_positive_publish_promote_with_RH_ostree_and_other( releasever=None, ) # Sync repository - rpm_repo = entities.Repository(id=repo_id) + rpm_repo = module_target_sat.api.Repository(id=repo_id) rpm_repo.sync() content_view.repository = [self.repo, rpm_repo] content_view = content_view.update(['repository']) diff --git a/tests/foreman/api/test_contentviewfilter.py b/tests/foreman/api/test_contentviewfilter.py index 5c1e175ed4c..dd07d57dd27 100644 --- a/tests/foreman/api/test_contentviewfilter.py +++ b/tests/foreman/api/test_contentviewfilter.py @@ -24,7 +24,7 @@ from random import randint from fauxfactory import gen_integer, gen_string -from nailgun import client, entities +from nailgun 
import client import pytest from requests.exceptions import HTTPError @@ -38,15 +38,15 @@ @pytest.fixture(scope='module') -def sync_repo(module_product): - repo = entities.Repository(product=module_product).create() +def sync_repo(module_product, module_target_sat): + repo = module_target_sat.api.Repository(product=module_product).create() repo.sync() return repo @pytest.fixture(scope='module') -def sync_repo_module_stream(module_product): - repo = entities.Repository( +def sync_repo_module_stream(module_product, module_target_sat): + repo = module_target_sat.api.Repository( content_type='yum', product=module_product, url=settings.repos.module_stream_1.url ).create() repo.sync() @@ -54,13 +54,15 @@ def sync_repo_module_stream(module_product): @pytest.fixture -def content_view(module_org, sync_repo): - return entities.ContentView(organization=module_org, repository=[sync_repo]).create() +def content_view(module_org, sync_repo, module_target_sat): + return module_target_sat.api.ContentView( + organization=module_org, repository=[sync_repo] + ).create() @pytest.fixture -def content_view_module_stream(module_org, sync_repo_module_stream): - return entities.ContentView( +def content_view_module_stream(module_org, sync_repo_module_stream, module_target_sat): + return module_target_sat.api.ContentView( organization=module_org, repository=[sync_repo_module_stream] ).create() @@ -69,7 +71,7 @@ class TestContentViewFilter: """Tests for content view filters.""" @pytest.mark.tier2 - def test_negative_get_with_no_args(self): + def test_negative_get_with_no_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. 
:id: da29fd90-cd96-49f9-b94e-71d4e3a35a57 @@ -82,14 +84,14 @@ def test_negative_get_with_no_args(self): :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, ) assert response.status_code == http.client.OK @pytest.mark.tier2 - def test_negative_get_with_bad_args(self): + def test_negative_get_with_bad_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. :id: e6fea726-930b-4b74-b784-41528811994f @@ -102,7 +104,7 @@ def test_negative_get_with_bad_args(self): :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, data={'foo': 'bar'}, @@ -111,7 +113,7 @@ def test_negative_get_with_bad_args(self): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_erratum_with_name(self, name, content_view): + def test_positive_create_erratum_with_name(self, name, content_view, target_sat): """Create new erratum content filter using different inputs as a name :id: f78a133f-441f-4fcc-b292-b9eed228d755 @@ -123,13 +125,13 @@ def test_positive_create_erratum_with_name(self, name, content_view): :CaseLevel: Integration """ - cvf = entities.ErratumContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.ErratumContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'erratum' @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_pkg_group_with_name(self, name, content_view): + def test_positive_create_pkg_group_with_name(self, name, content_view, target_sat): """Create new package group content filter using different inputs as a name :id: f9bfb6bf-a879-4f1a-970d-8f4df533cd59 @@ -143,7 +145,7 @@ def 
test_positive_create_pkg_group_with_name(self, name, content_view): :CaseImportance: Medium """ - cvf = entities.PackageGroupContentViewFilter( + cvf = target_sat.api.PackageGroupContentViewFilter( content_view=content_view, name=name, ).create() @@ -152,7 +154,7 @@ def test_positive_create_pkg_group_with_name(self, name, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_rpm_with_name(self, name, content_view): + def test_positive_create_rpm_with_name(self, name, content_view, target_sat): """Create new RPM content filter using different inputs as a name :id: f1c88e72-7993-47ac-8fbc-c749d32bc768 @@ -166,13 +168,13 @@ def test_positive_create_rpm_with_name(self, name, content_view): :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'rpm' @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def test_positive_create_with_inclusion(self, inclusion, content_view): + def test_positive_create_with_inclusion(self, inclusion, content_view, target_sat): """Create new content view filter with different inclusion values :id: 81130dc9-ae33-48bc-96a7-d54d3e99448e @@ -184,12 +186,14 @@ def test_positive_create_with_inclusion(self, inclusion, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view, inclusion=inclusion).create() + cvf = target_sat.api.RPMContentViewFilter( + content_view=content_view, inclusion=inclusion + ).create() assert cvf.inclusion == inclusion @pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description, content_view): + def test_positive_create_with_description(self, description, content_view, target_sat): """Create new 
content filter using different inputs as a description :id: e057083f-e69d-46e7-b336-45faaf67fa52 @@ -203,14 +207,14 @@ def test_positive_create_with_description(self, description, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, description=description, ).create() assert cvf.description == description @pytest.mark.tier2 - def test_positive_create_with_repo(self, content_view, sync_repo): + def test_positive_create_with_repo(self, content_view, sync_repo, target_sat): """Create new content filter with repository assigned :id: 7207d4cf-3ccf-4d63-a50a-1373b16062e2 @@ -220,7 +224,7 @@ def test_positive_create_with_repo(self, content_view, sync_repo): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -230,7 +234,7 @@ def test_positive_create_with_repo(self, content_view, sync_repo): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) def test_positive_create_with_original_packages( - self, original_packages, content_view, sync_repo + self, original_packages, content_view, sync_repo, target_sat ): """Create new content view filter with different 'original packages' option values @@ -246,7 +250,7 @@ def test_positive_create_with_original_packages( :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -255,7 +259,9 @@ def test_positive_create_with_original_packages( assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_create_with_docker_repos(self, module_product, sync_repo, content_view): + def test_positive_create_with_docker_repos( + self, module_product, sync_repo, content_view, module_target_sat + ): """Create new docker repository and add to content 
view that has yum repo already assigned to it. Create new content view filter and assign it to the content view. @@ -267,7 +273,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont :CaseLevel: Integration """ - docker_repository = entities.Repository( + docker_repository = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -276,7 +282,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont content_view.repository = [sync_repo, docker_repository] content_view.update(['repository']) - cvf = entities.RPMContentViewFilter( + cvf = module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo, docker_repository], @@ -290,7 +296,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) def test_positive_create_with_module_streams( - self, module_product, sync_repo, sync_repo_module_stream, content_view + self, module_product, sync_repo, sync_repo_module_stream, content_view, target_sat ): """Verify Include and Exclude Filters creation for modulemd (module streams) @@ -304,7 +310,7 @@ def test_positive_create_with_module_streams( content_view.repository += [sync_repo_module_stream] content_view.update(['repository']) for inclusion in (True, False): - cvf = entities.ModuleStreamContentViewFilter( + cvf = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[sync_repo, sync_repo_module_stream], @@ -316,7 +322,7 @@ def test_positive_create_with_module_streams( @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_name(self, name, content_view): + def test_negative_create_with_invalid_name(self, name, content_view, target_sat): """Try to create content view filter 
using invalid names only :id: 8cf4227b-75c4-4d6f-b94f-88e4eb586435 @@ -330,10 +336,10 @@ def test_negative_create_with_invalid_name(self, name, content_view): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=content_view, name=name).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() @pytest.mark.tier2 - def test_negative_create_with_same_name(self, content_view): + def test_negative_create_with_same_name(self, content_view, target_sat): """Try to create content view filter using same name twice :id: 73a64ca7-07a3-49ee-8921-0474a16a23ff @@ -345,12 +351,12 @@ def test_negative_create_with_same_name(self, content_view): :CaseImportance: Low """ kwargs = {'content_view': content_view, 'name': gen_string('alpha')} - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() with pytest.raises(HTTPError): - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() @pytest.mark.tier2 - def test_negative_create_without_cv(self): + def test_negative_create_without_cv(self, target_sat): """Try to create content view filter without providing content view @@ -363,10 +369,10 @@ def test_negative_create_without_cv(self): :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=None).create() + target_sat.api.RPMContentViewFilter(content_view=None).create() @pytest.mark.tier2 - def test_negative_create_with_invalid_repo_id(self, content_view): + def test_negative_create_with_invalid_repo_id(self, content_view, target_sat): """Try to create content view filter using incorrect repository id @@ -379,13 +385,13 @@ def test_negative_create_with_invalid_repo_id(self, content_view): :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=content_view, 
repository=[gen_integer(10000, 99999)], ).create() @pytest.mark.tier2 - def test_positive_delete_by_id(self, content_view): + def test_positive_delete_by_id(self, content_view, target_sat): """Delete content view filter :id: 07caeb9d-419d-43f8-996b-456b0cc0f70d @@ -396,14 +402,14 @@ def test_positive_delete_by_id(self, content_view): :CaseImportance: Critical """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.delete() with pytest.raises(HTTPError): cvf.read() @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_update_name(self, name, content_view): + def test_positive_update_name(self, name, content_view, target_sat): """Update content view filter with new name :id: f310c161-00d2-4281-9721-6e45cbc5e4ec @@ -415,13 +421,13 @@ def test_positive_update_name(self, name, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name assert cvf.update(['name']).name == name @pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_update_description(self, description, content_view): + def test_positive_update_description(self, description, content_view, target_sat): """Update content view filter with new description :id: f2c5db28-0163-4cf3-929a-16ba1cb98c34 @@ -435,14 +441,14 @@ def test_positive_update_description(self, description, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.description = description cvf = cvf.update(['description']) assert cvf.description == description @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def 
test_positive_update_inclusion(self, inclusion, content_view): + def test_positive_update_inclusion(self, inclusion, content_view, target_sat): """Update content view filter with new inclusion value :id: 0aedd2d6-d020-4a90-adcd-01694b47c0b0 @@ -454,13 +460,13 @@ def test_positive_update_inclusion(self, inclusion, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.inclusion = inclusion cvf = cvf.update(['inclusion']) assert cvf.inclusion == inclusion @pytest.mark.tier2 - def test_positive_update_repo(self, module_product, sync_repo, content_view): + def test_positive_update_repo(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with new repository :id: 329ef155-c2d0-4aa2-bac3-79087ae49bdf @@ -470,12 +476,12 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() content_view.repository = [new_repo] content_view.update(['repository']) @@ -485,7 +491,7 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): assert cvf.repository[0].id == new_repo.id @pytest.mark.tier2 - def test_positive_update_repos(self, module_product, sync_repo, content_view): + def test_positive_update_repos(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with multiple repositories :id: 478fbb1c-fa1d-4fcd-93d6-3a7f47092ed3 @@ -497,12 +503,14 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): :CaseImportance: Low """ - cvf = 
entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - repos = [entities.Repository(product=module_product).create() for _ in range(randint(3, 5))] + repos = [ + target_sat.api.Repository(product=module_product).create() for _ in range(randint(3, 5)) + ] for repo in repos: repo.sync() content_view.repository = repos @@ -513,7 +521,9 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) - def test_positive_update_original_packages(self, original_packages, sync_repo, content_view): + def test_positive_update_original_packages( + self, original_packages, sync_repo, content_view, target_sat + ): """Update content view filter with new 'original packages' option value :id: 0c41e57a-afa3-479e-83ba-01f09f0fd2b6 @@ -525,7 +535,7 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -535,7 +545,9 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_update_repo_with_docker(self, module_product, sync_repo, content_view): + def test_positive_update_repo_with_docker( + self, module_product, sync_repo, content_view, target_sat + ): """Update existing content view filter which has yum repository assigned with new docker repository @@ -546,12 +558,12 @@ def test_positive_update_repo_with_docker(self, module_product, sync_repo, conte :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - docker_repository = 
entities.Repository( + docker_repository = target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -567,7 +579,7 @@ def test_positive_update_repo_with_docker(self, module_product, sync_repo, conte @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_update_name(self, name, content_view): + def test_negative_update_name(self, name, content_view, target_sat): """Try to update content view filter using invalid names only :id: 9799648a-3900-4186-8271-6b2dedb547ab @@ -580,13 +592,13 @@ def test_negative_update_name(self, name, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_same_name(self, content_view): + def test_negative_update_same_name(self, content_view, target_sat): """Try to update content view filter's name to already used one :id: b68569f1-9f7b-4a95-9e2a-a5da348abff7 @@ -598,14 +610,14 @@ def test_negative_update_same_name(self, content_view): :CaseImportance: Low """ name = gen_string('alpha', 8) - entities.RPMContentViewFilter(content_view=content_view, name=name).create() - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_cv_by_id(self, content_view): + def test_negative_update_cv_by_id(self, content_view, target_sat): """Try to update content view filter using incorrect content view ID @@ -615,13 +627,13 @@ def test_negative_update_cv_by_id(self, content_view): :CaseLevel: Integration """ - cvf 
= entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.content_view.id = gen_integer(10000, 99999) with pytest.raises(HTTPError): cvf.update(['content_view']) @pytest.mark.tier2 - def test_negative_update_repo_by_id(self, sync_repo, content_view): + def test_negative_update_repo_by_id(self, sync_repo, content_view, target_sat): """Try to update content view filter using incorrect repository ID @@ -631,7 +643,7 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, repository=[sync_repo], ).create() @@ -640,7 +652,7 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): cvf.update(['repository']) @pytest.mark.tier2 - def test_negative_update_repo(self, module_product, sync_repo, content_view): + def test_negative_update_repo(self, module_product, sync_repo, content_view, target_sat): """Try to update content view filter with new repository which doesn't belong to filter's content view @@ -652,12 +664,12 @@ def test_negative_update_repo(self, module_product, sync_repo, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() cvf.repository = [new_repo] with pytest.raises(HTTPError): @@ -714,7 +726,7 @@ class TestContentViewFilterSearch: """Tests that search through content view filters.""" @pytest.mark.tier1 - def test_positive_search_erratum(self, content_view): + def test_positive_search_erratum(self, content_view, target_sat): """Search for an erratum content view filter's rules. 
:id: 6a86060f-6b4f-4688-8ea9-c198e0aeb3f6 @@ -725,11 +737,11 @@ def test_positive_search_erratum(self, content_view): :BZ: 1242534 """ - cv_filter = entities.ErratumContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.ErratumContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_package_group(self, content_view): + def test_positive_search_package_group(self, content_view, target_sat): """Search for an package group content view filter's rules. :id: 832c50cc-c2c8-48c9-9a23-80956baf5f3c @@ -738,11 +750,11 @@ def test_positive_search_package_group(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.PackageGroupContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.PackageGroupContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_rpm(self, content_view): + def test_positive_search_rpm(self, content_view, target_sat): """Search for an rpm content view filter's rules. 
:id: 1c9058f1-35c4-46f2-9b21-155ef988564a @@ -751,8 +763,8 @@ def test_positive_search_rpm(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.RPMContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() class TestContentViewFilterRule: @@ -762,7 +774,9 @@ class TestContentViewFilterRule: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_promote_module_stream_filter(self, module_org, content_view_module_stream): + def test_positive_promote_module_stream_filter( + self, module_org, content_view_module_stream, target_sat + ): """Verify Module Stream, Errata Count after Promote, Publish for Content View with Module Stream Exclude Filter @@ -775,14 +789,14 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo """ # Exclude module stream filter content_view = content_view_module_stream - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('duck')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -796,7 +810,7 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo assert content_view_version_info.errata_counts['total'] == 3 # Promote Content View - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = target_sat.api.LifecycleEnvironment(organization=module_org).create() 
content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) content_view = content_view.read() content_view_version_info = content_view.version[0].read() @@ -809,7 +823,9 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_include_exclude_module_stream_filter(self, content_view_module_stream): + def test_positive_include_exclude_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Include and Exclude Errata filter(modular errata) automatically force the copy of the module streams associated to it. @@ -828,14 +844,14 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module :CaseLevel: Integration """ content_view = content_view_module_stream - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view = content_view.read() @@ -847,14 +863,14 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module # delete the previous content_view_filter cv_filter.delete() - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=False, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, 
errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view_version_info = content_view.read().version[1].read() @@ -867,7 +883,7 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_multi_level_filters(self, content_view_module_stream): + def test_positive_multi_level_filters(self, content_view_module_stream, target_sat): """Verify promotion of Content View and Verify count after applying multi_filters (errata and module stream) @@ -879,24 +895,24 @@ def test_positive_multi_level_filters(self, content_view_module_stream): """ content_view = content_view_module_stream # apply include errata filter - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() # apply exclude module filter - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('walrus')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -910,7 +926,9 @@ def test_positive_multi_level_filters(self, content_view_module_stream): (not 
settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_dependency_solving_module_stream_filter(self, content_view_module_stream): + def test_positive_dependency_solving_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Module Stream Content View Filter's with Dependency Solve 'Yes'. If dependency solving enabled then module streams with deps will not get fetched over even if the exclude filter has been applied. @@ -928,14 +946,14 @@ def test_positive_dependency_solving_module_stream_filter(self, content_view_mod content_view = content_view_module_stream content_view.solve_dependencies = True content_view = content_view.update(['solve_dependencies']) - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}" and version="{}'.format('duck', '20180704244205')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() diff --git a/tests/foreman/api/test_contentviewversion.py b/tests/foreman/api/test_contentviewversion.py index ca0e5e7cf36..5953edeed42 100644 --- a/tests/foreman/api/test_contentviewversion.py +++ b/tests/foreman/api/test_contentviewversion.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -31,11 +30,13 @@ @pytest.fixture(scope='module') -def module_lce_cv(module_org): +def module_lce_cv(module_org, module_target_sat): """Create some entities for all tests.""" - lce1 = entities.LifecycleEnvironment(organization=module_org).create() - lce2 = entities.LifecycleEnvironment(organization=module_org, 
prior=lce1).create() - default_cv = entities.ContentView(organization=module_org, name=DEFAULT_CV).search() + lce1 = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce2 = module_target_sat.api.LifecycleEnvironment(organization=module_org, prior=lce1).create() + default_cv = module_target_sat.api.ContentView( + organization=module_org, name=DEFAULT_CV + ).search() default_cvv = default_cv[0].version[0] return lce1, lce2, default_cvv @@ -68,7 +69,7 @@ def test_positive_create(module_cv): @pytest.mark.tier2 -def test_negative_create(module_org): +def test_negative_create(module_org, module_target_sat): """Attempt to create content view version using the 'Default Content View'. :id: 0afd49c6-f3a4-403e-9929-849f51ffa922 @@ -80,7 +81,7 @@ def test_negative_create(module_org): :CaseImportance: Critical """ # The default content view cannot be published - cv = entities.ContentView(organization=module_org.id, name=DEFAULT_CV).search() + cv = module_target_sat.api.ContentView(organization=module_org.id, name=DEFAULT_CV).search() # There should be only 1 record returned assert len(cv) == 1 with pytest.raises(HTTPError): @@ -91,8 +92,8 @@ def test_negative_create(module_org): @pytest.mark.tier2 -def test_positive_promote_valid_environment(module_lce_cv, module_org): - """Promote a content view version to 'next in sequence lifecycle environment. +def test_positive_promote_valid_environment(module_lce_cv, module_org, module_target_sat): + """Promote a content view version to next in sequence lifecycle environment. :id: f205ca06-8ab5-4546-83bd-deac4363d487 @@ -103,7 +104,7 @@ def test_positive_promote_valid_environment(module_lce_cv, module_org): :CaseImportance: Critical """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and promote it. 
cv.publish() # Refresh the entity @@ -123,7 +124,7 @@ def test_positive_promote_valid_environment(module_lce_cv, module_org): @pytest.mark.tier2 -def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv): +def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv, module_target_sat): """Promote a content view version to a lifecycle environment that is 'out of sequence'. @@ -134,7 +135,7 @@ def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv) :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -168,7 +169,7 @@ def test_negative_promote_valid_environment(module_lce_cv): @pytest.mark.tier2 -def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org): +def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org, module_target_sat): """Attempt to promote a content view version to a Lifecycle environment that is 'out of sequence'. @@ -179,7 +180,7 @@ def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org) :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -197,7 +198,7 @@ def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org) @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete(module_org, module_product): +def test_positive_delete(module_org, module_product, module_target_sat): """Create content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -213,15 +214,15 @@ def test_positive_delete(module_org, module_product): :CaseImportance: Critical """ key_content = DataFile.ZOO_CUSTOM_GPG_KEY.read_text() - gpgkey = entities.GPGKey(content=key_content, organization=module_org).create() + gpgkey = module_target_sat.api.GPGKey(content=key_content, organization=module_org).create() # Creates new repository with GPGKey - repo = entities.Repository( + repo = module_target_sat.api.Repository( gpg_key=gpgkey, product=module_product, url=settings.repos.yum_1.url ).create() # sync repository repo.sync() # Create content view - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Associate repository to new content view content_view.repository = [repo] content_view = content_view.update(['repository']) @@ -242,7 +243,7 @@ def test_positive_delete(module_org, module_product): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_delete_non_default(module_org): +def test_positive_delete_non_default(module_org, module_target_sat): """Create content view and publish and promote it to new environment. 
After that try to disassociate content view from 'Library' and one more non-default environment through 'delete_from_environment' @@ -256,13 +257,13 @@ def test_positive_delete_non_default(module_org): :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() assert len(content_view.version) == 1 assert len(content_view.version[0].read().environment) == 1 - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) cvv = content_view.version[0].read() assert len(cvv.environment) == 2 @@ -277,7 +278,7 @@ def test_positive_delete_non_default(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_composite_version(module_org): +def test_positive_delete_composite_version(module_org, module_target_sat): """Create composite content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -293,14 +294,16 @@ def test_positive_delete_composite_version(module_org): :BZ: 1276479 """ # Create product with repository and publish it - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() repo.sync() # Create and publish content views - content_view = entities.ContentView(organization=module_org, repository=[repo]).create() + content_view = module_target_sat.api.ContentView( + organization=module_org, repository=[repo] + ).create() content_view.publish() # Create and publish composite content view - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( organization=module_org, composite=True, component=[content_view.read().version[0]] ).create() composite_cv.publish() @@ -318,7 +321,7 @@ def test_positive_delete_composite_version(module_org): @pytest.mark.tier2 -def test_negative_delete(module_org): +def test_negative_delete(module_org, module_target_sat): """Create content view and publish it. 
Try to delete content view version while content view is still associated with lifecycle environment @@ -331,7 +334,7 @@ def test_negative_delete(module_org): :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() @@ -344,7 +347,7 @@ def test_negative_delete(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_renamed_cv_version_from_default_env(module_org): +def test_positive_remove_renamed_cv_version_from_default_env(module_org, module_target_sat): """Remove version of renamed content view from Library environment :id: 7d5961d0-6a9a-4610-979e-cbc4ddbc50ca @@ -364,11 +367,13 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): """ new_name = gen_string('alpha') # create yum product and repo - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo] content_view = content_view.update(['repository']) # publish the content view @@ -377,7 +382,9 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = 
entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() # ensure that the content view version is promoted to the Library # lifecycle environment assert lce_library.name == ENVIRONMENT @@ -393,7 +400,7 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): @pytest.mark.tier2 -def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove QE promoted content view version from Library environment :id: c7795762-93bd-419c-ac49-d10dc26b842b @@ -412,10 +419,12 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - product = entities.Product(organization=module_org).create() - docker_repo = entities.Repository( + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -423,7 +432,7 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the docker repo - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -432,7 +441,9 @@ def 
test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV and QE lifecycle environments for lce in [lce_dev, lce_qe]: @@ -449,7 +460,7 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove PROD promoted content view version from Library environment :id: 24911876-7c2a-4a12-a3aa-98051dfda29d @@ -468,13 +479,19 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + 
yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -482,7 +499,7 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the yum and docker repos - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -491,7 +508,9 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE PROD lifecycle environments for lce in [lce_dev, lce_qe, lce_prod]: @@ -510,7 +529,7 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_env(module_org): +def test_positive_remove_cv_version_from_env(module_org, module_target_sat): """Remove promoted content view version from environment :id: 17cf18bf-09d5-4641-b0e0-c50e628fa6c8 @@ -532,15 +551,23 @@ def test_positive_remove_cv_version_from_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() 
- lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # docker repo - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -548,7 +575,7 @@ def test_positive_remove_cv_version_from_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum and docker repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -557,7 +584,9 @@ def test_positive_remove_cv_version_from_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + 
lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -582,7 +611,7 @@ def test_positive_remove_cv_version_from_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_multi_env(module_org): +def test_positive_remove_cv_version_from_multi_env(module_org, module_target_sat): """Remove promoted content view version from multiple environments :id: 18b86a68-8e6a-43ea-b95e-188fba125a26 @@ -602,14 +631,22 @@ def test_positive_remove_cv_version_from_multi_env(module_org): :CaseImportance: Low """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = 
module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -617,7 +654,7 @@ def test_positive_remove_cv_version_from_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -626,7 +663,9 @@ def test_positive_remove_cv_version_from_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -648,7 +687,7 @@ def test_positive_remove_cv_version_from_multi_env(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_cv_promoted_to_multi_env(module_org): +def test_positive_delete_cv_promoted_to_multi_env(module_org, module_target_sat): """Delete published content view with version promoted to multiple environments @@ -664,20 +703,28 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): 5. 
Delete the content view, this should delete the content with all it's published/promoted versions from all environments - :expectedresults: The content view doesn't exists + :expectedresults: The content view doesn't exist :CaseLevel: Integration :CaseImportance: Critical """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -685,7 +732,7 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the 
content view @@ -694,7 +741,9 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index 51ac5fec162..9420c01cf84 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -16,16 +16,35 @@ :Upstream: No """ -from fauxfactory import gen_choice, gen_integer +from fauxfactory import gen_choice, gen_integer, gen_string import pytest from requests.exceptions import HTTPError from robottelo.utils.datafactory import valid_data_list +@pytest.fixture(scope='module') +def module_hostgroup(module_org, module_target_sat): + module_hostgroup = module_target_sat.api.HostGroup(organization=[module_org]).create() + yield module_hostgroup + module_hostgroup.delete() + + +@pytest.fixture(scope='module') +def module_location(module_location): + yield module_location + module_location.delete() + + +@pytest.fixture(scope='module') +def module_org(module_org): + yield module_org + module_org.delete() + + @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, target_sat): +def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, module_target_sat): """Create a new discovery rule with several attributes, update them and delete the rule itself. 
@@ -47,7 +66,7 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, name = gen_choice(list(valid_data_list().values())) search = gen_choice(searches) hostname = 'myhost-<%= rand(99999) %>' - discovery_rule = target_sat.api.DiscoveryRule( + discovery_rule = module_target_sat.api.DiscoveryRule( name=name, search_=search, hostname=hostname, @@ -83,6 +102,21 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, discovery_rule.read() +@pytest.mark.tier1 +def test_negative_create_with_invalid_host_limit_and_priority(module_target_sat): + """Create a discovery rule with invalid host limit and priority + + :id: e3c7acb1-ac56-496b-ac04-2a83f66ec290 + + :expectedresults: Validation error should be raised + """ + with pytest.raises(HTTPError): + module_target_sat.api.DiscoveryRule(max_count=gen_string('alpha')).create() + with pytest.raises(HTTPError): + module_target_sat.api.DiscoveryRule(priority=gen_string('alpha')).create() + + +@pytest.mark.stubbed @pytest.mark.tier3 def test_positive_update_and_provision_with_rule_priority( module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org diff --git a/tests/foreman/api/test_docker.py b/tests/foreman/api/test_docker.py index 51a709d7bdc..68a2fe6e378 100644 --- a/tests/foreman/api/test_docker.py +++ b/tests/foreman/api/test_docker.py @@ -15,7 +15,6 @@ from random import choice, randint, shuffle from fauxfactory import gen_string, gen_url -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -31,7 +30,7 @@ DOCKER_PROVIDER = 'Docker' -def _create_repository(product, name=None, upstream_name=None): +def _create_repository(module_target_sat, product, name=None, upstream_name=None): """Create a Docker-based repository. :param product: A ``Product`` object. 
@@ -45,7 +44,7 @@ def _create_repository(product, name=None, upstream_name=None): name = choice(generate_strings_list(15, ['numeric', 'html'])) if upstream_name is None: upstream_name = CONTAINER_UPSTREAM_NAME - return entities.Repository( + return module_target_sat.api.Repository( content_type='docker', docker_upstream_name=upstream_name, name=name, @@ -55,21 +54,21 @@ def _create_repository(product, name=None, upstream_name=None): @pytest.fixture -def repo(module_product): +def repo(module_product, module_target_sat): """Create a single repository.""" - return _create_repository(module_product) + return _create_repository(module_target_sat, module_product) @pytest.fixture -def repos(module_product): +def repos(module_product, module_target_sat): """Create and return a list of repositories.""" - return [_create_repository(module_product) for _ in range(randint(2, 5))] + return [_create_repository(module_target_sat, module_product) for _ in range(randint(2, 5))] @pytest.fixture -def content_view(module_org): +def content_view(module_org, module_target_sat): """Create a content view.""" - return entities.ContentView(composite=False, organization=module_org).create() + return module_target_sat.api.ContentView(composite=False, organization=module_org).create() @pytest.fixture @@ -107,7 +106,7 @@ class TestDockerRepository: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_docker_repository_names())) - def test_positive_create_with_name(self, module_product, name): + def test_positive_create_with_name(self, module_product, name, module_target_sat): """Create one Docker-type repository :id: 3360aab2-74f3-4f6e-a083-46498ceacad2 @@ -119,14 +118,16 @@ def test_positive_create_with_name(self, module_product, name): :CaseImportance: Critical """ - repo = _create_repository(module_product, name) + repo = _create_repository(module_target_sat, module_product, name) assert repo.name == name assert repo.docker_upstream_name == CONTAINER_UPSTREAM_NAME assert 
repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(valid_docker_upstream_names())) - def test_positive_create_with_upstream_name(self, module_product, upstream_name): + def test_positive_create_with_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with a valid docker upstream name @@ -139,13 +140,15 @@ def test_positive_create_with_upstream_name(self, module_product, upstream_name) :CaseImportance: Critical """ - repo = _create_repository(module_product, upstream_name=upstream_name) + repo = _create_repository(module_target_sat, module_product, upstream_name=upstream_name) assert repo.docker_upstream_name == upstream_name assert repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(invalid_docker_upstream_names())) - def test_negative_create_with_invalid_upstream_name(self, module_product, upstream_name): + def test_negative_create_with_invalid_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with a invalid docker upstream name. @@ -159,25 +162,25 @@ def test_negative_create_with_invalid_upstream_name(self, module_product, upstre :CaseImportance: Critical """ with pytest.raises(HTTPError): - _create_repository(module_product, upstream_name=upstream_name) + _create_repository(module_target_sat, module_product, upstream_name=upstream_name) @pytest.mark.tier2 - def test_positive_create_repos_using_same_product(self, module_product): + def test_positive_create_repos_using_same_product(self, module_product, module_target_sat): """Create multiple Docker-type repositories :id: 4a6929fc-5111-43ff-940c-07a754828630 :expectedresults: Multiple docker repositories are created with a - Docker usptream repository and they all belong to the same product. + Docker upstream repository and they all belong to the same product. 
:CaseLevel: Integration """ for _ in range(randint(2, 5)): - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) assert repo.id in [repo_.id for repo_ in module_product.read().repository] @pytest.mark.tier2 - def test_positive_create_repos_using_multiple_products(self, module_org): + def test_positive_create_repos_using_multiple_products(self, module_org, module_target_sat): """Create multiple Docker-type repositories on multiple products :id: 5a65d20b-d3b5-4bd7-9c8f-19c8af190558 @@ -189,14 +192,14 @@ def test_positive_create_repos_using_multiple_products(self, module_org): :CaseLevel: Integration """ for _ in range(randint(2, 5)): - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 3)): - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) product = product.read() assert repo.id in [repo_.id for repo_ in product.repository] @pytest.mark.tier1 - def test_positive_sync(self, module_product): + def test_positive_sync(self, module_product, module_target_sat): """Create and sync a Docker-type repository :id: 80fbcd84-1c6f-444f-a44e-7d2738a0cba2 @@ -206,14 +209,14 @@ def test_positive_sync(self, module_product): :CaseImportance: Critical """ - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['docker_manifest'] >= 1 @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_docker_repository_names())) - def test_positive_update_name(self, module_product, repo, new_name): + def test_positive_update_name(self, repo, new_name): """Create a Docker-type repository and update its name. 
:id: 7967e6b5-c206-4ad0-bcf5-64a7ce85233b @@ -249,7 +252,7 @@ def test_positive_update_upstream_name(self, repo): assert repo.docker_upstream_name == new_upstream_name @pytest.mark.tier2 - def test_positive_update_url(self, module_product, repo): + def test_positive_update_url(self, repo): """Create a Docker-type repository and update its URL. :id: 6a588e65-bf1d-4ca9-82ce-591f9070215f @@ -285,7 +288,7 @@ def test_positive_delete(self, repo): repo.read() @pytest.mark.tier2 - def test_positive_delete_random_repo(self, module_org): + def test_positive_delete_random_repo(self, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. @@ -296,10 +299,11 @@ def test_positive_delete_random_repo(self, module_org): """ repos = [] products = [ - entities.Product(organization=module_org).create() for _ in range(randint(2, 5)) + module_target_sat.api.Product(organization=module_org).create() + for _ in range(randint(2, 5)) ] for product in products: - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) assert repo.content_type == 'docker' repos.append(repo) @@ -341,7 +345,7 @@ def test_positive_add_docker_repo(self, repo, content_view): assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repos(self, module_org, module_product, content_view): + def test_positive_add_docker_repos(self, module_target_sat, module_product, content_view): """Add multiple Docker-type repositories to a non-composite content view. @@ -351,7 +355,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie and the product is added to a non-composite content view. 
""" repos = [ - _create_repository(module_product, name=gen_string('alpha')) + _create_repository(module_target_sat, module_product, name=gen_string('alpha')) for _ in range(randint(2, 5)) ] repo_ids = {r.id for r in repos} @@ -369,7 +373,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie assert r.docker_upstream_name == CONTAINER_UPSTREAM_NAME @pytest.mark.tier2 - def test_positive_add_synced_docker_repo(self, module_org, module_product): + def test_positive_add_synced_docker_repo(self, module_org, module_product, module_target_sat): """Create and sync a Docker-type repository :id: 3c7d6f17-266e-43d3-99f8-13bf0251eca6 @@ -377,19 +381,21 @@ def test_positive_add_synced_docker_repo(self, module_org, module_product): :expectedresults: A repository is created with a Docker repository and it is synchronized. """ - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['docker_manifest'] > 0 # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repo_to_ccv(self, module_org): + def test_positive_add_docker_repo_to_ccv(self, module_org, module_target_sat): """Add one Docker-type repository to a composite content view :id: fe278275-2bb2-4d68-8624-f0cfd63ecb57 @@ -398,10 +404,14 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): the product is added to a content view which is then added to a composite content view. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -412,7 +422,9 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): assert len(content_view.version) == 1 # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = content_view.version comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -420,7 +432,7 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): ] @pytest.mark.tier2 - def test_positive_add_docker_repos_to_ccv(self, module_org): + def test_positive_add_docker_repos_to_ccv(self, module_org, module_target_sat): """Add multiple Docker-type repositories to a composite content view. @@ -431,11 +443,13 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): views which are then added to a composite content view. 
""" cv_versions = [] - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 5)): # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() - repo = _create_repository(product) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() + repo = _create_repository(module_target_sat, product) content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -446,14 +460,16 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): cv_versions.append(content_view.version[0]) # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() for cv_version in cv_versions: comp_content_view.component.append(cv_version) comp_content_view = comp_content_view.update(['component']) assert cv_version.id in [component.id for component in comp_content_view.component] @pytest.mark.tier2 - def test_positive_publish_with_docker_repo(self, module_org): + def test_positive_publish_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it once. :id: 86a73e96-ead6-41fb-8095-154a0b83e344 @@ -462,9 +478,13 @@ def test_positive_publish_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published only once. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -481,7 +501,7 @@ def test_positive_publish_with_docker_repo(self, module_org): assert float(content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_with_docker_repo_composite(self, module_org): + def test_positive_publish_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to composite content view and publish it once. @@ -494,8 +514,12 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): :BZ: 1217635 """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -512,7 +536,9 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(content_view.next_version) > 1.0 # Create composite content view… - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component 
= [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -526,7 +552,7 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(comp_content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo(self, module_org): + def test_positive_publish_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it multiple times. @@ -536,8 +562,12 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published multiple times. """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -551,7 +581,9 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): assert len(content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): + def test_positive_publish_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it multiple times. @@ -562,8 +594,12 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): added to a composite content view which is then published multiple times. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -573,7 +609,9 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): content_view = content_view.read() assert content_view.last_published is not None - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert [content_view.version[0].id] == [comp.id for comp in comp_content_view.component] @@ -587,7 +625,7 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): assert len(comp_content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_promote_with_docker_repo(self, module_org): + def test_positive_promote_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to the next available lifecycle-environment. @@ -596,10 +634,14 @@ def test_positive_promote_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -613,7 +655,7 @@ def test_positive_promote_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 2 @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo(self, module_org): + def test_positive_promote_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to multiple available lifecycle-environments. @@ -622,9 +664,13 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -634,12 +680,12 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(cvv.read().environment) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_docker_repo_composite(self, module_org): + def test_positive_promote_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the next available lifecycle-environment. @@ -649,9 +695,13 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -659,7 +709,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -673,7 +725,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): + def test_positive_promote_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the multiple available lifecycle-environments @@ -683,8 +737,12 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -692,7 +750,9 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -702,13 +762,13 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): assert len(comp_cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() comp_cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(comp_cvv.read().environment) == i + 1 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_name_pattern_change(self, module_org): + def test_positive_name_pattern_change(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change registry name pattern for that environment. Verify that repository name on product changed according to new pattern. 
@@ -725,19 +785,23 @@ def test_positive_name_pattern_change(self, module_org): ) repo = _create_repository( - entities.Product(organization=module_org).create(), upstream_name=docker_upstream_name + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), + upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -746,7 +810,7 @@ def test_positive_name_pattern_change(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_positive_product_name_change_after_promotion(self, module_org): + def test_positive_product_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change product name. Verify that repository name on product changed according to new pattern. 
@@ -761,21 +825,23 @@ def test_positive_product_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = "<%= organization.label %>/<%= product.name %>" - prod = entities.Product(organization=module_org, name=old_prod_name).create() - repo = _create_repository(prod, upstream_name=docker_upstream_name) + prod = module_target_sat.api.Product(organization=module_org, name=old_prod_name).create() + repo = _create_repository(module_target_sat, prod, upstream_name=docker_upstream_name) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) prod.name = new_prod_name prod.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -785,7 +851,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -793,7 +859,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def 
test_positive_repo_name_change_after_promotion(self, module_org): + def test_positive_repo_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change repository name. Verify that Docker repository name on product changed according to new pattern. @@ -809,23 +875,26 @@ def test_positive_repo_name_change_after_promotion(self, module_org): new_pattern = "<%= organization.label %>/<%= repository.name %>" repo = _create_repository( - entities.Product(organization=module_org).create(), + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), name=old_repo_name, upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) repo.name = new_repo_name repo.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -835,7 +904,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ 
-843,7 +912,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_lce): + def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_target_sat): """Set registry name pattern to one that does not guarantee uniqueness. Try to promote content view with multiple Docker repositories to lifecycle environment. Verify that content has not been promoted. @@ -855,16 +924,18 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() @@ -873,7 +944,7 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu cvv.promote(data={'environment_ids': lce.id, 'force': False}) @pytest.mark.tier2 - def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): + def test_negative_promote_and_set_non_unique_name_pattern(self, 
module_org, module_target_sat): """Promote content view with multiple Docker repositories to lifecycle environment. Set registry name pattern to one that does not guarantee uniqueness. Verify that pattern has not been @@ -886,18 +957,20 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) lce.registry_name_pattern = new_pattern with pytest.raises(HTTPError): @@ -916,7 +989,7 @@ class TestDockerActivationKey: @pytest.mark.tier2 def test_positive_add_docker_repo_cv( - self, module_lce, module_org, repo, content_view_publish_promote + self, module_lce, module_org, repo, content_view_publish_promote, module_target_sat ): """Add Docker-type repository to a non-composite content view and publish it. 
Then create an activation key and associate it with the @@ -928,7 +1001,7 @@ def test_positive_add_docker_repo_cv( key """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -936,7 +1009,7 @@ def test_positive_add_docker_repo_cv( @pytest.mark.tier2 def test_positive_remove_docker_repo_cv( - self, module_org, module_lce, content_view_publish_promote + self, module_org, module_lce, content_view_publish_promote, module_target_sat ): """Create an activation key and associate it with the Docker content view. Then remove this content view from the activation key. @@ -949,7 +1022,7 @@ def test_positive_remove_docker_repo_cv( :CaseLevel: Integration """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -957,7 +1030,9 @@ def test_positive_remove_docker_repo_cv( assert ak.update(['content_view']).content_view is None @pytest.mark.tier2 - def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, module_org): + def test_positive_add_docker_repo_ccv( + self, content_view_version, module_lce, module_org, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. 
Create an activation key and associate it with the @@ -969,7 +1044,9 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo key """ cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -978,13 +1055,15 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id @pytest.mark.tier2 - def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_view_version): + def test_positive_remove_docker_repo_ccv( + self, module_lce, module_org, content_view_version, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. Create an activation key and associate it with the @@ -997,7 +1076,9 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v then removed from the activation key. 
""" cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -1006,7 +1087,7 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index a20e38e7a90..7f9df5f7d17 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -19,7 +19,6 @@ # For ease of use hc refers to host-collection throughout this document from time import sleep -from nailgun import entities import pytest from robottelo import constants @@ -40,8 +39,8 @@ @pytest.fixture(scope='module') -def activation_key(module_org, module_lce): - activation_key = entities.ActivationKey( +def activation_key(module_org, module_lce, module_target_sat): + activation_key = module_target_sat.api.ActivationKey( environment=module_lce, organization=module_org ).create() return activation_key @@ -302,7 +301,7 @@ def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, t :CaseLevel: System """ # Errata is sorted by issued date. 
- erratum_list = entities.Errata(repository=custom_repo['repository-id']).search( + erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'issued ASC', 'per_page': '1000'} ) issued = [errata.issued for errata in erratum_list] @@ -337,28 +336,28 @@ def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): reposet=constants.REPOSET['rhva6'], releasever=constants.DEFAULT_RELEASE_VERSION, ) - rh_repo = entities.Repository(id=rh_repo_id_rhva).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id_rhva).read() rh_repo.sync() - host_tools_product = entities.Product(organization=org).create() - host_tools_repo = entities.Repository( + host_tools_product = module_target_sat.api.Product(organization=org).create() + host_tools_repo = module_target_sat.api.Repository( product=host_tools_product, ).create() host_tools_repo.url = settings.repos.SATCLIENT_REPO.RHEL6 host_tools_repo = host_tools_repo.update(['url']) host_tools_repo.sync() - custom_product = entities.Product(organization=org).create() - custom_repo = entities.Repository( + custom_product = module_target_sat.api.Product(organization=org).create() + custom_repo = module_target_sat.api.Repository( product=custom_product, ).create() custom_repo.url = CUSTOM_REPO_URL custom_repo = custom_repo.update(['url']) custom_repo.sync() - lce = entities.LifecycleEnvironment(organization=org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() - cv = entities.ContentView( + cv = module_target_sat.api.ContentView( organization=org, repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id], ).create() @@ -366,11 +365,13 @@ def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): cvv = cv.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - ak = entities.ActivationKey(content_view=cv, organization=org, environment=lce).create() + ak = 
module_target_sat.api.ActivationKey( + content_view=cv, organization=org, environment=lce + ).create() sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name] for sub_name in sub_list: - subscription = entities.Subscription(organization=org).search( + subscription = module_target_sat.api.Subscription(organization=org).search( query={'search': f'name="{sub_name}"'} )[0] ak.add_subscriptions(data={'subscription_id': subscription.id}) @@ -467,7 +468,7 @@ def test_positive_get_applicable_for_host(setup_content_rhel6, rhel6_contenthost @pytest.mark.tier3 -def test_positive_get_diff_for_cv_envs(): +def test_positive_get_diff_for_cv_envs(target_sat): """Generate a difference in errata between a set of environments for a content view @@ -485,10 +486,10 @@ def test_positive_get_diff_for_cv_envs(): :CaseLevel: System """ - org = entities.Organization().create() - env = entities.LifecycleEnvironment(organization=org).create() - content_view = entities.ContentView(organization=org).create() - activation_key = entities.ActivationKey(environment=env, organization=org).create() + org = target_sat.api.Organization().create() + env = target_sat.api.LifecycleEnvironment(organization=org).create() + content_view = target_sat.api.ContentView(organization=org).create() + activation_key = target_sat.api.ActivationKey(environment=env, organization=org).create() for repo_url in [settings.repos.yum_9.url, CUSTOM_REPO_URL]: setup_org_for_a_custom_repo( { @@ -499,10 +500,10 @@ def test_positive_get_diff_for_cv_envs(): 'activationkey-id': activation_key.id, } ) - new_env = entities.LifecycleEnvironment(organization=org, prior=env).create() + new_env = target_sat.api.LifecycleEnvironment(organization=org, prior=env).create() cvvs = content_view.read().version[-2:] cvvs[-1].promote(data={'environment_ids': new_env.id, 'force': False}) - result = entities.Errata().compare( + result = target_sat.api.Errata().compare( data={'content_view_version_ids': [cvv.id for cvv 
in cvvs], 'per_page': '9999'} ) cvv2_only_errata = next( @@ -566,7 +567,7 @@ def test_positive_incremental_update_required( # install package to create demand for an Erratum assert rhel7_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}').status == 0 # Call nailgun to make the API POST to see if any incremental updates are required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ 'organization_id': module_org.id, 'included': {'ids': [host.id]}, @@ -576,7 +577,7 @@ def test_positive_incremental_update_required( assert not response, 'Incremental update should not be required at this point' # Add filter of type include but do not include anything # this will hide all RPMs from selected erratum before publishing - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=module_cv, inclusion=True, name='Include Nothing' ).create() module_cv.publish() @@ -586,7 +587,7 @@ def test_positive_incremental_update_required( CV1V.promote(data={'environment_ids': module_lce.id, 'force': False}) module_cv = module_cv.read() # Call nailgun to make the API POST to ensure an incremental update is required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ 'organization_id': module_org.id, 'included': {'ids': [host.id]}, @@ -726,7 +727,7 @@ def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): releasever='None', ) # Sync step because repo is not synced by default - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @@ -744,11 +745,17 @@ def rhel8_custom_repo_cv(module_entitlement_manifest_org): @pytest.fixture(scope='module') def rhel8_module_ak( - module_entitlement_manifest_org, default_lce, rh_repo_module_manifest, 
rhel8_custom_repo_cv + module_entitlement_manifest_org, + default_lce, + rh_repo_module_manifest, + rhel8_custom_repo_cv, + module_target_sat, ): - rhel8_module_ak = entities.ActivationKey( + rhel8_module_ak = module_target_sat.api.ActivationKey( content_view=module_entitlement_manifest_org.default_content_view, - environment=entities.LifecycleEnvironment(id=module_entitlement_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment( + id=module_entitlement_manifest_org.library.id + ), organization=module_entitlement_manifest_org, ).create() # Ensure tools repo is enabled in the activation key @@ -758,19 +765,19 @@ def rhel8_module_ak( } ) # Fetch available subscriptions - subs = entities.Subscription(organization=module_entitlement_manifest_org).search( + subs = module_target_sat.api.Subscription(organization=module_entitlement_manifest_org).search( query={'search': f'{constants.DEFAULT_SUBSCRIPTION_NAME}'} ) assert subs # Add default subscription to activation key rhel8_module_ak.add_subscriptions(data={'subscription_id': subs[0].id}) # Add custom subscription to activation key - product = entities.Product(organization=module_entitlement_manifest_org).search( - query={'search': "redhat=false"} - ) - custom_sub = entities.Subscription(organization=module_entitlement_manifest_org).search( - query={'search': f"name={product[0].name}"} + product = module_target_sat.api.Product(organization=module_entitlement_manifest_org).search( + query={'search': 'redhat=false'} ) + custom_sub = module_target_sat.api.Subscription( + organization=module_entitlement_manifest_org + ).search(query={'search': f'name={product[0].name}'}) rhel8_module_ak.add_subscriptions(data={'subscription_id': custom_sub[0].id}) return rhel8_module_ak diff --git a/tests/foreman/api/test_filter.py b/tests/foreman/api/test_filter.py index 75d42707e4a..c4f2e2d12c7 100644 --- a/tests/foreman/api/test_filter.py +++ b/tests/foreman/api/test_filter.py @@ -20,22 +20,21 @@ :Upstream: 
No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @pytest.fixture(scope='module') -def module_perms(): +def module_perms(module_target_sat): """Search for provisioning template permissions. Set ``cls.ct_perms``.""" - ct_perms = entities.Permission().search( + ct_perms = module_target_sat.api.Permission().search( query={'search': 'resource_type="ProvisioningTemplate"'} ) return ct_perms @pytest.mark.tier1 -def test_positive_create_with_permission(module_perms): +def test_positive_create_with_permission(module_perms, module_target_sat): """Create a filter and assign it some permissions. :id: b8631d0a-a71a-41aa-9f9a-d12d62adc496 @@ -45,14 +44,14 @@ def test_positive_create_with_permission(module_perms): :CaseImportance: Critical """ # Create a filter and assign all ProvisioningTemplate permissions to it - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_perms = [perm.id for perm in filter_.permission] perms = [perm.id for perm in module_perms] assert filter_perms == perms @pytest.mark.tier1 -def test_positive_delete(module_perms): +def test_positive_delete(module_perms, module_target_sat): """Create a filter and delete it afterwards. :id: f0c56fd8-c91d-48c3-ad21-f538313b17eb @@ -61,14 +60,14 @@ def test_positive_delete(module_perms): :CaseImportance: Critical """ - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_.delete() with pytest.raises(HTTPError): filter_.read() @pytest.mark.tier1 -def test_positive_delete_role(module_perms): +def test_positive_delete_role(module_perms, module_target_sat): """Create a filter and delete the role it points at. 
:id: b129642d-926d-486a-84d9-5952b44ac446 @@ -77,8 +76,8 @@ def test_positive_delete_role(module_perms): :CaseImportance: Critical """ - role = entities.Role().create() - filter_ = entities.Filter(permission=module_perms, role=role).create() + role = module_target_sat.api.Role().create() + filter_ = module_target_sat.api.Filter(permission=module_perms, role=role).create() # A filter depends on a role. Deleting a role implicitly deletes the # filter pointing at it. diff --git a/tests/foreman/api/test_foremantask.py b/tests/foreman/api/test_foremantask.py index 23076dd5751..6d8bee0b42d 100644 --- a/tests/foreman/api/test_foremantask.py +++ b/tests/foreman/api/test_foremantask.py @@ -16,13 +16,12 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @pytest.mark.tier1 -def test_negative_fetch_non_existent_task(): +def test_negative_fetch_non_existent_task(target_sat): """Fetch a non-existent task. :id: a2a81ca2-63c4-47f5-9314-5852f5e2617f @@ -32,13 +31,13 @@ def test_negative_fetch_non_existent_task(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ForemanTask(id='abc123').read() + target_sat.api.ForemanTask(id='abc123').read() @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.e2e -def test_positive_get_summary(): +def test_positive_get_summary(target_sat): """Get a summary of foreman tasks. 
:id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce @@ -47,7 +46,7 @@ def test_positive_get_summary(): :CaseImportance: Critical """ - summary = entities.ForemanTask().summary() + summary = target_sat.api.ForemanTask().summary() assert isinstance(summary, list) for item in summary: assert isinstance(item, dict) diff --git a/tests/foreman/api/test_host.py b/tests/foreman/api/test_host.py index 8393dd5ecf0..15411a4741b 100644 --- a/tests/foreman/api/test_host.py +++ b/tests/foreman/api/test_host.py @@ -23,7 +23,7 @@ import http from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_mac, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -32,14 +32,14 @@ from robottelo.utils import datafactory -def update_smart_proxy(location, smart_proxy): - if location.id not in [location.id for location in smart_proxy.location]: - smart_proxy.location.append(entities.Location(id=location.id)) - smart_proxy.update(['location']) +def update_smart_proxy(smart_proxy_location, smart_proxy): + if smart_proxy_location.id not in [location.id for location in smart_proxy.location]: + smart_proxy.location.append(smart_proxy_location) + smart_proxy.update(['location']) @pytest.mark.tier1 -def test_positive_get_search(): +def test_positive_get_search(target_sat): """GET ``api/v2/hosts`` and specify the ``search`` parameter. :id: d63f87e5-66e6-4886-8b44-4129259493a6 @@ -50,7 +50,7 @@ def test_positive_get_search(): """ query = gen_string('utf8', gen_integer(1, 100)) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'search': query}, verify=False, @@ -60,7 +60,7 @@ def test_positive_get_search(): @pytest.mark.tier1 -def test_positive_get_per_page(): +def test_positive_get_per_page(target_sat): """GET ``api/v2/hosts`` and specify the ``per_page`` parameter. 
:id: 9086f41c-b3b9-4af2-b6c4-46b80b4d1cfd @@ -72,7 +72,7 @@ def test_positive_get_per_page(): """ per_page = gen_integer(1, 1000) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'per_page': str(per_page)}, verify=False, @@ -82,7 +82,7 @@ def test_positive_get_per_page(): @pytest.mark.tier2 -def test_positive_search_by_org_id(): +def test_positive_search_by_org_id(target_sat): """Search for host by specifying host's organization id :id: 56353f7c-b77e-4b6c-9ec3-51b58f9a18d8 @@ -96,9 +96,9 @@ def test_positive_search_by_org_id(): :CaseLevel: Integration """ - host = entities.Host().create() + host = target_sat.api.Host().create() # adding org id as GET parameter for correspondence with BZ - query = entities.Host() + query = target_sat.api.Host() query._meta['api_path'] += f'?organization_id={host.organization.id}' results = query.search() assert len(results) == 1 @@ -107,7 +107,7 @@ def test_positive_search_by_org_id(): @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_negative_create_with_owner_type(owner_type): +def test_negative_create_with_owner_type(owner_type, target_sat): """Create a host and specify only ``owner_type``. :id: cdf9d16f-1c47-498a-be48-901355385dde @@ -119,13 +119,15 @@ def test_negative_create_with_owner_type(owner_type): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.Host(owner_type=owner_type).create() + target_sat.api.Host(owner_type=owner_type).create() assert str(422) in str(error) @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_positive_update_owner_type(owner_type, module_org, module_location, module_user): +def test_positive_update_owner_type( + owner_type, module_org, module_location, module_user, module_target_sat +): """Update a host's ``owner_type``. 
:id: b72cd8ef-3a0b-4d2d-94f9-9b64908d699a @@ -141,9 +143,9 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod """ owners = { 'User': module_user, - 'Usergroup': entities.UserGroup().create(), + 'Usergroup': module_target_sat.api.UserGroup().create(), } - host = entities.Host(organization=module_org, location=module_location).create() + host = module_target_sat.api.Host(organization=module_org, location=module_location).create() host.owner_type = owner_type host.owner = owners[owner_type] host = host.update(['owner_type', 'owner']) @@ -152,7 +154,7 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod @pytest.mark.tier1 -def test_positive_create_and_update_with_name(): +def test_positive_create_and_update_with_name(target_sat): """Create and update a host with different names and minimal input parameters :id: a7c0e8ec-3816-4092-88b1-0324cb271752 @@ -162,7 +164,7 @@ def test_positive_create_and_update_with_name(): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name).create() + host = target_sat.api.Host(name=name).create() assert host.name == f'{name}.{host.domain.read().name}' new_name = gen_choice(datafactory.valid_hosts_list()) host.name = new_name @@ -171,7 +173,7 @@ def test_positive_create_and_update_with_name(): @pytest.mark.tier1 -def test_positive_create_and_update_with_ip(): +def test_positive_create_and_update_with_ip(target_sat): """Create and update host with IP address specified :id: 3f266906-c509-42ce-9b20-def448bf8d86 @@ -181,7 +183,7 @@ def test_positive_create_and_update_with_ip(): :CaseImportance: Critical """ ip_addr = gen_ipaddr() - host = entities.Host(ip=ip_addr).create() + host = target_sat.api.Host(ip=ip_addr).create() assert host.ip == ip_addr new_ip_addr = gen_ipaddr() host.ip = new_ip_addr @@ -190,7 +192,7 @@ def test_positive_create_and_update_with_ip(): @pytest.mark.tier1 -def test_positive_create_and_update_mac(): 
+def test_positive_create_and_update_mac(target_sat): """Create host with MAC address and update it :id: 72e3b020-7347-4500-8669-c6ddf6dfd0b6 @@ -201,7 +203,7 @@ def test_positive_create_and_update_mac(): """ mac = gen_mac(multicast=False) - host = entities.Host(mac=mac).create() + host = target_sat.api.Host(mac=mac).create() assert host.mac == mac new_mac = gen_mac(multicast=False) host.mac = new_mac @@ -211,7 +213,7 @@ def test_positive_create_and_update_mac(): @pytest.mark.tier2 def test_positive_create_and_update_with_hostgroup( - module_org, module_location, module_lce, module_published_cv + module_org, module_location, module_lce, module_published_cv, module_target_sat ): """Create and update host with hostgroup specified @@ -222,8 +224,10 @@ def test_positive_create_and_update_with_hostgroup( :CaseLevel: Integration """ module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) - hostgroup = entities.HostGroup(location=[module_location], organization=[module_org]).create() - host = entities.Host( + hostgroup = module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() + host = module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -233,7 +237,7 @@ def test_positive_create_and_update_with_hostgroup( }, ).create() assert host.hostgroup.read().name == hostgroup.name - new_hostgroup = entities.HostGroup( + new_hostgroup = module_target_sat.api.HostGroup( location=[host.location], organization=[host.organization] ).create() host.hostgroup = new_hostgroup @@ -246,7 +250,9 @@ def test_positive_create_and_update_with_hostgroup( @pytest.mark.tier2 -def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_library, module_org): +def test_positive_create_inherit_lce_cv( + module_default_org_view, module_lce_library, module_org, module_target_sat +): """Create a host with hostgroup specified. 
Make sure host inherited hostgroup's lifecycle environment and content-view @@ -259,12 +265,12 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr :BZ: 1391656 """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( content_view=module_default_org_view, lifecycle_environment=module_lce_library, organization=[module_org], ).create() - host = entities.Host(hostgroup=hostgroup, organization=module_org).create() + host = module_target_sat.api.Host(hostgroup=hostgroup, organization=module_org).create() assert ( host.content_facet_attributes['lifecycle_environment_id'] == hostgroup.lifecycle_environment.id @@ -273,7 +279,7 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr @pytest.mark.tier2 -def test_positive_create_with_inherited_params(module_org, module_location): +def test_positive_create_with_inherited_params(module_org, module_location, module_target_sat): """Create a new Host in organization and location with parameters :BZ: 1287223 @@ -287,18 +293,20 @@ def test_positive_create_with_inherited_params(module_org, module_location): :CaseImportance: High """ - org_param = entities.Parameter(organization=module_org).create() - loc_param = entities.Parameter(location=module_location).create() - host = entities.Host(location=module_location, organization=module_org).create() + org_param = module_target_sat.api.Parameter(organization=module_org).create() + loc_param = module_target_sat.api.Parameter(location=module_location).create() + host = module_target_sat.api.Host(location=module_location, organization=module_org).create() # get global parameters - glob_param_list = {(param.name, param.value) for param in entities.CommonParameter().search()} + glob_param_list = { + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() + } # if there are no global parameters, create one if len(glob_param_list) == 0: param_name = gen_string('alpha') 
param_global_value = gen_string('numeric') - entities.CommonParameter(name=param_name, value=param_global_value).create() + module_target_sat.api.CommonParameter(name=param_name, value=param_global_value).create() glob_param_list = { - (param.name, param.value) for param in entities.CommonParameter().search() + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() } assert len(host.all_parameters) == 2 + len(glob_param_list) innerited_params = {(org_param.name, org_param.value), (loc_param.name, loc_param.value)} @@ -397,7 +405,9 @@ def test_positive_end_to_end_with_puppet_class( @pytest.mark.tier2 -def test_positive_create_and_update_with_subnet(module_location, module_org, module_default_subnet): +def test_positive_create_and_update_with_subnet( + module_location, module_org, module_default_subnet, module_target_sat +): """Create and update a host with subnet specified :id: 9aa97aff-8439-4027-89ee-01c643fbf7d1 @@ -406,11 +416,13 @@ def test_positive_create_and_update_with_subnet(module_location, module_org, mod :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( location=module_location, organization=module_org, subnet=module_default_subnet ).create() assert host.subnet.read().name == module_default_subnet.name - new_subnet = entities.Subnet(location=[module_location], organization=[module_org]).create() + new_subnet = module_target_sat.api.Subnet( + location=[module_location], organization=[module_org] + ).create() host.subnet = new_subnet host = host.update(['subnet']) assert host.subnet.read().name == new_subnet.name @@ -418,7 +430,7 @@ def test_positive_create_and_update_with_subnet(module_location, module_org, mod @pytest.mark.tier2 def test_positive_create_and_update_with_compresource( - module_org, module_location, module_cr_libvirt + module_org, module_location, module_cr_libvirt, module_target_sat ): """Create and update a host with compute resource specified @@ -429,11 +441,11 @@ def 
test_positive_create_and_update_with_compresource( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( compute_resource=module_cr_libvirt, location=module_location, organization=module_org ).create() assert host.compute_resource.read().name == module_cr_libvirt.name - new_compresource = entities.LibvirtComputeResource( + new_compresource = module_target_sat.api.LibvirtComputeResource( location=[host.location], organization=[host.organization] ).create() host.compute_resource = new_compresource @@ -442,7 +454,7 @@ def test_positive_create_and_update_with_compresource( @pytest.mark.tier2 -def test_positive_create_and_update_with_model(module_model): +def test_positive_create_and_update_with_model(module_model, module_target_sat): """Create and update a host with model specified :id: 7a912a19-71e4-4843-87fd-bab98c156f4a @@ -451,16 +463,18 @@ def test_positive_create_and_update_with_model(module_model): :CaseLevel: Integration """ - host = entities.Host(model=module_model).create() + host = module_target_sat.api.Host(model=module_model).create() assert host.model.read().name == module_model.name - new_model = entities.Model().create() + new_model = module_target_sat.api.Model().create() host.model = new_model host = host.update(['model']) assert host.model.read().name == new_model.name @pytest.mark.tier2 -def test_positive_create_and_update_with_user(module_org, module_location, module_user): +def test_positive_create_and_update_with_user( + module_org, module_location, module_user, module_target_sat +): """Create and update host with user specified :id: 72e20f8f-17dc-4e38-8ac1-d08df8758f56 @@ -469,18 +483,22 @@ def test_positive_create_and_update_with_user(module_org, module_location, modul :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( owner=module_user, owner_type='User', organization=module_org, location=module_location ).create() assert host.owner.read() == module_user - new_user = 
entities.User(organization=[module_org], location=[module_location]).create() + new_user = module_target_sat.api.User( + organization=[module_org], location=[module_location] + ).create() host.owner = new_user host = host.update(['owner']) assert host.owner.read() == new_user @pytest.mark.tier2 -def test_positive_create_and_update_with_usergroup(module_org, module_location, function_role): +def test_positive_create_and_update_with_usergroup( + module_org, module_location, function_role, module_target_sat +): """Create and update host with user group specified :id: 706e860c-8c05-4ddc-be20-0ecd9f0da813 @@ -489,18 +507,18 @@ def test_positive_create_and_update_with_usergroup(module_org, module_location, :CaseLevel: Integration """ - user = entities.User( + user = module_target_sat.api.User( location=[module_location], organization=[module_org], role=[function_role] ).create() - usergroup = entities.UserGroup(role=[function_role], user=[user]).create() - host = entities.Host( + usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() + host = module_target_sat.api.Host( location=module_location, organization=module_org, owner=usergroup, owner_type='Usergroup', ).create() assert host.owner.read().name == usergroup.name - new_usergroup = entities.UserGroup(role=[function_role], user=[user]).create() + new_usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() host.owner = new_usergroup host = host.update(['owner']) assert host.owner.read().name == new_usergroup.name @@ -508,7 +526,7 @@ def test_positive_create_and_update_with_usergroup(module_org, module_location, @pytest.mark.tier1 @pytest.mark.parametrize('build', [True, False]) -def test_positive_create_and_update_with_build_parameter(build): +def test_positive_create_and_update_with_build_parameter(build, target_sat): """Create and update a host with 'build' parameter specified. 
Build parameter determines whether to enable the host for provisioning @@ -521,7 +539,7 @@ def test_positive_create_and_update_with_build_parameter(build): :CaseImportance: Critical """ - host = entities.Host(build=build).create() + host = target_sat.api.Host(build=build).create() assert host.build == build host.build = not build host = host.update(['build']) @@ -530,7 +548,7 @@ def test_positive_create_and_update_with_build_parameter(build): @pytest.mark.tier1 @pytest.mark.parametrize('enabled', [True, False], ids=['enabled', 'disabled']) -def test_positive_create_and_update_with_enabled_parameter(enabled): +def test_positive_create_and_update_with_enabled_parameter(enabled, target_sat): """Create and update a host with 'enabled' parameter specified. Enabled parameter determines whether to include the host within Satellite 6 reporting @@ -544,7 +562,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): :CaseImportance: Critical """ - host = entities.Host(enabled=enabled).create() + host = target_sat.api.Host(enabled=enabled).create() assert host.enabled == enabled host.enabled = not enabled host = host.update(['enabled']) @@ -553,7 +571,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): @pytest.mark.tier1 @pytest.mark.parametrize('managed', [True, False], ids=['managed', 'unmanaged']) -def test_positive_create_and_update_with_managed_parameter(managed): +def test_positive_create_and_update_with_managed_parameter(managed, target_sat): """Create and update a host with managed parameter specified. 
Managed flag shows whether the host is managed or unmanaged and determines whether some extra parameters are required @@ -567,7 +585,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): :CaseImportance: Critical """ - host = entities.Host(managed=managed).create() + host = target_sat.api.Host(managed=managed).create() assert host.managed == managed host.managed = not managed host = host.update(['managed']) @@ -575,7 +593,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): @pytest.mark.tier1 -def test_positive_create_and_update_with_comment(): +def test_positive_create_and_update_with_comment(target_sat): """Create and update a host with a comment :id: 9b78663f-139c-4d0b-9115-180624b0d41b @@ -585,7 +603,7 @@ def test_positive_create_and_update_with_comment(): :CaseImportance: Critical """ comment = gen_choice(list(datafactory.valid_data_list().values())) - host = entities.Host(comment=comment).create() + host = target_sat.api.Host(comment=comment).create() assert host.comment == comment new_comment = gen_choice(list(datafactory.valid_data_list().values())) host.comment = new_comment @@ -594,7 +612,7 @@ def test_positive_create_and_update_with_comment(): @pytest.mark.tier2 -def test_positive_create_and_update_with_compute_profile(module_compute_profile): +def test_positive_create_and_update_with_compute_profile(module_compute_profile, module_target_sat): """Create and update a host with a compute profile specified :id: 94be25e8-035d-42c5-b1f3-3aa20030410d @@ -604,9 +622,9 @@ def test_positive_create_and_update_with_compute_profile(module_compute_profile) :CaseLevel: Integration """ - host = entities.Host(compute_profile=module_compute_profile).create() + host = module_target_sat.api.Host(compute_profile=module_compute_profile).create() assert host.compute_profile.read().name == module_compute_profile.name - new_cprofile = entities.ComputeProfile().create() + new_cprofile = module_target_sat.api.ComputeProfile().create() 
host.compute_profile = new_cprofile host = host.update(['compute_profile']) assert host.compute_profile.read().name == new_cprofile.name @@ -614,7 +632,7 @@ def test_positive_create_and_update_with_compute_profile(module_compute_profile) @pytest.mark.tier2 def test_positive_create_and_update_with_content_view( - module_org, module_location, module_default_org_view, module_lce_library + module_org, module_location, module_default_org_view, module_lce_library, module_target_sat ): """Create and update host with a content view specified @@ -624,7 +642,7 @@ def test_positive_create_and_update_with_content_view( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -646,7 +664,7 @@ def test_positive_create_and_update_with_content_view( @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_with_host_parameters(module_org, module_location): +def test_positive_end_to_end_with_host_parameters(module_org, module_location, module_target_sat): """Create a host with a host parameters specified then remove and update with the newly specified parameters @@ -658,7 +676,7 @@ def test_positive_end_to_end_with_host_parameters(module_org, module_location): :CaseImportance: Critical """ parameters = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, host_parameters_attributes=parameters, @@ -683,7 +701,7 @@ def test_positive_end_to_end_with_host_parameters(module_org, module_location): @pytest.mark.tier2 @pytest.mark.e2e def test_positive_end_to_end_with_image( - module_org, module_location, module_cr_libvirt, module_libvirt_image + module_org, module_location, module_cr_libvirt, module_libvirt_image, module_target_sat ): """Create a host with an image specified then remove it and update the host with the same image afterwards @@ -695,7 
+713,7 @@ def test_positive_end_to_end_with_image( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -715,7 +733,7 @@ def test_positive_end_to_end_with_image( @pytest.mark.tier1 @pytest.mark.parametrize('method', ['build', 'image']) def test_positive_create_with_provision_method( - method, module_org, module_location, module_cr_libvirt + method, module_org, module_location, module_cr_libvirt, module_target_sat ): """Create a host with provision method specified @@ -728,7 +746,7 @@ def test_positive_create_with_provision_method( :CaseImportance: Critical """ # Compute resource is required for 'image' method - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -738,7 +756,7 @@ def test_positive_create_with_provision_method( @pytest.mark.tier1 -def test_positive_delete(): +def test_positive_delete(target_sat): """Delete a host :id: ec725359-a75e-498c-9da8-f5abd2343dd3 @@ -747,14 +765,16 @@ def test_positive_delete(): :CaseImportance: Critical """ - host = entities.Host().create() + host = target_sat.api.Host().create() host.delete() with pytest.raises(HTTPError): host.read() @pytest.mark.tier2 -def test_positive_create_and_update_domain(module_org, module_location, module_domain): +def test_positive_create_and_update_domain( + module_org, module_location, module_domain, module_target_sat +): """Create and update a host with a domain :id: 8ca9f67c-4c11-40f9-b434-4f200bad000f @@ -763,12 +783,14 @@ def test_positive_create_and_update_domain(module_org, module_location, module_d :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, domain=module_domain ).create() assert host.domain.read().name == module_domain.name - new_domain = 
entities.Domain(organization=[module_org], location=[module_location]).create() + new_domain = module_target_sat.api.Domain( + organization=[module_org], location=[module_location] + ).create() host.domain = new_domain host = host.update(['domain']) assert host.domain.read().name == new_domain.name @@ -802,7 +824,7 @@ def test_positive_create_and_update_env( @pytest.mark.tier2 -def test_positive_create_and_update_arch(module_architecture): +def test_positive_create_and_update_arch(module_architecture, module_target_sat): """Create and update a host with an architecture :id: 5f190b14-e6db-46e1-8cd1-e94e048e6a77 @@ -811,17 +833,17 @@ def test_positive_create_and_update_arch(module_architecture): :CaseLevel: Integration """ - host = entities.Host(architecture=module_architecture).create() + host = module_target_sat.api.Host(architecture=module_architecture).create() assert host.architecture.read().name == module_architecture.name - new_arch = entities.Architecture(operatingsystem=[host.operatingsystem]).create() + new_arch = module_target_sat.api.Architecture(operatingsystem=[host.operatingsystem]).create() host.architecture = new_arch host = host.update(['architecture']) assert host.architecture.read().name == new_arch.name @pytest.mark.tier2 -def test_positive_create_and_update_os(module_os): +def test_positive_create_and_update_os(module_os, module_target_sat): """Create and update a host with an operating system :id: 46edced1-8909-4066-b196-b8e22512341f @@ -830,13 +852,13 @@ def test_positive_create_and_update_os(module_os): :CaseLevel: Integration """ - host = entities.Host(operatingsystem=module_os).create() + host = module_target_sat.api.Host(operatingsystem=module_os).create() assert host.operatingsystem.read().name == module_os.name - new_os = entities.OperatingSystem( + new_os = module_target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() - medium = entities.Media(id=host.medium.id).read() + medium = 
module_target_sat.api.Media(id=host.medium.id).read() medium.operatingsystem.append(new_os) medium.update(['operatingsystem']) host.operatingsystem = new_os @@ -845,7 +867,7 @@ def test_positive_create_and_update_os(module_os): @pytest.mark.tier2 -def test_positive_create_and_update_medium(module_org, module_location): +def test_positive_create_and_update_medium(module_org, module_location, module_target_sat): """Create and update a host with a medium :id: d81cb65c-48b3-4ce3-971e-51b9dd123697 @@ -854,11 +876,13 @@ def test_positive_create_and_update_medium(module_org, module_location): :CaseLevel: Integration """ - medium = entities.Media(organization=[module_org], location=[module_location]).create() - host = entities.Host(medium=medium).create() + medium = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() + host = module_target_sat.api.Host(medium=medium).create() assert host.medium.read().name == medium.name - new_medium = entities.Media( + new_medium = module_target_sat.api.Media( operatingsystem=[host.operatingsystem], location=[host.location], organization=[host.organization], @@ -907,7 +931,7 @@ def test_negative_update_mac(module_host): @pytest.mark.tier2 -def test_negative_update_arch(module_architecture): +def test_negative_update_arch(module_architecture, module_target_sat): """Attempt to update a host with an architecture, which does not belong to host's operating system @@ -917,7 +941,7 @@ def test_negative_update_arch(module_architecture): :CaseLevel: Integration """ - host = entities.Host().create() + host = module_target_sat.api.Host().create() host.architecture = module_architecture with pytest.raises(HTTPError): host = host.update(['architecture']) @@ -925,7 +949,7 @@ def test_negative_update_arch(module_architecture): @pytest.mark.tier2 -def test_negative_update_os(): +def test_negative_update_os(target_sat): """Attempt to update a host with an operating system, which is not associated with host's 
medium @@ -935,8 +959,8 @@ def test_negative_update_os(): :CaseLevel: Integration """ - host = entities.Host().create() - new_os = entities.OperatingSystem( + host = target_sat.api.Host().create() + new_os = target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() host.operatingsystem = new_os @@ -963,9 +987,9 @@ def test_positive_read_content_source_id( :CaseLevel: System """ - proxy = entities.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() + proxy = target_sat.api.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) - host = entities.Host( + host = target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -1407,7 +1431,7 @@ class TestHostInterface: @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_create_end_to_end(self, module_host): + def test_positive_create_end_to_end(self, module_host, target_sat): """Create update and delete an interface with different names and minimal input parameters @@ -1418,7 +1442,7 @@ def test_positive_create_end_to_end(self, module_host): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_interfaces_list()) - interface = entities.Interface(host=module_host, name=name).create() + interface = target_sat.api.Interface(host=module_host, name=name).create() assert interface.name == name new_name = gen_choice(datafactory.valid_interfaces_list()) interface.name = new_name @@ -1429,7 +1453,7 @@ def test_positive_create_end_to_end(self, module_host): interface.read() @pytest.mark.tier1 - def test_negative_end_to_end(self, module_host): + def test_negative_end_to_end(self, module_host, target_sat): """Attempt to create and update an interface with different invalid entries as names (>255 chars, unsupported string types), at the end attempt to remove primary interface @@ -1442,9 +1466,9 @@ def 
test_negative_end_to_end(self, module_host): """ name = gen_choice(datafactory.invalid_interfaces_list()) with pytest.raises(HTTPError) as error: - entities.Interface(host=module_host, name=name).create() + target_sat.api.Interface(host=module_host, name=name).create() assert str(422) in str(error) - interface = entities.Interface(host=module_host).create() + interface = target_sat.api.Interface(host=module_host).create() interface.name = name with pytest.raises(HTTPError) as error: interface.update(['name']) @@ -1463,7 +1487,7 @@ def test_negative_end_to_end(self, module_host): @pytest.mark.upgrade @pytest.mark.tier1 - def test_positive_delete_and_check_host(self): + def test_positive_delete_and_check_host(self, target_sat): """Delete host's interface (not primary) and make sure the host was not accidentally removed altogether with the interface @@ -1476,8 +1500,8 @@ def test_positive_delete_and_check_host(self): :CaseImportance: Critical """ - host = entities.Host().create() - interface = entities.Interface(host=host, primary=False).create() + host = target_sat.api.Host().create() + interface = target_sat.api.Interface(host=host, primary=False).create() interface.delete() with pytest.raises(HTTPError): interface.read() @@ -1491,7 +1515,7 @@ class TestHostBulkAction: """Tests for host bulk actions.""" @pytest.mark.tier2 - def test_positive_bulk_destroy(self, module_org): + def test_positive_bulk_destroy(self, module_org, module_target_sat): """Destroy multiple hosts make sure that hosts were removed, or were not removed when host is excluded from the list. 
@@ -1506,10 +1530,10 @@ def test_positive_bulk_destroy(self, module_org): host_ids = [] for _ in range(3): name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name, organization=module_org).create() + host = module_target_sat.api.Host(name=name, organization=module_org).create() host_ids.append(host.id) - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={ 'organization_id': module_org.id, 'included': {'ids': host_ids}, @@ -1517,15 +1541,15 @@ def test_positive_bulk_destroy(self, module_org): } ) for host_id in host_ids[:-1]: - result = entities.Host(id=host_id).read() + result = module_target_sat.api.Host(id=host_id).read() assert result.id == host_id with pytest.raises(HTTPError): - entities.Host(id=host_ids[-1]).read() + module_target_sat.api.Host(id=host_ids[-1]).read() - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={'organization_id': module_org.id, 'included': {'ids': host_ids[:-1]}} ) for host_id in host_ids[:-1]: with pytest.raises(HTTPError): - entities.Host(id=host_id).read() + module_target_sat.api.Host(id=host_id).read() diff --git a/tests/foreman/api/test_hostcollection.py b/tests/foreman/api/test_hostcollection.py index 0b5c0147ead..8935779101c 100644 --- a/tests/foreman/api/test_hostcollection.py +++ b/tests/foreman/api/test_hostcollection.py @@ -19,7 +19,6 @@ from random import choice, randint from broker import Broker -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -32,15 +31,15 @@ @pytest.fixture(scope='module') -def fake_hosts(module_org): +def fake_hosts(module_org, module_target_sat): """Create content hosts that can be shared by tests.""" - hosts = [entities.Host(organization=module_org).create() for _ in range(2)] + hosts = [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] return hosts @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def 
test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create host collections with different names. :id: 8f2b9223-f5be-4cb1-8316-01ea747cae14 @@ -52,12 +51,14 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(name=name, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + name=name, organization=module_org + ).create() assert host_collection.name == name @pytest.mark.tier1 -def test_positive_list(module_org): +def test_positive_list(module_org, module_target_sat): """Create new host collection and then retrieve list of all existing host collections @@ -72,13 +73,13 @@ def test_positive_list(module_org): :CaseImportance: Critical """ - entities.HostCollection(organization=module_org).create() - hc_list = entities.HostCollection().search() + module_target_sat.api.HostCollection(organization=module_org).create() + hc_list = module_target_sat.api.HostCollection().search() assert len(hc_list) >= 1 @pytest.mark.tier1 -def test_positive_list_for_organization(): +def test_positive_list_for_organization(target_sat): """Create host collection for specific organization. 
Retrieve list of host collections for that organization @@ -89,16 +90,16 @@ def test_positive_list_for_organization(): :CaseImportance: Critical """ - org = entities.Organization().create() - hc = entities.HostCollection(organization=org).create() - hc_list = entities.HostCollection(organization=org).search() + org = target_sat.api.Organization().create() + hc = target_sat.api.HostCollection(organization=org).create() + hc_list = target_sat.api.HostCollection(organization=org).search() assert len(hc_list) == 1 assert hc_list[0].id == hc.id @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, desc): +def test_positive_create_with_description(module_org, desc, module_target_sat): """Create host collections with different descriptions. :id: 9d13392f-8d9d-4ff1-8909-4233e4691055 @@ -110,12 +111,14 @@ def test_positive_create_with_description(module_org, desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(description=desc, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + description=desc, organization=module_org + ).create() assert host_collection.description == desc @pytest.mark.tier1 -def test_positive_create_with_limit(module_org): +def test_positive_create_with_limit(module_org, module_target_sat): """Create host collections with different limits. 
:id: 86d9387b-7036-4794-96fd-5a3472dd9160 @@ -127,13 +130,15 @@ def test_positive_create_with_limit(module_org): """ for _ in range(5): limit = randint(1, 30) - host_collection = entities.HostCollection(max_hosts=limit, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + max_hosts=limit, organization=module_org + ).create() assert host_collection.max_hosts == limit @pytest.mark.parametrize("unlimited", [False, True]) @pytest.mark.tier1 -def test_positive_create_with_unlimited_hosts(module_org, unlimited): +def test_positive_create_with_unlimited_hosts(module_org, unlimited, module_target_sat): """Create host collection with different values of 'unlimited hosts' parameter. @@ -146,7 +151,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=None if unlimited else 1, organization=module_org, unlimited_hosts=unlimited, @@ -155,7 +160,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): @pytest.mark.tier1 -def test_positive_create_with_host(module_org, fake_hosts): +def test_positive_create_with_host(module_org, fake_hosts, module_target_sat): """Create a host collection that contains a host. :id: 9dc0ad72-58c2-4079-b1ca-2c4373472f0f @@ -167,14 +172,14 @@ def test_positive_create_with_host(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() assert len(host_collection.host) == 1 @pytest.mark.tier1 -def test_positive_create_with_hosts(module_org, fake_hosts): +def test_positive_create_with_hosts(module_org, fake_hosts, module_target_sat): """Create a host collection that contains hosts. 
:id: bb8d2b42-9a8b-4c4f-ba0c-c56ae5a7eb1d @@ -186,12 +191,14 @@ def test_positive_create_with_hosts(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert len(host_collection.host) == len(fake_hosts) @pytest.mark.tier2 -def test_positive_add_host(module_org, fake_hosts): +def test_positive_add_host(module_org, fake_hosts, module_target_sat): """Add a host to host collection. :id: da8bc901-7ac8-4029-bb62-af21aa4d3a88 @@ -202,7 +209,7 @@ def test_positive_add_host(module_org, fake_hosts): :BZ:1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.host_ids = [fake_hosts[0].id] host_collection = host_collection.update(['host_ids']) assert len(host_collection.host) == 1 @@ -210,7 +217,7 @@ def test_positive_add_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_add_hosts(module_org, fake_hosts): +def test_positive_add_hosts(module_org, fake_hosts, module_target_sat): """Add hosts to host collection. 
:id: f76b4db1-ccd5-47ab-be15-8c7d91d03b22 @@ -221,7 +228,7 @@ def test_positive_add_hosts(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_ids = [str(host.id) for host in fake_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -229,7 +236,7 @@ def test_positive_add_hosts(module_org, fake_hosts): @pytest.mark.tier1 -def test_positive_read_host_ids(module_org, fake_hosts): +def test_positive_read_host_ids(module_org, fake_hosts, module_target_sat): """Read a host collection and look at the ``host_ids`` field. :id: 444a1528-64c8-41b6-ba2b-6c49799d5980 @@ -241,7 +248,9 @@ def test_positive_read_host_ids(module_org, fake_hosts): :BZ:1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert frozenset(host.id for host in host_collection.host) == frozenset( host.id for host in fake_hosts ) @@ -249,7 +258,7 @@ def test_positive_read_host_ids(module_org, fake_hosts): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Check if host collection name can be updated :id: b2dedb99-6dd7-41be-8aaa-74065c820ac6 @@ -260,14 +269,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.name = new_name assert host_collection.update().name == new_name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) 
@pytest.mark.tier1 -def test_positive_update_description(module_org, new_desc): +def test_positive_update_description(module_org, new_desc, module_target_sat): """Check if host collection description can be updated :id: f8e9bd1c-1525-4b5f-a07c-eb6b6e7aa628 @@ -278,13 +287,13 @@ def test_positive_update_description(module_org, new_desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.description = new_desc assert host_collection.update().description == new_desc @pytest.mark.tier1 -def test_positive_update_limit(module_org): +def test_positive_update_limit(module_org, module_target_sat): """Check if host collection limit can be updated :id: 4eda7796-cd81-453b-9b72-4ef84b2c1d8c @@ -293,7 +302,7 @@ def test_positive_update_limit(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1, organization=module_org, unlimited_hosts=False ).create() for limit in (1, 3, 5, 10, 20): @@ -302,7 +311,7 @@ def test_positive_update_limit(module_org): @pytest.mark.tier1 -def test_positive_update_unlimited_hosts(module_org): +def test_positive_update_unlimited_hosts(module_org, module_target_sat): """Check if host collection 'unlimited hosts' parameter can be updated :id: 09a3973d-9832-4255-87bf-f9eaeab4aee8 @@ -313,7 +322,7 @@ def test_positive_update_unlimited_hosts(module_org): :CaseImportance: Critical """ random_unlimited = choice([True, False]) - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1 if not random_unlimited else None, organization=module_org, unlimited_hosts=random_unlimited, @@ -326,7 +335,7 @@ def test_positive_update_unlimited_hosts(module_org): @pytest.mark.tier1 -def test_positive_update_host(module_org, fake_hosts): +def 
test_positive_update_host(module_org, fake_hosts, module_target_sat): """Update host collection's host. :id: 23082854-abcf-4085-be9c-a5d155446acb @@ -335,7 +344,7 @@ def test_positive_update_host(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() host_collection.host_ids = [fake_hosts[1].id] @@ -345,7 +354,7 @@ def test_positive_update_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_update_hosts(module_org, fake_hosts): +def test_positive_update_hosts(module_org, fake_hosts, module_target_sat): """Update host collection's hosts. :id: 0433b37d-ae16-456f-a51d-c7b800334861 @@ -354,8 +363,10 @@ def test_positive_update_hosts(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() - new_hosts = [entities.Host(organization=module_org).create() for _ in range(2)] + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() + new_hosts = [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] host_ids = [str(host.id) for host in new_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -364,7 +375,7 @@ def test_positive_update_hosts(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Check if host collection can be deleted :id: 13a16cd2-16ce-4966-8c03-5d821edf963b @@ -373,7 +384,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.delete() with 
pytest.raises(HTTPError): host_collection.read() @@ -381,7 +392,7 @@ def test_positive_delete(module_org): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Try to create host collections with different invalid names :id: 38f67d04-a19d-4eab-a577-21b8d62c7389 @@ -393,7 +404,7 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostCollection(name=name, organization=module_org).create() + module_target_sat.api.HostCollection(name=name, organization=module_org).create() @pytest.mark.tier1 @@ -418,14 +429,14 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s """ # this command creates a host collection and "appends", makes available, to the AK module_ak_cv_lce.host_collection.append( - entities.HostCollection(organization=module_org).create() + target_sat.api.HostCollection(organization=module_org).create() ) # Move HC from Add tab to List tab on AK view module_ak_cv_lce = module_ak_cv_lce.update(['host_collection']) # Create a product so we have a subscription to use - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() prod_name = product.name - product_subscription = entities.Subscription(organization=module_org).search( + product_subscription = target_sat.api.Subscription(organization=module_org).search( query={'search': f'name={prod_name}'} )[0] # Create and register VMs as members of Host Collection @@ -438,7 +449,7 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s host_ids = [host.id for host in host_collection.host] # Add subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_add_subscriptions( + 
target_sat.api.Host().bulk_add_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -447,13 +458,13 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are there for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert ( prod_name in req['results'][0]['product_name'] ), 'Subscription not applied to HC members' # Remove the subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_remove_subscriptions( + target_sat.api.Host().bulk_remove_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -462,5 +473,5 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are gone for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert not req['results'], 'Subscription not removed from HC members' diff --git a/tests/foreman/api/test_hostgroup.py b/tests/foreman/api/test_hostgroup.py index 9e3c3aa0c21..784b404e5dd 100644 --- a/tests/foreman/api/test_hostgroup.py +++ b/tests/foreman/api/test_hostgroup.py @@ -19,7 +19,7 @@ from random import randint from fauxfactory import gen_string -from nailgun import client, entities, entity_fields +from nailgun import client, entity_fields import pytest from requests.exceptions import HTTPError @@ -32,8 +32,10 @@ @pytest.fixture -def hostgroup(module_org, module_location): - return entities.HostGroup(location=[module_location], organization=[module_org]).create() +def hostgroup(module_org, module_location, module_target_sat): + return module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() @pytest.fixture 
@@ -158,7 +160,7 @@ def test_inherit_puppetclass(self, session_puppet_enabled_sat): @pytest.mark.upgrade @pytest.mark.tier3 - def test_rebuild_config(self, module_org, module_location, hostgroup): + def test_rebuild_config(self, module_org, module_location, hostgroup, module_target_sat): """'Rebuild orchestration config' of an existing host group :id: 58bf7015-18fc-4d25-9b64-7f2dd6dde425 @@ -169,12 +171,12 @@ def test_rebuild_config(self, module_org, module_location, hostgroup): :CaseLevel: System """ - lce = entities.LifecycleEnvironment(organization=module_org).create() - content_view = entities.ContentView(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() content_view = content_view.read() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) - entities.Host( + module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -193,7 +195,7 @@ def test_rebuild_config(self, module_org, module_location, hostgroup): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_hostgroups_list())) - def test_positive_create_with_name(self, name, module_org, module_location): + def test_positive_create_with_name(self, name, module_org, module_location, module_target_sat): """Create a hostgroup with different names :id: fd5d353c-fd0c-4752-8a83-8f399b4c3416 @@ -204,13 +206,13 @@ def test_positive_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() assert name == hostgroup.name @pytest.mark.tier1 - def test_positive_clone(self, hostgroup): + def test_positive_clone(self, hostgroup, target_sat): """Create a hostgroup by cloning an 
existing one :id: 44ac8b3b-9cb0-4a9e-ad9b-2c67b2411922 @@ -220,7 +222,7 @@ def test_positive_clone(self, hostgroup): :CaseImportance: Critical """ hostgroup_cloned_name = gen_string('alpha') - hostgroup_cloned = entities.HostGroup(id=hostgroup.id).clone( + hostgroup_cloned = target_sat.api.HostGroup(id=hostgroup.id).clone( data={'name': hostgroup_cloned_name} ) hostgroup_origin = hostgroup.read_json() @@ -402,20 +404,20 @@ def test_positive_create_with_realm(self, module_org, module_location, target_sa :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() assert hostgroup.realm.read().name == realm.name @pytest.mark.tier2 - def test_positive_create_with_locs(self, module_org): + def test_positive_create_with_locs(self, module_org, module_target_sat): """Create a hostgroup with multiple locations specified :id: 0c2ee2ff-9e7a-4931-8cea-f4eecbd8c4c0 @@ -427,12 +429,17 @@ def test_positive_create_with_locs(self, module_org): :CaseLevel: Integration """ - locs = [entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(location=locs, organization=[module_org]).create() + locs = [ + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) + ] + hostgroup = module_target_sat.api.HostGroup( + location=locs, organization=[module_org] + ).create() assert {loc.name for loc in locs} == {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_create_with_orgs(self): + def test_positive_create_with_orgs(self, target_sat): """Create a hostgroup with multiple 
organizations specified :id: 09642238-cf0d-469a-a0b5-c167b1b8edf5 @@ -444,8 +451,8 @@ def test_positive_create_with_orgs(self): :CaseLevel: Integration """ - orgs = [entities.Organization().create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(organization=orgs).create() + orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] + hostgroup = target_sat.api.HostGroup(organization=orgs).create() assert {org.name for org in orgs}, {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @@ -497,20 +504,20 @@ def test_positive_update_realm(self, module_org, module_location, target_sat): :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() - new_realm = entities.Realm( + new_realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() @@ -549,7 +556,7 @@ def test_positive_update_content_source(self, hostgroup, target_sat): :CaseLevel: Integration """ - new_content_source = entities.SmartProxy().search( + new_content_source = target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0] hostgroup.content_source = new_content_source @@ -557,7 +564,7 @@ def test_positive_update_content_source(self, hostgroup, target_sat): assert hostgroup.content_source.read().name == new_content_source.name @pytest.mark.tier2 - def test_positive_update_locs(self, module_org, hostgroup): + def test_positive_update_locs(self, 
module_org, hostgroup, module_target_sat): """Update a hostgroup with new multiple locations :id: b045f7e8-d7c0-428b-a29c-8d54e53742e2 @@ -569,14 +576,15 @@ def test_positive_update_locs(self, module_org, hostgroup): :CaseLevel: Integration """ new_locs = [ - entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5)) + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) ] hostgroup.location = new_locs hostgroup = hostgroup.update(['location']) assert {loc.name for loc in new_locs}, {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_update_orgs(self, hostgroup): + def test_positive_update_orgs(self, hostgroup, target_sat): """Update a hostgroup with new multiple organizations :id: 5f6bd4f9-4bd6-4d7e-9a91-de824299020e @@ -587,14 +595,14 @@ def test_positive_update_orgs(self, hostgroup): :CaseLevel: Integration """ - new_orgs = [entities.Organization().create() for _ in range(randint(3, 5))] + new_orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] hostgroup.organization = new_orgs hostgroup = hostgroup.update(['organization']) assert {org.name for org in new_orgs} == {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name, module_org, module_location): + def test_negative_create_with_name(self, name, module_org, module_location, module_target_sat): """Attempt to create a hostgroup with invalid names :id: 3f5aa17a-8db9-4fe9-b309-b8ec5e739da1 @@ -606,7 +614,7 @@ def test_negative_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostGroup( + module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() @@ -630,7 +638,7 @@ def test_negative_update_name(self, new_name, hostgroup): 
assert hostgroup.read().name == original_name @pytest.mark.tier2 - def test_positive_create_with_group_parameters(self, module_org): + def test_positive_create_with_group_parameters(self, module_org, module_target_sat): """Create a hostgroup with 'group parameters' specified :id: 0959e2a2-d635-482b-9b2e-d33990d6f0dc @@ -646,7 +654,7 @@ def test_positive_create_with_group_parameters(self, module_org): :BZ: 1710853 """ group_params = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( organization=[module_org], group_parameters_attributes=[group_params] ).create() assert group_params['name'] == hostgroup.group_parameters_attributes[0]['name'] diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index b5ed102ed9d..0b4446069bb 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from robottelo import constants @@ -206,7 +205,7 @@ def test_positive_auto_attach_with_http_proxy( @pytest.mark.e2e @pytest.mark.tier2 -def test_positive_assign_http_proxy_to_products(): +def test_positive_assign_http_proxy_to_products(target_sat): """Assign http_proxy to Products and check whether http-proxy is used during sync. 
@@ -219,15 +218,15 @@ def test_positive_assign_http_proxy_to_products(): :CaseImportance: Critical """ - org = entities.Organization().create() + org = target_sat.api.Organization().create() # create HTTP proxies - http_proxy_a = entities.HTTPProxy( + http_proxy_a = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.un_auth_proxy_url, organization=[org], ).create() - http_proxy_b = entities.HTTPProxy( + http_proxy_b = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.auth_proxy_url, username=settings.http_proxy.username, @@ -236,20 +235,20 @@ def test_positive_assign_http_proxy_to_products(): ).create() # Create products and repositories - product_a = entities.Product(organization=org).create() - product_b = entities.Product(organization=org).create() - repo_a1 = entities.Repository(product=product_a, http_proxy_policy='none').create() - repo_a2 = entities.Repository( + product_a = target_sat.api.Product(organization=org).create() + product_b = target_sat.api.Product(organization=org).create() + repo_a1 = target_sat.api.Repository(product=product_a, http_proxy_policy='none').create() + repo_a2 = target_sat.api.Repository( product=product_a, http_proxy_policy='use_selected_http_proxy', http_proxy_id=http_proxy_a.id, ).create() - repo_b1 = entities.Repository(product=product_b, http_proxy_policy='none').create() - repo_b2 = entities.Repository( + repo_b1 = target_sat.api.Repository(product=product_b, http_proxy_policy='none').create() + repo_b2 = target_sat.api.Repository( product=product_b, http_proxy_policy='global_default_http_proxy' ).create() # Add http_proxy to products - entities.ProductBulkAction().http_proxy( + target_sat.api.ProductBulkAction().http_proxy( data={ "ids": [product_a.id, product_b.id], "http_proxy_policy": "use_selected_http_proxy", diff --git a/tests/foreman/api/test_ldapauthsource.py b/tests/foreman/api/test_ldapauthsource.py index 10b9851b3d6..2139e7c947f 100644 --- 
a/tests/foreman/api/test_ldapauthsource.py +++ b/tests/foreman/api/test_ldapauthsource.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -27,7 +26,9 @@ @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.parametrize('auth_source_type', ['AD', 'IPA']) -def test_positive_endtoend(auth_source_type, module_org, module_location, ad_data, ipa_data): +def test_positive_endtoend( + auth_source_type, module_org, module_location, ad_data, ipa_data, module_target_sat +): """Create/update/delete LDAP authentication with AD using names of different types :id: e3607c97-7c48-4cf6-b119-2bfd895d9325 @@ -46,7 +47,7 @@ def test_positive_endtoend(auth_source_type, module_org, module_location, ad_dat auth_source_data = ipa_data auth_source_data['ldap_user_name'] = auth_source_data['ldap_user_cn'] auth_type_attr = LDAP_ATTR['login'] - authsource = entities.AuthSourceLDAP( + authsource = module_target_sat.api.AuthSourceLDAP( onthefly_register=True, account=auth_source_data['ldap_user_cn'], account_password=auth_source_data['ldap_user_passwd'], diff --git a/tests/foreman/api/test_lifecycleenvironment.py b/tests/foreman/api/test_lifecycleenvironment.py index a437d5b8a69..dfc3ed1ba27 100644 --- a/tests/foreman/api/test_lifecycleenvironment.py +++ b/tests/foreman/api/test_lifecycleenvironment.py @@ -21,7 +21,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -34,20 +33,20 @@ @pytest.fixture(scope='module') -def module_lce(module_org): - return entities.LifecycleEnvironment( +def module_lce(module_org, module_target_sat): + return module_target_sat.api.LifecycleEnvironment( organization=module_org, description=gen_string('alpha') ).create() @pytest.fixture -def lce(module_org): - return entities.LifecycleEnvironment(organization=module_org).create() +def lce(module_org, module_target_sat): + return 
module_target_sat.api.LifecycleEnvironment(organization=module_org).create() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create lifecycle environment with valid name only :id: ec1d985a-6a39-4de6-b635-c803ecedd832 @@ -58,12 +57,12 @@ def test_positive_create_with_name(name): :parametrized: yes """ - assert entities.LifecycleEnvironment(name=name).create().name == name + assert target_sat.api.LifecycleEnvironment(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create lifecycle environment with valid description :id: 0bc05510-afc7-4087-ab75-1065ab5ba1d3 @@ -75,11 +74,11 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - assert entities.LifecycleEnvironment(description=desc).create().description == desc + assert target_sat.api.LifecycleEnvironment(description=desc).create().description == desc @pytest.mark.tier1 -def test_positive_create_prior(module_org): +def test_positive_create_prior(module_org, module_target_sat): """Create a lifecycle environment with valid name with Library as prior @@ -90,13 +89,13 @@ def test_positive_create_prior(module_org): :CaseImportance: Critical """ - lc_env = entities.LifecycleEnvironment(organization=module_org).create() + lc_env = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() assert lc_env.prior.read().name == ENVIRONMENT @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier3 -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create lifecycle environment providing an invalid name :id: 7e8ea2e6-5927-4e86-8ea8-04c3feb524a6 @@ -108,12 +107,12 @@ def 
test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.LifecycleEnvironment(name=name).create() + target_sat.api.LifecycleEnvironment(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_lce, new_name): +def test_positive_update_name(module_lce, new_name, module_target_sat): """Create lifecycle environment providing the initial name, then update its name to another valid name. @@ -125,13 +124,13 @@ def test_positive_update_name(module_lce, new_name): """ module_lce.name = new_name module_lce.update(['name']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_lce, new_desc): +def test_positive_update_description(module_lce, new_desc, module_target_sat): """Create lifecycle environment providing the initial description, then update its description to another one. @@ -147,7 +146,7 @@ def test_positive_update_description(module_lce, new_desc): """ module_lce.description = new_desc module_lce.update(['description']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_desc == updated.description @@ -175,7 +174,7 @@ def test_negative_update_name(module_lce, new_name): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete(lce, name): +def test_positive_delete(lce, name, target_sat): """Create lifecycle environment and then delete it. 
:id: cd5a97ca-c1e8-41c7-8d6b-f908916b24e1 @@ -188,12 +187,12 @@ def test_positive_delete(lce, name): """ lce.delete() with pytest.raises(HTTPError): - entities.LifecycleEnvironment(id=lce.id).read() + target_sat.api.LifecycleEnvironment(id=lce.id).read() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_search_in_org(name): +def test_positive_search_in_org(name, target_sat): """Search for a lifecycle environment and specify an org ID. :id: 110e4777-c374-4365-b676-b1db4552fe51 @@ -211,8 +210,8 @@ def test_positive_search_in_org(name): :parametrized: yes """ - new_org = entities.Organization().create() - lc_env = entities.LifecycleEnvironment(organization=new_org).create() + new_org = target_sat.api.Organization().create() + lc_env = target_sat.api.LifecycleEnvironment(organization=new_org).create() lc_envs = lc_env.search({'organization'}) assert len(lc_envs) == 2 assert {lc_env_.name for lc_env_ in lc_envs}, {'Library', lc_env.name} diff --git a/tests/foreman/api/test_media.py b/tests/foreman/api/test_media.py index 89514610e72..8fb0ec5af66 100644 --- a/tests/foreman/api/test_media.py +++ b/tests/foreman/api/test_media.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_string, gen_url -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -35,8 +34,8 @@ class TestMedia: """Tests for ``api/v2/media``.""" @pytest.fixture(scope='class') - def class_media(self, module_org): - return entities.Media(organization=[module_org]).create() + def class_media(self, module_org, class_target_sat): + return class_target_sat.api.Media(organization=[module_org]).create() @pytest.mark.tier1 @pytest.mark.upgrade @@ -44,7 +43,7 @@ def class_media(self, module_org): ('name', 'new_name'), **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) ) - def test_positive_crud_with_name(self, module_org, name, new_name): + def test_positive_crud_with_name(self, 
module_org, name, new_name, module_target_sat): """Create, update, delete media with valid name only :id: b07a4549-7dd5-4b36-a1b4-9f8d48ddfcb5 @@ -55,9 +54,9 @@ def test_positive_crud_with_name(self, module_org, name, new_name): :CaseImportance: Critical """ - media = entities.Media(organization=[module_org], name=name).create() + media = module_target_sat.api.Media(organization=[module_org], name=name).create() assert media.name == name - media = entities.Media(id=media.id, name=new_name).update(['name']) + media = module_target_sat.api.Media(id=media.id, name=new_name).update(['name']) assert media.name == new_name media.delete() with pytest.raises(HTTPError): @@ -65,7 +64,7 @@ def test_positive_crud_with_name(self, module_org, name, new_name): @pytest.mark.tier1 @pytest.mark.parametrize('os_family', **parametrized(OPERATING_SYSTEMS)) - def test_positive_create_update_with_os_family(self, module_org, os_family): + def test_positive_create_update_with_os_family(self, module_org, os_family, module_target_sat): """Create and update media with every OS family possible :id: d02404f0-b2ad-412c-b1cd-0548254f7c88 @@ -75,14 +74,14 @@ def test_positive_create_update_with_os_family(self, module_org, os_family): :expectedresults: Media entity is created and has proper OS family assigned """ - media = entities.Media(organization=[module_org], os_family=os_family).create() + media = module_target_sat.api.Media(organization=[module_org], os_family=os_family).create() assert media.os_family == os_family new_os_family = new_os_family = random.choice(OPERATING_SYSTEMS) media.os_family = new_os_family assert media.update(['os_family']).os_family == new_os_family @pytest.mark.tier2 - def test_positive_create_with_location(self, module_org, module_location): + def test_positive_create_with_location(self, module_org, module_location, module_target_sat): """Create media entity assigned to non-default location :id: 1c4fa736-c145-46ca-9feb-c4046fc778c6 @@ -91,11 +90,13 @@ def 
test_positive_create_with_location(self, module_org, module_location): :CaseLevel: Integration """ - media = entities.Media(organization=[module_org], location=[module_location]).create() + media = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() assert media.location[0].read().name == module_location.name @pytest.mark.tier2 - def test_positive_create_with_os(self, module_org): + def test_positive_create_with_os(self, module_org, module_target_sat): """Create media entity assigned to operation system entity :id: dec22198-ed07-480c-9306-fa5458baec0b @@ -104,12 +105,14 @@ def test_positive_create_with_os(self, module_org): :CaseLevel: Integration """ - os = entities.OperatingSystem().create() - media = entities.Media(organization=[module_org], operatingsystem=[os]).create() + os = module_target_sat.api.OperatingSystem().create() + media = module_target_sat.api.Media( + organization=[module_org], operatingsystem=[os] + ).create() assert os.read().medium[0].read().name == media.name @pytest.mark.tier2 - def test_positive_create_update_url(self, module_org): + def test_positive_create_update_url(self, module_org, module_target_sat): """Create media entity providing the initial url path, then update that url to another valid one. 
@@ -120,15 +123,15 @@ def test_positive_create_update_url(self, module_org): :CaseImportance: Medium """ url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(organization=[module_org], path_=url).create() + media = module_target_sat.api.Media(organization=[module_org], path_=url).create() assert media.path_ == url new_url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(id=media.id, path_=new_url).update(['path_']) + media = module_target_sat.api.Media(id=media.id, path_=new_url).update(['path_']) assert media.path_ == new_url @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Try to create media entity providing an invalid name :id: 0934f4dc-f674-40fe-a639-035761139c83 @@ -140,10 +143,10 @@ def test_negative_create_with_invalid_name(self, name): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(name=name).create() + target_sat.api.Media(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_invalid_url(self): + def test_negative_create_with_invalid_url(self, target_sat): """Try to create media entity providing an invalid URL :id: ae00b6bb-37ed-459e-b9f7-acc92ed0b262 @@ -153,10 +156,10 @@ def test_negative_create_with_invalid_url(self): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(path_='NON_EXISTENT_URL').create() + target_sat.api.Media(path_='NON_EXISTENT_URL').create() @pytest.mark.tier1 - def test_negative_create_with_invalid_os_family(self): + def test_negative_create_with_invalid_os_family(self, target_sat): """Try to create media entity providing an invalid OS family :id: 368b7eac-8c52-4071-89c0-1946d7101291 @@ -166,11 +169,11 @@ def test_negative_create_with_invalid_os_family(self): :CaseImportance: Medium """ with pytest.raises(HTTPError): - 
entities.Media(os_family='NON_EXISTENT_OS').create() + target_sat.api.Media(os_family='NON_EXISTENT_OS').create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, module_org, class_media, new_name): + def test_negative_update_name(self, class_media, new_name, target_sat): """Create media entity providing the initial name, then try to update its name to invalid one. @@ -183,10 +186,10 @@ def test_negative_update_name(self, module_org, class_media, new_name): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, name=new_name).update(['name']) + target_sat.api.Media(id=class_media.id, name=new_name).update(['name']) @pytest.mark.tier1 - def test_negative_update_url(self, module_org, class_media): + def test_negative_update_url(self, class_media, target_sat): """Try to update media with invalid url. :id: 6832f178-4adc-4bb1-957d-0d8d4fd8d9cd @@ -196,10 +199,10 @@ def test_negative_update_url(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) + target_sat.api.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) @pytest.mark.tier1 - def test_negative_update_os_family(self, module_org, class_media): + def test_negative_update_os_family(self, class_media, target_sat): """Try to update media with invalid operation system. 
:id: f4c5438d-5f98-40b1-9bc7-c0741e81303a @@ -209,4 +212,6 @@ def test_negative_update_os_family(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update(['os_family']) + target_sat.api.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update( + ['os_family'] + ) diff --git a/tests/foreman/api/test_multiple_paths.py b/tests/foreman/api/test_multiple_paths.py index 66b4bc7ea9a..4cacae5051e 100644 --- a/tests/foreman/api/test_multiple_paths.py +++ b/tests/foreman/api/test_multiple_paths.py @@ -411,7 +411,7 @@ def test_positive_entity_read(self, entity_cls): assert isinstance(entity_cls(id=entity_id).read(), entity_cls) @pytest.mark.tier1 - def test_positive_architecture_read(self): + def test_positive_architecture_read(self, target_sat): """Create an arch that points to an OS, and read the arch. :id: e4c7babe-11d8-4f85-8382-5267a49046e9 @@ -421,14 +421,14 @@ def test_positive_architecture_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - arch_id = entities.Architecture(operatingsystem=[os_id]).create_json()['id'] - architecture = entities.Architecture(id=arch_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + arch_id = target_sat.api.Architecture(operatingsystem=[os_id]).create_json()['id'] + architecture = target_sat.api.Architecture(id=arch_id).read() assert len(architecture.operatingsystem) == 1 assert architecture.operatingsystem[0].id == os_id @pytest.mark.tier1 - def test_positive_syncplan_read(self): + def test_positive_syncplan_read(self, target_sat): """Create a SyncPlan and read it back using ``nailgun.entity_mixins.EntityReadMixin.read``. 
@@ -439,14 +439,14 @@ def test_positive_syncplan_read(self): :CaseImportance: Critical """ - org_id = entities.Organization().create_json()['id'] - syncplan_id = entities.SyncPlan(organization=org_id).create_json()['id'] + org_id = target_sat.api.Organization().create_json()['id'] + syncplan_id = target_sat.api.SyncPlan(organization=org_id).create_json()['id'] assert isinstance( - entities.SyncPlan(organization=org_id, id=syncplan_id).read(), entities.SyncPlan + target_sat.api.SyncPlan(organization=org_id, id=syncplan_id).read(), target_sat.api.SyncPlan ) @pytest.mark.tier1 - def test_positive_osparameter_read(self): + def test_positive_osparameter_read(self, target_sat): """Create an OperatingSystemParameter and get it using ``nailgun.entity_mixins.EntityReadMixin.read``. @@ -457,15 +457,15 @@ def test_positive_osparameter_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - osp_id = entities.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] + os_id = target_sat.api.OperatingSystem().create_json()['id'] + osp_id = target_sat.api.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] assert isinstance( - entities.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), - entities.OperatingSystemParameter, + target_sat.api.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), + target_sat.api.OperatingSystemParameter, ) @pytest.mark.tier1 - def test_positive_permission_read(self): + def test_positive_permission_read(self, target_sat): """Create an Permission entity and get it using ``nailgun.entity_mixins.EntityReadMixin.read``.
@@ -476,12 +476,12 @@ class and name and resource_type fields are populated :CaseImportance: Critical """ - perm = entities.Permission().search(query={'per_page': '1'})[0] + perm = target_sat.api.Permission().search(query={'per_page': '1'})[0] assert perm.name assert perm.resource_type @pytest.mark.tier1 - def test_positive_media_read(self): + def test_positive_media_read(self, target_sat): """Create a media pointing at an OS and read the media. :id: 67b656fe-9302-457a-b544-3addb11c85e0 @@ -490,8 +490,8 @@ def test_positive_media_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - media_id = entities.Media(operatingsystem=[os_id]).create_json()['id'] - media = entities.Media(id=media_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + media_id = target_sat.api.Media(operatingsystem=[os_id]).create_json()['id'] + media = target_sat.api.Media(id=media_id).read() assert len(media.operatingsystem) == 1 assert media.operatingsystem[0].id == os_id diff --git a/tests/foreman/api/test_organization.py b/tests/foreman/api/test_organization.py index 692504faf79..868ec7368b1 100644 --- a/tests/foreman/api/test_organization.py +++ b/tests/foreman/api/test_organization.py @@ -24,7 +24,7 @@ from random import randint from fauxfactory import gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -44,7 +44,7 @@ def valid_org_data_list(): Note: The maximum allowed length of org name is 242 only. This is an intended behavior (Also note that 255 is the standard across other - entities.) + entities) """ return dict( alpha=gen_string('alpha', randint(1, 242)), @@ -61,7 +61,7 @@ class TestOrganization: """Tests for the ``organizations`` path.""" @pytest.mark.tier1 - def test_positive_create(self): + def test_positive_create(self, target_sat): """Create an organization using a 'text/plain' content-type. 
:id: 6f67a3f0-0c1d-498c-9a35-28207b0faec2 @@ -70,7 +70,7 @@ def test_positive_create(self): :CaseImportance: Critical """ - organization = entities.Organization() + organization = target_sat.api.Organization() organization.create_missing() response = client.post( organization.path(), @@ -87,7 +87,7 @@ def test_positive_create(self): @pytest.mark.tier1 @pytest.mark.build_sanity @pytest.mark.parametrize('name', **parametrized(valid_org_data_list())) - def test_positive_create_with_name_and_description(self, name): + def test_positive_create_with_name_and_description(self, name, target_sat): """Create an organization and provide a name and description. :id: afeea84b-61ca-40bf-bb16-476432919115 @@ -99,7 +99,7 @@ def test_positive_create_with_name_and_description(self, name): :parametrized: yes """ - org = entities.Organization(name=name, description=name).create() + org = target_sat.api.Organization(name=name, description=name).create() assert org.name == name assert org.description == name @@ -110,7 +110,7 @@ def test_positive_create_with_name_and_description(self, name): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Create an org with an incorrect name. :id: 9c6a4b45-a98a-4d76-9865-92d992fa1a22 @@ -120,10 +120,10 @@ def test_negative_create_with_invalid_name(self, name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.Organization(name=name).create() + target_sat.api.Organization(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_same_name(self): + def test_negative_create_with_same_name(self, target_sat): """Create two organizations with identical names. 
:id: a0f5333c-cc83-403c-9bf7-08fb372909dc @@ -132,9 +132,9 @@ def test_negative_create_with_same_name(self): :CaseImportance: Critical """ - name = entities.Organization().create().name + name = target_sat.api.Organization().create().name with pytest.raises(HTTPError): - entities.Organization(name=name).create() + target_sat.api.Organization(name=name).create() @pytest.mark.tier1 def test_negative_check_org_endpoint(self, module_entitlement_manifest_org): @@ -155,7 +155,7 @@ def test_negative_check_org_endpoint(self, module_entitlement_manifest_org): assert 'BEGIN RSA PRIVATE KEY' not in orgstring @pytest.mark.tier1 - def test_positive_search(self): + def test_positive_search(self, target_sat): """Create an organization, then search for it by name. :id: f6f1d839-21f2-4676-8683-9f899cbdec4c @@ -164,14 +164,14 @@ def test_positive_search(self): :CaseImportance: High """ - org = entities.Organization().create() - orgs = entities.Organization().search(query={'search': f'name="{org.name}"'}) + org = target_sat.api.Organization().create() + orgs = target_sat.api.Organization().search(query={'search': f'name="{org.name}"'}) assert len(orgs) == 1 assert orgs[0].id == org.id assert orgs[0].name == org.name @pytest.mark.tier1 - def test_negative_create_with_wrong_path(self): + def test_negative_create_with_wrong_path(self, target_sat): """Attempt to create an organization using foreman API path (``api/v2/organizations``) @@ -184,7 +184,7 @@ def test_negative_create_with_wrong_path(self): :CaseImportance: Critical """ - org = entities.Organization() + org = target_sat.api.Organization() org._meta['api_path'] = 'api/v2/organizations' with pytest.raises(HTTPError) as err: org.create() @@ -192,7 +192,7 @@ def test_negative_create_with_wrong_path(self): assert 'Route overriden by Katello' in err.value.response.text @pytest.mark.tier2 - def test_default_org_id_check(self): + def test_default_org_id_check(self, target_sat): """test to check the default_organization id :id: 
df066396-a069-4e9e-b3c1-c6d34a755ec0 @@ -204,7 +204,7 @@ def test_default_org_id_check(self): :CaseImportance: Low """ default_org_id = ( - entities.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[0].id + target_sat.api.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[0].id ) assert default_org_id == 1 @@ -213,9 +213,9 @@ class TestOrganizationUpdate: """Tests for the ``organizations`` path.""" @pytest.fixture - def module_org(self): + def module_org(self, target_sat): """Create an organization.""" - return entities.Organization().create() + return target_sat.api.Organization().create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_org_data_list())) @@ -252,7 +252,7 @@ def test_positive_update_description(self, module_org, desc): assert module_org.description == desc @pytest.mark.tier2 - def test_positive_update_user(self, module_org): + def test_positive_update_user(self, module_org, target_sat): """Update an organization, associate user with it. :id: 2c0c0061-5b4e-4007-9f54-b61d6e65ef58 @@ -261,14 +261,14 @@ def test_positive_update_user(self, module_org): :CaseLevel: Integration """ - user = entities.User().create() + user = target_sat.api.User().create() module_org.user = [user] module_org = module_org.update(['user']) assert len(module_org.user) == 1 assert module_org.user[0].id == user.id @pytest.mark.tier2 - def test_positive_update_subnet(self, module_org): + def test_positive_update_subnet(self, module_org, target_sat): """Update an organization, associate subnet with it. 
:id: 3aa0b9cb-37f7-4e7e-a6ec-c1b407225e54 @@ -277,14 +277,14 @@ def test_positive_update_subnet(self, module_org): :CaseLevel: Integration """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() module_org.subnet = [subnet] module_org = module_org.update(['subnet']) assert len(module_org.subnet) == 1 assert module_org.subnet[0].id == subnet.id @pytest.mark.tier2 - def test_positive_add_and_remove_hostgroup(self): + def test_positive_add_and_remove_hostgroup(self, target_sat): """Add a hostgroup to an organization and then remove it :id: 7eb1aca7-fd7b-404f-ab18-21be5052a11f @@ -297,8 +297,8 @@ def test_positive_add_and_remove_hostgroup(self): :CaseImportance: Medium """ - org = entities.Organization().create() - hostgroup = entities.HostGroup().create() + org = target_sat.api.Organization().create() + hostgroup = target_sat.api.HostGroup().create() org.hostgroup = [hostgroup] org = org.update(['hostgroup']) assert len(org.hostgroup) == 1 @@ -348,7 +348,7 @@ def test_positive_add_and_remove_smart_proxy(self, target_sat): @pytest.mark.tier1 @pytest.mark.parametrize('update_field', ['name', 'label']) - def test_negative_update(self, module_org, update_field): + def test_negative_update(self, module_org, update_field, target_sat): """Update an organization's attributes with invalid values. 
:id: b7152d0b-5ab0-4d68-bfdf-f3eabcb5fbc6 @@ -367,4 +367,4 @@ def test_negative_update(self, module_org, update_field): update_field: gen_string(str_type='utf8', length=256 if update_field == 'name' else 10) } with pytest.raises(HTTPError): - entities.Organization(id=module_org.id, **update_dict).update([update_field]) + target_sat.api.Organization(id=module_org.id, **update_dict).update([update_field]) diff --git a/tests/foreman/api/test_oscap_tailoringfiles.py b/tests/foreman/api/test_oscap_tailoringfiles.py index d29eed1ca95..fb6b78750f0 100644 --- a/tests/foreman/api/test_oscap_tailoringfiles.py +++ b/tests/foreman/api/test_oscap_tailoringfiles.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from robottelo.utils.datafactory import gen_string @@ -27,7 +26,9 @@ class TestTailoringFile: @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_crud_tailoringfile(self, default_org, default_location, tailoring_file_path): + def test_positive_crud_tailoringfile( + self, default_org, default_location, tailoring_file_path, target_sat + ): """Perform end to end testing for oscap tailoring files component :id: 2441988f-2054-49f7-885e-3675336f712f @@ -39,23 +40,23 @@ def test_positive_crud_tailoringfile(self, default_org, default_location, tailor name = gen_string('alpha') new_name = gen_string('alpha') original_filename = gen_string('alpha') - scap = entities.TailoringFile( + scap = target_sat.api.TailoringFile( name=name, scap_file=tailoring_file_path['local'], organization=[default_org], location=[default_location], ).create() - assert entities.TailoringFile().search(query={'search': f'name={name}'}) - result = entities.TailoringFile(id=scap.id).read() + assert target_sat.api.TailoringFile().search(query={'search': f'name={name}'}) + result = target_sat.api.TailoringFile(id=scap.id).read() assert result.name == name assert result.location[0].id == default_location.id assert result.organization[0].id == default_org.id - scap = 
entities.TailoringFile( + scap = target_sat.api.TailoringFile( id=scap.id, name=new_name, original_filename=f'{original_filename}' ).update() - result = entities.TailoringFile(id=scap.id).read() + result = target_sat.api.TailoringFile(id=scap.id).read() assert result.name == new_name assert result.original_filename == original_filename - assert entities.TailoringFile().search(query={'search': f'name={new_name}'}) - entities.TailoringFile(id=scap.id).delete() - assert not entities.TailoringFile().search(query={'search': f'name={new_name}'}) + assert target_sat.api.TailoringFile().search(query={'search': f'name={new_name}'}) + target_sat.api.TailoringFile(id=scap.id).delete() + assert not target_sat.api.TailoringFile().search(query={'search': f'name={new_name}'}) diff --git a/tests/foreman/api/test_oscappolicy.py b/tests/foreman/api/test_oscappolicy.py index 1efdd3e3779..c9500c465d1 100644 --- a/tests/foreman/api/test_oscappolicy.py +++ b/tests/foreman/api/test_oscappolicy.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest @@ -27,7 +26,7 @@ class TestOscapPolicy: @pytest.mark.tier1 @pytest.mark.e2e def test_positive_crud_scap_policy( - self, default_org, default_location, scap_content, tailoring_file + self, default_org, default_location, scap_content, tailoring_file, target_sat ): """Perform end to end testing for oscap policy component @@ -42,11 +41,11 @@ def test_positive_crud_scap_policy( name = gen_string('alpha') new_name = gen_string('alpha') description = gen_string('alpha') - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[default_location], organization=[default_org] ).create() # Create new oscap policy with assigned content and tailoring file - policy = entities.CompliancePolicies( + policy = target_sat.api.CompliancePolicies( name=name, deploy_by='ansible', description=description, @@ -60,7 +59,7 @@ def test_positive_crud_scap_policy( 
location=[default_location], organization=[default_org], ).create() - assert entities.CompliancePolicies().search(query={'search': f'name="{name}"'}) + assert target_sat.api.CompliancePolicies().search(query={'search': f'name="{name}"'}) # Check that created entity has expected values assert policy.deploy_by == 'ansible' assert policy.name == name @@ -74,9 +73,11 @@ def test_positive_crud_scap_policy( assert str(policy.organization[0].id) == str(default_org.id) assert str(policy.location[0].id) == str(default_location.id) # Update oscap policy with new name - policy = entities.CompliancePolicies(id=policy.id, name=new_name).update() + policy = target_sat.api.CompliancePolicies(id=policy.id, name=new_name).update() assert policy.name == new_name - assert not entities.CompliancePolicies().search(query={'search': f'name="{name}"'}) + assert not target_sat.api.CompliancePolicies().search(query={'search': f'name="{name}"'}) # Delete oscap policy entity - entities.CompliancePolicies(id=policy.id).delete() - assert not entities.CompliancePolicies().search(query={'search': f'name="{new_name}"'}) + target_sat.api.CompliancePolicies(id=policy.id).delete() + assert not target_sat.api.CompliancePolicies().search( + query={'search': f'name="{new_name}"'} + ) diff --git a/tests/foreman/api/test_permission.py b/tests/foreman/api/test_permission.py index 5778682965c..011db9a4030 100644 --- a/tests/foreman/api/test_permission.py +++ b/tests/foreman/api/test_permission.py @@ -72,7 +72,7 @@ def create_permissions(self, class_target_sat): cls.permission_names = list(chain.from_iterable(cls.permissions.values())) @pytest.mark.tier1 - def test_positive_search_by_name(self): + def test_positive_search_by_name(self, target_sat): """Search for a permission by name. 
:id: 1b6117f6-599d-4b2d-80a8-1e0764bdc04d @@ -84,7 +84,9 @@ def test_positive_search_by_name(self): """ failures = {} for permission_name in self.permission_names: - results = entities.Permission().search(query={'search': f'name="{permission_name}"'}) + results = target_sat.api.Permission().search( + query={'search': f'name="{permission_name}"'} + ) if len(results) != 1 or len(results) == 1 and results[0].name != permission_name: failures[permission_name] = { 'length': len(results), @@ -95,7 +97,7 @@ def test_positive_search_by_name(self): pytest.fail(json.dumps(failures, indent=True, sort_keys=True)) @pytest.mark.tier1 - def test_positive_search_by_resource_type(self): + def test_positive_search_by_resource_type(self, target_sat): """Search for permissions by resource type. :id: 29d9362b-1bf3-4722-b40f-a5e8b4d0d9ba @@ -109,7 +111,7 @@ def test_positive_search_by_resource_type(self): for resource_type in self.permission_resource_types: if resource_type is None: continue - perm_group = entities.Permission().search( + perm_group = target_sat.api.Permission().search( query={'search': f'resource_type="{resource_type}"'} ) permissions = {perm.name for perm in perm_group} @@ -128,7 +130,7 @@ def test_positive_search_by_resource_type(self): pytest.fail(json.dumps(failures, indent=True, sort_keys=True)) @pytest.mark.tier1 - def test_positive_search(self): + def test_positive_search(self, target_sat): """search with no parameters return all permissions :id: e58308df-19ec-415d-8fa1-63ebf3cd0ad6 @@ -137,7 +139,7 @@ def test_positive_search(self): :CaseImportance: Critical """ - permissions = entities.Permission().search(query={'per_page': '1000'}) + permissions = target_sat.api.Permission().search(query={'per_page': '1000'}) names = {perm.name for perm in permissions} resource_types = {perm.resource_type for perm in permissions} expected_names = set(self.permission_names) @@ -206,7 +208,7 @@ def create_user(self, target_sat, class_org, class_location): 
location=[class_location], ).create() - def give_user_permission(self, perm_name): + def give_user_permission(self, perm_name, target_sat): """Give ``self.user`` the ``perm_name`` permission. This method creates a role and filter to accomplish the above goal. @@ -222,10 +224,10 @@ def give_user_permission(self, perm_name): updating ``self.user``'s roles. :returns: Nothing. """ - role = entities.Role().create() - permissions = entities.Permission().search(query={'search': f'name="{perm_name}"'}) + role = target_sat.api.Role().create() + permissions = target_sat.api.Permission().search(query={'search': f'name="{perm_name}"'}) assert len(permissions) == 1 - entities.Filter(permission=permissions, role=role).create() + target_sat.api.Filter(permission=permissions, role=role).create() self.user.role += [role] self.user = self.user.update(['role']) @@ -347,7 +349,7 @@ def test_positive_check_delete(self, entity_cls, class_org, class_location): 'entity_cls', **parametrized([entities.Architecture, entities.Domain, entities.ActivationKey]), ) - def test_positive_check_update(self, entity_cls, class_org, class_location): + def test_positive_check_update(self, entity_cls, class_org, class_location, target_sat): """Check whether the "edit_*" role has an effect. 
:id: b5de2115-b031-413e-8e5b-eac8cb714174 @@ -377,7 +379,7 @@ def test_positive_check_update(self, entity_cls, class_org, class_location): self.give_user_permission(_permission_name(entity_cls, 'update')) # update() calls read() under the hood, which triggers # permission error - if entity_cls is entities.ActivationKey: + if entity_cls is target_sat.api.ActivationKey: entity_cls(self.cfg, id=new_entity.id, name=name, organization=class_org).update_json( ['name'] ) diff --git a/tests/foreman/api/test_product.py b/tests/foreman/api/test_product.py index 84b6d78f866..dfa9f59f705 100644 --- a/tests/foreman/api/test_product.py +++ b/tests/foreman/api/test_product.py @@ -20,7 +20,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -40,7 +39,7 @@ @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_name(request, name, module_org): +def test_positive_create_with_name(request, name, module_org, module_target_sat): """Create a product providing different valid names :id: 3d873b73-6919-4fda-84df-0e26bdf0c1dc @@ -51,12 +50,12 @@ def test_positive_create_with_name(request, name, module_org): :CaseImportance: Critical """ - product = entities.Product(name=name, organization=module_org).create() + product = module_target_sat.api.Product(name=name, organization=module_org).create() assert product.name == name @pytest.mark.tier1 -def test_positive_create_with_label(module_org): +def test_positive_create_with_label(module_org, module_target_sat): """Create a product providing label which is different from its name :id: 95cf8e05-fd09-422e-bf6f-8b1dde762976 @@ -66,14 +65,14 @@ def test_positive_create_with_label(module_org): :CaseImportance: Critical """ label = gen_string('alphanumeric') - product = entities.Product(label=label, organization=module_org).create() + product = module_target_sat.api.Product(label=label, 
organization=module_org).create() assert product.label == label assert product.name != label @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_create_with_description(description, module_org): +def test_positive_create_with_description(description, module_org, module_target_sat): """Create a product providing different descriptions :id: f3e2df77-6711-440b-800a-9cebbbec36c5 @@ -84,12 +83,14 @@ def test_positive_create_with_description(description, module_org): :CaseImportance: Critical """ - product = entities.Product(description=description, organization=module_org).create() + product = module_target_sat.api.Product( + description=description, organization=module_org + ).create() assert product.description == description @pytest.mark.tier2 -def test_positive_create_with_gpg(module_org): +def test_positive_create_with_gpg(module_org, module_target_sat): """Create a product and provide a GPG key. :id: 57331c1f-15dd-4c9f-b8fc-3010847b2975 @@ -98,17 +99,17 @@ def test_positive_create_with_gpg(module_org): :CaseLevel: Integration """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_FILE.read_text(), organization=module_org, ).create() - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = module_target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() assert product.gpg_key.id == gpg_key.id @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_name(name, module_org): +def test_negative_create_with_name(name, module_org, module_target_sat): """Create a product providing invalid names only :id: 76531f53-09ff-4ee9-89b9-09a697526fb1 @@ -120,11 +121,11 @@ def test_negative_create_with_name(name, module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Product(name=name, organization=module_org).create() + 
module_target_sat.api.Product(name=name, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(module_org, module_target_sat): """Create a product providing a name of already existent entity :id: 039269c5-607a-4b70-91dd-b8fed8e50cc6 @@ -134,13 +135,13 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Product(name=name, organization=module_org).create() + module_target_sat.api.Product(name=name, organization=module_org).create() with pytest.raises(HTTPError): - entities.Product(name=name, organization=module_org).create() + module_target_sat.api.Product(name=name, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_label(module_org): +def test_negative_create_with_label(module_org, module_target_sat): """Create a product providing invalid label :id: 30b1a737-07f1-4786-b68a-734e57c33a62 @@ -150,12 +151,12 @@ def test_negative_create_with_label(module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Product(label=gen_string('utf8'), organization=module_org).create() + module_target_sat.api.Product(label=gen_string('utf8'), organization=module_org).create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_update_name(name, module_org): +def test_positive_update_name(name, module_org, module_target_sat): """Update product name to another valid name. 
:id: 1a9f6e0d-43fb-42e2-9dbd-e880f03b0297 @@ -166,7 +167,7 @@ def test_positive_update_name(name, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.name = name product = product.update(['name']) assert product.name == name @@ -174,7 +175,7 @@ def test_positive_update_name(name, module_org): @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_update_description(description, module_org): +def test_positive_update_description(description, module_org, module_target_sat): """Update product description to another valid one. :id: c960c326-2e9f-4ee7-bdec-35a705305067 @@ -185,14 +186,14 @@ def test_positive_update_description(description, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.description = description product = product.update(['description']) assert product.description == description @pytest.mark.tier1 -def test_positive_update_name_to_original(module_org): +def test_positive_update_name_to_original(module_org, module_target_sat): """Rename Product back to original name :id: 3075f17f-4475-4b64-9fbd-1e41ced9142d @@ -201,7 +202,7 @@ def test_positive_update_name_to_original(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() old_name = product.name # Update product name @@ -218,7 +219,7 @@ def test_positive_update_name_to_original(module_org): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_update_gpg(module_org): +def test_positive_update_gpg(module_org, module_target_sat): """Create a product and update its GPGKey :id: 3b08f155-a0d6-4987-b281-dc02e8d5a03e @@ -228,14 +229,14 @@ def 
test_positive_update_gpg(module_org): :CaseLevel: Integration """ # Create a product and make it point to a GPG key. - gpg_key_1 = entities.GPGKey( + gpg_key_1 = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_FILE.read_text(), organization=module_org, ).create() - product = entities.Product(gpg_key=gpg_key_1, organization=module_org).create() + product = module_target_sat.api.Product(gpg_key=gpg_key_1, organization=module_org).create() # Update the product and make it point to a new GPG key. - gpg_key_2 = entities.GPGKey( + gpg_key_2 = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), organization=module_org, ).create() @@ -246,7 +247,7 @@ def test_positive_update_gpg(module_org): @pytest.mark.skip_if_open("BZ:1310422") @pytest.mark.tier2 -def test_positive_update_organization(module_org): +def test_positive_update_organization(module_org, module_target_sat): """Create a product and update its organization :id: b298957a-2cdb-4f17-a934-098612f3b659 @@ -257,9 +258,9 @@ def test_positive_update_organization(module_org): :BZ: 1310422 """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() # Update the product and make it point to a new organization. 
- new_org = entities.Organization().create() + new_org = module_target_sat.api.Organization().create() product.organization = new_org product = product.update() assert product.organization.id == new_org.id @@ -267,7 +268,7 @@ def test_positive_update_organization(module_org): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_update_name(name, module_org): +def test_negative_update_name(name, module_org, module_target_sat): """Attempt to update product name to invalid one :id: 3eb61fa8-3524-4872-8f1b-4e88004f66f5 @@ -278,13 +279,13 @@ def test_negative_update_name(name, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() with pytest.raises(HTTPError): - entities.Product(id=product.id, name=name).update(['name']) + module_target_sat.api.Product(id=product.id, name=name).update(['name']) @pytest.mark.tier1 -def test_negative_update_label(module_org): +def test_negative_update_label(module_org, module_target_sat): """Attempt to update product label to another one. :id: 065cd673-8d10-46c7-800c-b731b06a5359 @@ -293,7 +294,7 @@ def test_negative_update_label(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.label = gen_string('alpha') with pytest.raises(HTTPError): product.update(['label']) @@ -301,7 +302,7 @@ def test_negative_update_label(module_org): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete(name, module_org): +def test_positive_delete(name, module_org, module_target_sat): """Create product and then delete it. 
:id: 30df95f5-0a4e-41ee-a99f-b418c5c5f2f3 @@ -312,7 +313,7 @@ def test_positive_delete(name, module_org): :CaseImportance: Critical """ - product = entities.Product(name=name, organization=module_org).create() + product = module_target_sat.api.Product(name=name, organization=module_org).create() product.delete() with pytest.raises(HTTPError): product.read() @@ -320,7 +321,7 @@ def test_positive_delete(name, module_org): @pytest.mark.tier1 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync(module_org): +def test_positive_sync(module_org, module_target_sat): """Sync product (repository within a product) :id: 860e00a1-c370-4bd0-8987-449338071d56 @@ -329,8 +330,8 @@ def test_positive_sync(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository( product=product, content_type='yum', url=settings.repos.yum_1.url ).create() assert repo.read().content_counts['rpm'] == 0 @@ -341,7 +342,7 @@ def test_positive_sync(module_org): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync_several_repos(module_org): +def test_positive_sync_several_repos(module_org, module_target_sat): """Sync product (all repositories within a product) :id: 07918442-b72f-4db5-96b6-975564f3663a @@ -353,11 +354,11 @@ def test_positive_sync_several_repos(module_org): :BZ: 1389543 """ - product = entities.Product(organization=module_org).create() - rpm_repo = entities.Repository( + product = module_target_sat.api.Product(organization=module_org).create() + rpm_repo = module_target_sat.api.Repository( product=product, content_type='yum', url=settings.repos.yum_1.url ).create() - docker_repo = entities.Repository( + docker_repo = 
module_target_sat.api.Repository( content_type=REPO_TYPE['docker'], docker_upstream_name=CONTAINER_UPSTREAM_NAME, product=product, @@ -372,7 +373,7 @@ def test_positive_sync_several_repos(module_org): @pytest.mark.tier2 -def test_positive_filter_product_list(module_entitlement_manifest_org): +def test_positive_filter_product_list(module_entitlement_manifest_org, module_target_sat): """Filter products based on param 'custom/redhat_only' :id: e61fb63a-4552-4915-b13d-23ab80138249 @@ -384,9 +385,9 @@ def test_positive_filter_product_list(module_entitlement_manifest_org): :BZ: 1667129 """ org = module_entitlement_manifest_org - product = entities.Product(organization=org).create() - custom_products = entities.Product(organization=org).search(query={'custom': True}) - rh_products = entities.Product(organization=org).search( + product = module_target_sat.api.Product(organization=org).create() + custom_products = module_target_sat.api.Product(organization=org).search(query={'custom': True}) + rh_products = module_target_sat.api.Product(organization=org).search( query={'redhat_only': True, 'per_page': 1000} ) diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py index 5047a9a616c..7d6bf4d2e1d 100644 --- a/tests/foreman/api/test_reporttemplates.py +++ b/tests/foreman/api/test_reporttemplates.py @@ -18,7 +18,6 @@ """ from broker import Broker from fauxfactory import gen_string -from nailgun import entities import pytest from requests import HTTPError from wait_for import wait_for @@ -49,24 +48,24 @@ def setup_content(module_entitlement_manifest_org, module_target_sat): reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() - custom_repo = entities.Repository( - product=entities.Product(organization=org).create(), + custom_repo = module_target_sat.api.Repository( + 
product=module_target_sat.api.Product(organization=org).create(), ).create() custom_repo.sync() - lce = entities.LifecycleEnvironment(organization=org).create() - cv = entities.ContentView( + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() + cv = module_target_sat.api.ContentView( organization=org, repository=[rh_repo_id, custom_repo.id], ).create() cv.publish() cvv = cv.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=cv, max_hosts=100, organization=org, environment=lce, auto_attach=True ).create() - subscription = entities.Subscription(organization=org).search( + subscription = module_target_sat.api.Subscription(organization=org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) @@ -78,7 +77,7 @@ def setup_content(module_entitlement_manifest_org, module_target_sat): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_CRUDL(name): +def test_positive_CRUDL(name, target_sat): """Create, Read, Update, Delete, List :id: a2a577db-144e-4761-a42e-e83885464786 @@ -101,27 +100,27 @@ def test_positive_CRUDL(name): """ # Create template1 = gen_string('alpha') - rt = entities.ReportTemplate(name=name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=name, template=template1).create() # List - res = entities.ReportTemplate().search(query={'search': f'name="{name}"'}) + res = target_sat.api.ReportTemplate().search(query={'search': f'name="{name}"'}) assert name in list(map(lambda x: x.name, res)) # Read - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert name == rt.name assert template1 == rt.template # Update template2 = gen_string('alpha') - entities.ReportTemplate(id=rt.id, 
template=template2).update(['template']) - rt = entities.ReportTemplate(id=rt.id).read() + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(['template']) + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert template2 == rt.template # Delete - entities.ReportTemplate(id=rt.id).delete() + target_sat.api.ReportTemplate(id=rt.id).delete() with pytest.raises(HTTPError): - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() @pytest.mark.tier1 -def test_positive_generate_report_nofilter(): +def test_positive_generate_report_nofilter(target_sat): """Generate Host - Statuses report :id: a4b687db-144e-4761-a42e-e93887464986 @@ -137,8 +136,10 @@ def test_positive_generate_report_nofilter(): :CaseImportance: Critical """ host_name = gen_string('alpha').lower() - entities.Host(name=host_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = rt.generate() for column_name in [ 'Name', @@ -164,7 +165,7 @@ def test_positive_generate_report_nofilter(): @pytest.mark.tier2 -def test_positive_generate_report_filter(): +def test_positive_generate_report_filter(target_sat): """Generate Host - Statuses report :id: a4b677cb-144e-4761-a42e-e93887464986 @@ -181,9 +182,11 @@ def test_positive_generate_report_filter(): """ host1_name = gen_string('alpha').lower() host2_name = gen_string('alpha').lower() - entities.Host(name=host1_name).create() - entities.Host(name=host2_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host1_name).create() + target_sat.api.Host(name=host2_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = 
rt.generate(data={"input_values": {"hosts": host2_name}}) for column_name in [ 'Name', @@ -210,7 +213,7 @@ def test_positive_generate_report_filter(): @pytest.mark.tier2 -def test_positive_report_add_userinput(): +def test_positive_report_add_userinput(target_sat): """Add user input to template, use it in template, generate template :id: a4a577db-144e-4761-a42e-e86887464986 @@ -230,21 +233,21 @@ def test_positive_report_add_userinput(): input_value = gen_string('alpha').lower() template_name = gen_string('alpha').lower() template = f'<%= "value=\\"" %><%= input(\'{input_name}\') %><%= "\\"" %>' - entities.Host(name=host_name).create() - rt = entities.ReportTemplate(name=template_name, template=template).create() - entities.TemplateInput( + target_sat.api.Host(name=host_name).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template).create() + target_sat.api.TemplateInput( name=input_name, input_type="user", template=rt.id, ).create() - ti = entities.TemplateInput(template=rt.id).search()[0].read() + ti = target_sat.api.TemplateInput(template=rt.id).search()[0].read() assert input_name == ti.name res = rt.generate(data={"input_values": {input_name: input_value}}) assert f'value="{input_value}"' in res @pytest.mark.tier2 -def test_positive_lock_clone_nodelete_unlock_report(): +def test_positive_lock_clone_nodelete_unlock_report(target_sat): """Lock report template. Check it can be cloned and can't be deleted or edited. Unlock. Check it can be deleted and edited. @@ -274,15 +277,15 @@ def test_positive_lock_clone_nodelete_unlock_report(): template_clone_name = gen_string('alpha').lower() template1 = gen_string('alpha') template2 = gen_string('alpha') - rt = entities.ReportTemplate(name=template_name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template1).create() # 2. 
Lock template - entities.ReportTemplate(id=rt.id, locked=True).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=True).update(["locked"]) rt = rt.read() assert rt.locked is True # 3. Clone template, check cloned data rt.clone(data={'name': template_clone_name}) cloned_rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': f'name="{template_clone_name}"'})[0] .read() ) @@ -294,24 +297,28 @@ def test_positive_lock_clone_nodelete_unlock_report(): rt.delete() # In BZ1680458, exception is thrown but template is deleted anyway assert ( - len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) != 0 + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + != 0 ) # 5. Try to edit template with pytest.raises(HTTPError): - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template1 == rt.template # 6. Unlock template - entities.ReportTemplate(id=rt.id, locked=False).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=False).update(["locked"]) rt = rt.read() assert rt.locked is False # 7. Edit template - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template2 == rt.template # 8. 
Delete template rt.delete() - assert len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) == 0 + assert ( + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + == 0 + ) @pytest.mark.tier2 @@ -593,7 +600,7 @@ def test_positive_generate_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) @@ -632,7 +639,7 @@ def test_positive_schedule_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 864468c8318..dd3cc00b06c 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -17,7 +17,6 @@ :Upstream: No """ from manifester import Manifester -from nailgun import entities from nailgun.entity_mixins import call_entity_method_with_timeout import pytest from requests.exceptions import HTTPError @@ -179,7 +178,7 @@ def test_positive_sync_kickstart_repo(module_entitlement_manifest_org, target_sa repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update(['download_policy']) diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index 56f77e55ba4..29cfbba2d13 100644 --- a/tests/foreman/api/test_repository.py +++ b/tests/foreman/api/test_repository.py @@ -23,7 +23,7 @@ from urllib.parse 
import urljoin, urlparse, urlunparse from fauxfactory import gen_string -from nailgun import client, entities +from nailgun import client from nailgun.entity_mixins import TaskFailedError, call_entity_method_with_timeout import pytest from requests.exceptions import HTTPError @@ -47,24 +47,24 @@ def repo_options(request, module_org, module_product): @pytest.fixture -def repo_options_custom_product(request, module_org): +def repo_options_custom_product(request, module_org, module_target_sat): """Return the options that were passed as indirect parameters.""" options = getattr(request, 'param', {}).copy() options['organization'] = module_org - options['product'] = entities.Product(organization=module_org).create() + options['product'] = module_target_sat.api.Product(organization=module_org).create() return options @pytest.fixture -def env(module_org): +def env(module_org, module_target_sat): """Create a new puppet environment.""" - return entities.Environment(organization=[module_org]).create() + return module_target_sat.api.Environment(organization=[module_org]).create() @pytest.fixture -def repo(repo_options): +def repo(repo_options, module_target_sat): """Create a new repository.""" - return entities.Repository(**repo_options).create() + return module_target_sat.api.Repository(**repo_options).create() class TestRepository: @@ -197,7 +197,7 @@ def test_positive_create_with_download_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_positive_create_with_default_download_policy(self, repo): + def test_positive_create_with_default_download_policy(self, repo, target_sat): """Verify if the default download policy is assigned when creating a YUM repo without `download_policy` field @@ -210,7 +210,7 @@ def test_positive_create_with_default_download_policy(self, repo): :CaseImportance: Critical """ - default_dl_policy = entities.Setting().search( + default_dl_policy = 
target_sat.api.Setting().search( query={'search': 'name=default_download_policy'} ) assert default_dl_policy @@ -310,7 +310,7 @@ def test_positive_create_unprotected(self, repo_options, repo): assert repo.unprotected == repo_options['unprotected'] @pytest.mark.tier2 - def test_positive_create_with_gpg(self, module_org, module_product): + def test_positive_create_with_gpg(self, module_org, module_product, module_target_sat): """Create a repository and provide a GPG key ID. :id: 023cf84b-74f3-4e63-a9d7-10afee6c1990 @@ -319,16 +319,16 @@ def test_positive_create_with_gpg(self, module_org, module_product): :CaseLevel: Integration """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key).create() # Verify that the given GPG key ID is used. assert repo.gpg_key.id == gpg_key.id @pytest.mark.tier2 - def test_positive_create_same_name_different_orgs(self, repo): + def test_positive_create_same_name_different_orgs(self, repo, target_sat): """Create two repos with the same name in two different organizations. 
:id: bd1bd7e3-e393-44c8-a6d0-42edade40f60 @@ -338,9 +338,9 @@ def test_positive_create_same_name_different_orgs(self, repo): :CaseLevel: Integration """ - org_2 = entities.Organization().create() - product_2 = entities.Product(organization=org_2).create() - repo_2 = entities.Repository(product=product_2, name=repo.name).create() + org_2 = target_sat.api.Organization().create() + product_2 = target_sat.api.Product(organization=org_2).create() + repo_2 = target_sat.api.Repository(product=product_2, name=repo.name).create() assert repo_2.name == repo.name @pytest.mark.tier1 @@ -349,7 +349,7 @@ def test_positive_create_same_name_different_orgs(self, repo): **datafactory.parametrized([{'name': name} for name in datafactory.invalid_values_list()]), indirect=True, ) - def test_negative_create_name(self, repo_options): + def test_negative_create_name(self, repo_options, target_sat): """Attempt to create repository with invalid names only. :id: 24947c92-3415-43df-add6-d6eb38afd8a3 @@ -361,7 +361,7 @@ def test_negative_create_name(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -371,7 +371,7 @@ def test_negative_create_name(self, repo_options): ), indirect=True, ) - def test_negative_create_with_same_name(self, repo_options, repo): + def test_negative_create_with_same_name(self, repo_options, repo, target_sat): """Attempt to create a repository providing a name of already existent entity @@ -384,10 +384,10 @@ def test_negative_create_with_same_name(self, repo_options, repo): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 - def test_negative_create_label(self, module_product): + def test_negative_create_label(self, module_product, module_target_sat): """Attempt to 
create repository with invalid label. :id: f646ae84-2660-41bd-9883-331285fa1c9a @@ -397,7 +397,9 @@ def test_negative_create_label(self, module_product): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(product=module_product, label=gen_string('utf8')).create() + module_target_sat.api.Repository( + product=module_product, label=gen_string('utf8') + ).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -405,7 +407,7 @@ def test_negative_create_label(self, module_product): **datafactory.parametrized([{'url': url} for url in datafactory.invalid_names_list()]), indirect=True, ) - def test_negative_create_url(self, repo_options): + def test_negative_create_url(self, repo_options, target_sat): """Attempt to create repository with invalid url. :id: 0bb9fc3f-d442-4437-b5d8-83024bc7ceab @@ -417,7 +419,7 @@ def test_negative_create_url(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.skipif( @@ -428,7 +430,7 @@ def test_negative_create_url(self, repo_options): **datafactory.parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}.com'}]), indirect=True, ) - def test_negative_create_with_url_with_special_characters(self, repo_options): + def test_negative_create_with_url_with_special_characters(self, repo_options, target_sat): """Verify that repository URL cannot contain unquoted special characters :id: 2ffaa412-e5e5-4bec-afaa-9ea54315df49 @@ -440,7 +442,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -450,7 +452,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): ), indirect=True, ) - def 
test_negative_create_with_invalid_download_policy(self, repo_options): + def test_negative_create_with_invalid_download_policy(self, repo_options, target_sat): """Verify that YUM repository cannot be created with invalid download policy @@ -464,13 +466,13 @@ def test_negative_create_with_invalid_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_negative_update_to_invalid_download_policy(self, repo): + def test_negative_update_to_invalid_download_policy(self, repo, target_sat): """Verify that YUM repository cannot be updated to invalid download policy @@ -499,7 +501,7 @@ def test_negative_update_to_invalid_download_policy(self, repo): ), indirect=True, ) - def test_negative_create_non_yum_with_download_policy(self, repo_options): + def test_negative_create_non_yum_with_download_policy(self, repo_options, target_sat): """Verify that non-YUM repositories cannot be created with download policy @@ -513,7 +515,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -523,7 +525,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): ), indirect=True, ) - def test_negative_create_checksum(self, repo_options): + def test_negative_create_checksum(self, repo_options, target_sat): """Attempt to create repository with invalid checksum type. 
:id: c49a3c49-110d-4b74-ae14-5c9494a4541c @@ -535,7 +537,7 @@ def test_negative_create_checksum(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -547,7 +549,7 @@ def test_negative_create_checksum(self, repo_options): ids=['sha1', 'sha256'], indirect=True, ) - def test_negative_create_checksum_with_on_demand_policy(self, repo_options): + def test_negative_create_checksum_with_on_demand_policy(self, repo_options, target_sat): """Attempt to create repository with checksum and on_demand policy. :id: de8b157c-ed62-454b-94eb-22659ce1158e @@ -559,7 +561,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -669,7 +671,7 @@ def test_positive_update_unprotected(self, repo): assert repo.unprotected is True @pytest.mark.tier2 - def test_positive_update_gpg(self, module_org, module_product): + def test_positive_update_gpg(self, module_org, module_product, module_target_sat): """Create a repository and update its GPGKey :id: 0e9319dc-c922-4ecf-9f83-d221cfdf54c2 @@ -679,14 +681,14 @@ def test_positive_update_gpg(self, module_org, module_product): :CaseLevel: Integration """ # Create a repo and make it point to a GPG key. - gpg_key_1 = entities.GPGKey( + gpg_key_1 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key_1).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key_1).create() # Update the repo and make it point to a new GPG key. 
- gpg_key_2 = entities.GPGKey( + gpg_key_2 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -712,7 +714,7 @@ def test_positive_update_contents(self, repo): @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_upload_delete_srpm(self, repo): + def test_positive_upload_delete_srpm(self, repo, target_sat): """Create a repository and upload, delete SRPM contents. :id: e091a725-048f-44ca-90cc-c016c450ced9 @@ -726,12 +728,12 @@ def test_positive_upload_delete_srpm(self, repo): :BZ: 1378442 """ # upload srpm - entities.ContentUpload(repository=repo).upload( + target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) assert repo.read().content_counts['srpm'] == 1 - srpm_detail = entities.Srpms().search(query={'repository_id': repo.id}) + srpm_detail = target_sat.api.Srpms().search(query={'repository_id': repo.id}) assert len(srpm_detail) == 1 # Delete srpm @@ -750,7 +752,7 @@ def test_positive_upload_delete_srpm(self, repo): ids=['yum_fake'], indirect=True, ) - def test_positive_create_delete_srpm_repo(self, repo): + def test_positive_create_delete_srpm_repo(self, repo, target_sat): """Create a repository, sync SRPM contents and remove repo :id: e091a725-042f-43ca-99cc-c017c450ced9 @@ -763,7 +765,7 @@ def test_positive_create_delete_srpm_repo(self, repo): """ repo.sync() assert repo.read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'repository_id': repo.id})) == 3 + assert len(target_sat.api.Srpms().search(query={'repository_id': repo.id})) == 3 repo.delete() with pytest.raises(HTTPError): repo.read() @@ -778,7 +780,7 @@ def test_positive_create_delete_srpm_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_remove_contents(self, repo): + def test_positive_remove_contents(self, repo, target_sat): """Synchronize a repository and remove rpm content. 
:id: f686b74b-7ee9-4806-b999-bc05ffe61a9d @@ -795,7 +797,7 @@ def test_positive_remove_contents(self, repo): repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 @@ -953,7 +955,7 @@ def test_negative_synchronize_auth_yum_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_resynchronize_rpm_repo(self, repo): + def test_positive_resynchronize_rpm_repo(self, repo, target_sat): """Check that repository content is resynced after packages were removed from repository @@ -971,7 +973,7 @@ def test_positive_resynchronize_rpm_repo(self, repo): repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 # Re-synchronize repository @@ -1173,7 +1175,7 @@ def test_positive_recreate_pulp_repositories(self, module_entitlement_manifest_o reposet=constants.REPOSET['rhst7'], releasever=None, ) - call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) + call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) with target_sat.session.shell() as sh: sh.send('foreman-rake console') time.sleep(30) # sleep to allow time for console to open @@ -1211,9 +1213,9 @@ def test_positive_mirroring_policy(self, target_sat): repo_url = settings.repos.yum_0.url packages_count = constants.FAKE_0_YUM_REPO_PACKAGES_COUNT - org = entities.Organization().create() - prod = 
entities.Product(organization=org).create() - repo = entities.Repository( + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository( download_policy='immediate', mirroring_policy='mirror_complete', product=prod, @@ -1224,7 +1226,7 @@ def test_positive_mirroring_policy(self, target_sat): assert repo.content_counts['rpm'] == packages_count # remove all packages from the repo and upload another one - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -1302,7 +1304,7 @@ class TestRepositorySync: """Tests for ``/katello/api/repositories/:id/sync``.""" @pytest.mark.tier2 - def test_positive_sync_repos_with_lots_files(self): + def test_positive_sync_repos_with_lots_files(self, target_sat): """Attempt to synchronize repository containing a lot of files inside rpms. @@ -1316,9 +1318,9 @@ def test_positive_sync_repos_with_lots_files(self): :expectedresults: repository was successfully synchronized """ - org = entities.Organization().create() - product = entities.Product(organization=org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_8.url).create() + org = target_sat.api.Organization().create() + product = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository(product=product, url=settings.repos.yum_8.url).create() response = repo.sync() assert response, f"Repository {repo} failed to sync." 
@@ -1341,7 +1343,7 @@ def test_positive_sync_rh(self, module_entitlement_manifest_org, target_sat): reposet=constants.REPOSET['rhst7'], releasever=None, ) - entities.Repository(id=repo_id).sync() + target_sat.api.Repository(id=repo_id).sync() @pytest.mark.tier2 @pytest.mark.skipif( @@ -1417,19 +1419,19 @@ def test_positive_bulk_cancel_sync(self, target_sat, module_entitlement_manifest releasever=repo['releasever'], ) repo_ids.append(repo_id) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update() sync_ids = [] for repo_id in repo_ids: - sync_task = entities.Repository(id=repo_id).sync(synchronous=False) + sync_task = target_sat.api.Repository(id=repo_id).sync(synchronous=False) sync_ids.append(sync_task['id']) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) # Give some time for sync cancels to calm down time.sleep(30) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) for sync_id in sync_ids: - sync_result = entities.ForemanTask(id=sync_id).poll(canceled=True) + sync_result = target_sat.api.ForemanTask(id=sync_id).poll(canceled=True) assert ( 'Task canceled' in sync_result['humanized']['errors'] or 'No content added' in sync_result['humanized']['output'] @@ -1541,11 +1543,11 @@ def test_positive_sync_kickstart_check_os( repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.sync() major, minor = constants.REPOS['kickstart'][distro]['version'].split('.') - os = entities.OperatingSystem().search( + os = target_sat.api.OperatingSystem().search( query={'search': f'name="RedHat" AND 
major="{major}" AND minor="{minor}"'} ) assert len(os) @@ -1666,7 +1668,7 @@ def test_positive_synchronize(self, repo): ), indirect=True, ) - def test_positive_cancel_docker_repo_sync(self, repo): + def test_positive_cancel_docker_repo_sync(self, repo, target_sat): """Cancel a large, syncing Docker-type repository :id: 86534979-be49-40ad-8290-05ac71c801b2 @@ -1689,8 +1691,8 @@ def test_positive_cancel_docker_repo_sync(self, repo): sync_task = repo.sync(synchronous=False) # Need to wait for sync to actually start up time.sleep(2) - entities.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) - sync_task = entities.ForemanTask(id=sync_task['id']).poll(canceled=True) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) + sync_task = target_sat.api.ForemanTask(id=sync_task['id']).poll(canceled=True) assert 'Task canceled' in sync_task['humanized']['errors'] assert 'No content added' in sync_task['humanized']['output'] @@ -1709,7 +1711,9 @@ def test_positive_cancel_docker_repo_sync(self, repo): ), indirect=True, ) - def test_positive_delete_product_with_synced_repo(self, repo_options_custom_product): + def test_positive_delete_product_with_synced_repo( + self, repo_options_custom_product, target_sat + ): """Create and sync a Docker-type repository, delete the product. 
:id: c3d33836-54df-484d-97e1-f9fc9e22d23c @@ -1724,7 +1728,7 @@ def test_positive_delete_product_with_synced_repo(self, repo_options_custom_prod :BZ: 1867287 """ - repo = entities.Repository(**repo_options_custom_product).create() + repo = target_sat.api.Repository(**repo_options_custom_product).create() repo.sync(timeout=600) assert repo.read().content_counts['docker_manifest'] >= 1 assert repo.product.delete() @@ -1779,7 +1783,7 @@ def test_positive_synchronize_private_registry(self, repo): :parametrized: yes - :expectedresults: A repository is created with a private Docker \ + :expectedresults: A repository is created with a private Docker repository and it is synchronized. :customerscenario: true @@ -1910,7 +1914,7 @@ def test_negative_synchronize_private_registry_no_passwd( match='422 Client Error: Unprocessable Entity for url: ' f'{target_sat.url}:443/katello/api/v2/repositories', ): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier2 @pytest.mark.upgrade @@ -2187,7 +2191,7 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, # releasever=None, # basearch=None, # ) -# call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) +# call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) class TestSRPMRepository: @@ -2196,7 +2200,9 @@ class TestSRPMRepository: @pytest.mark.skip_if_open("BZ:2016047") @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): + def test_positive_srpm_upload_publish_promote_cv( + self, module_org, env, repo, module_target_sat + ): """Upload SRPM to repository, add repository to content view and publish, promote content view @@ -2204,20 +2210,27 @@ def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): :expectedresults: srpms can be listed in organization, content view, Lifecycle env """ - 
entities.ContentUpload(repository=repo).upload( + module_target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = module_target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 1 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 1 + assert ( + len(module_target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 1 + ) assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) == 1 + len( + module_target_sat.api.Srpms().search( + query={'content_view_version_id': cv.version[0].id} + ) + ) + == 1 ) @pytest.mark.upgrade @@ -2231,7 +2244,7 @@ def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): **datafactory.parametrized({'fake_srpm': {'url': repo_constants.FAKE_YUM_SRPM_REPO}}), indirect=True, ) - def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): + def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo, target_sat): """Synchronize repository with SRPMs, add repository to content view and publish, promote content view @@ -2243,19 +2256,20 @@ def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): """ repo.sync() - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 3 + assert len(target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 3 assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) >= 3 + 
len(target_sat.api.Srpms().search(query={'content_view_version_id': cv.version[0].id})) + >= 3 ) cv.version[0].promote(data={'environment_ids': env.id, 'force': False}) - assert len(entities.Srpms().search(query={'environment_id': env.id})) == 3 + assert len(target_sat.api.Srpms().search(query={'environment_id': env.id})) == 3 class TestSRPMRepositoryIgnoreContent: @@ -2370,7 +2384,7 @@ class TestFileRepository: **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_upload_file_to_file_repo(self, repo): + def test_positive_upload_file_to_file_repo(self, repo, target_sat): """Check arbitrary file can be uploaded to File Repository :id: fdb46481-f0f4-45aa-b075-2a8f6725e51b @@ -2388,7 +2402,9 @@ def test_positive_upload_file_to_file_repo(self, repo): repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) assert repo.read().content_counts['file'] == 1 - filesearch = entities.File().search(query={"search": f"name={constants.RPM_TO_UPLOAD}"}) + filesearch = target_sat.api.File().search( + query={"search": f"name={constants.RPM_TO_UPLOAD}"} + ) assert constants.RPM_TO_UPLOAD == filesearch[0].name @pytest.mark.stubbed @@ -2419,7 +2435,7 @@ def test_positive_file_permissions(self): **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_remove_file(self, repo): + def test_positive_remove_file(self, repo, target_sat): """Check arbitrary file can be removed from File Repository :id: 65068b4c-9018-4baa-b87b-b6e9d7384a5d @@ -2440,7 +2456,7 @@ def test_positive_remove_file(self, repo): repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) assert repo.read().content_counts['file'] == 1 - file_detail = entities.File().search(query={'repository_id': repo.id}) + file_detail = target_sat.api.File().search(query={'repository_id': repo.id}) repo.remove_content(data={'ids': [file_detail[0].id], 'content_type': 
'file'}) assert repo.read().content_counts['file'] == 0 @@ -2534,7 +2550,9 @@ class TestTokenAuthContainerRepository: """ @pytest.mark.tier2 - def test_positive_create_with_long_token(self, module_org, module_product, request): + def test_positive_create_with_long_token( + self, module_org, module_product, request, module_target_sat + ): """Create and sync Docker-type repo from the Red Hat Container registry Using token based auth, with very long tokens (>255 characters). @@ -2571,7 +2589,7 @@ def test_positive_create_with_long_token(self, module_org, module_product, reque if not len(repo_options['upstream_password']) > 255: pytest.skip('The "long_pass" registry does not meet length requirement') - repo = entities.Repository(**repo_options).create() + repo = module_target_sat.api.Repository(**repo_options).create() @request.addfinalizer def clean_repo(): @@ -2593,7 +2611,9 @@ def clean_repo(): @pytest.mark.tier2 @pytest.mark.parametrize('repo_key', container_repo_keys) - def test_positive_tag_whitelist(self, request, repo_key, module_org, module_product): + def test_positive_tag_whitelist( + self, request, repo_key, module_org, module_product, module_target_sat + ): """Create and sync Docker-type repos from multiple supported registries with a tag whitelist :id: 4f8ea85b-4c69-4da6-a8ef-bd467ee35147 @@ -2616,7 +2636,7 @@ def test_positive_tag_whitelist(self, request, repo_key, module_org, module_prod repo_options['organization'] = module_org repo_options['product'] = module_product - repo = entities.Repository(**repo_options).create() + repo = module_target_sat.api.Repository(**repo_options).create() @request.addfinalizer def clean_repo(): diff --git a/tests/foreman/api/test_repository_set.py b/tests/foreman/api/test_repository_set.py index 449bb1be33c..e4766585824 100644 --- a/tests/foreman/api/test_repository_set.py +++ b/tests/foreman/api/test_repository_set.py @@ -19,7 +19,6 @@ :Upstream: No """ -from nailgun import entities import pytest from 
robottelo.constants import PRDS, REPOSET @@ -33,17 +32,17 @@ @pytest.fixture -def product(function_entitlement_manifest_org): +def product(function_entitlement_manifest_org, module_target_sat): """Find and return the product matching PRODUCT_NAME.""" - return entities.Product( + return module_target_sat.api.Product( name=PRODUCT_NAME, organization=function_entitlement_manifest_org ).search()[0] @pytest.fixture -def reposet(product): +def reposet(product, module_target_sat): """Find and return the repository set matching REPOSET_NAME and product.""" - return entities.RepositorySet(name=REPOSET_NAME, product=product).search()[0] + return module_target_sat.api.RepositorySet(name=REPOSET_NAME, product=product).search()[0] @pytest.fixture diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index 233e5ffea15..9ed32e118ec 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -20,7 +20,6 @@ :Upstream: No """ -from nailgun import entities from nailgun.config import ServerConfig import pytest from requests.exceptions import HTTPError @@ -41,7 +40,7 @@ class TestRole: ('name', 'new_name'), **parametrized(list(zip(generate_strings_list(), generate_strings_list()))), ) - def test_positive_crud(self, name, new_name): + def test_positive_crud(self, name, new_name, target_sat): """Create, update and delete role with name ``name_generator()``. 
:id: 02c7d04d-a52c-4bc2-9e17-9938ab2ee5b2 @@ -55,7 +54,7 @@ def test_positive_crud(self, name, new_name): :CaseImportance: Critical """ - role = entities.Role(name=name).create() + role = target_sat.api.Role(name=name).create() assert role.name == name role.name = new_name assert role.update(['name']).name == new_name @@ -67,7 +66,7 @@ def test_positive_crud(self, name, new_name): class TestCannedRole: """Implements Canned Roles tests from API""" - def create_org_admin_role(self, name=None, orgs=None, locs=None): + def create_org_admin_role(self, target_sat, name=None, orgs=None, locs=None): """Helper function to create org admin role for particular organizations and locations by cloning 'Organization admin' role. @@ -80,17 +79,19 @@ def create_org_admin_role(self, name=None, orgs=None, locs=None): data returned from 'clone' function """ name = gen_string('alpha') if not name else name - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': {'name': name, 'organization_ids': orgs or [], 'location_ids': locs or []} } ) if 'role' in org_admin: - return entities.Role(id=org_admin['role']['id']).read() - return entities.Role(id=org_admin['id']).read() + return target_sat.api.Role(id=org_admin['role']['id']).read() + return target_sat.api.Role(id=org_admin['id']).read() - def create_org_admin_user(self, role_taxos, user_taxos): + def create_org_admin_user(self, role_taxos, user_taxos, target_sat): """Helper function to create an Org Admin user by assigning org admin role and assign taxonomies to Role and User @@ -105,12 +106,12 @@ def create_org_admin_user(self, role_taxos, user_taxos): """ # Create Org Admin Role org_admin = self.create_org_admin_role( - orgs=[role_taxos['org'].id], 
locs=[role_taxos['loc'].id] + target_sat, orgs=[role_taxos['org'].id], locs=[role_taxos['loc'].id] ) # Create Org Admin User user_login = gen_string('alpha') user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_passwd, organization=[user_taxos['org'].id], @@ -120,7 +121,7 @@ def create_org_admin_user(self, role_taxos, user_taxos): user.passwd = user_passwd return user - def create_simple_user(self, filter_taxos, role=None): + def create_simple_user(self, target_sat, filter_taxos, role=None): """Creates simple user and assigns taxonomies :param dict filter_taxos: Filter taxonomiest specified as dictionary containing @@ -130,7 +131,7 @@ def create_simple_user(self, filter_taxos, role=None): passwd attr """ user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=gen_string('alpha'), password=user_passwd, organization=[filter_taxos['org'].id], @@ -140,14 +141,16 @@ def create_simple_user(self, filter_taxos, role=None): user.passwd = user_passwd return user - def create_domain(self, orgs, locs): + def create_domain(self, target_sat, orgs, locs): """Creates domain in given orgs and locs :param list orgs: List of Organization ids :param list locs: List of Location ids :return Domain: Returns the ```nailgun.entities.Domain``` object """ - return entities.Domain(name=gen_string('alpha'), organization=orgs, location=locs).create() + return target_sat.api.Domain( + name=gen_string('alpha'), organization=orgs, location=locs + ).create() def user_config(self, user, satellite): """Returns The ```nailgun.confin.ServerConfig``` for given user @@ -160,19 +163,19 @@ def user_config(self, user, satellite): ) @pytest.fixture - def role_taxonomies(self): + def role_taxonomies(self, target_sat): """Create role taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': 
target_sat.api.Location().create(), } @pytest.fixture - def filter_taxonomies(self): + def filter_taxonomies(self, target_sat): """Create filter taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': target_sat.api.Location().create(), } @pytest.fixture @@ -184,7 +187,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): sat_url=target_sat.url, ldap_user_name=ad_data['ldap_user_name'], ldap_user_passwd=ad_data['ldap_user_passwd'], - authsource=entities.AuthSourceLDAP( + authsource=target_sat.api.AuthSourceLDAP( onthefly_register=True, account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], @@ -210,7 +213,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): LDAPAuthSource.delete({'name': authsource_name}) @pytest.mark.tier1 - def test_positive_create_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_role_with_taxonomies(self, role_taxonomies, target_sat): """create role with taxonomies :id: fa449217-889c-429b-89b5-0b6c018ffd9e @@ -222,7 +225,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], @@ -232,7 +235,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): assert role_taxonomies['loc'].id == role.location[0].id @pytest.mark.tier1 - def test_positive_create_role_without_taxonomies(self): + def test_positive_create_role_without_taxonomies(self, target_sat): """Create role without taxonomies :id: fe65a691-1b04-4bfe-a24b-adb48feb31d1 @@ -244,13 +247,13 @@ def test_positive_create_role_without_taxonomies(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = 
entities.Role(name=role_name, organization=[], location=[]).create() + role = target_sat.api.Role(name=role_name, organization=[], location=[]).create() assert role.name == role_name assert not role.organization assert not role.location @pytest.mark.tier1 - def test_positive_create_filter_without_override(self, role_taxonomies): + def test_positive_create_filter_without_override(self, role_taxonomies, target_sat): """Create filter in role w/o overriding it :id: 1aadb7ea-ff76-4171-850f-188ba6f87021 @@ -269,27 +272,27 @@ def test_positive_create_filter_without_override(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter(permission=dom_perm, role=role.id).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter(permission=dom_perm, role=role.id).create() assert role.id == filtr.role.id assert role_taxonomies['org'].id == filtr.organization[0].id assert role_taxonomies['loc'].id == filtr.location[0].id assert not filtr.override @pytest.mark.tier1 - def test_positive_create_non_overridable_filter(self): + def test_positive_create_non_overridable_filter(self, target_sat): """Create non overridable filter in role :id: f891e2e1-76f8-4edf-8c96-b41d05483298 :steps: Create a filter to which taxonomies cannot be associated. - e.g Architecture filter + e.g. 
Architecture filter :expectedresults: @@ -299,21 +302,23 @@ def test_positive_create_non_overridable_filter(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) - filtr = entities.Filter(permission=arch_perm, role=role.id).create() + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) + filtr = target_sat.api.Filter(permission=arch_perm, role=role.id).create() assert role.id == filtr.role.id assert not filtr.override @pytest.mark.tier1 - def test_negative_override_non_overridable_filter(self, filter_taxonomies): + def test_negative_override_non_overridable_filter(self, filter_taxonomies, target_sat): """Override non overridable filter :id: 7793be96-e8eb-451b-a986-51a46a1ab4f9 :steps: Attempt to override a filter to which taxonomies cannot be - associated. e.g Architecture filter + associated. e.g. 
Architecture filter :expectedresults: Filter is not overrided as taxonomies cannot be applied to that filter @@ -321,11 +326,13 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) with pytest.raises(HTTPError): - entities.Filter( + target_sat.api.Filter( permission=arch_perm, role=[role.id], override=True, @@ -335,7 +342,9 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxonomies): + def test_positive_create_overridable_filter( + self, role_taxonomies, filter_taxonomies, target_sat + ): """Create overridable filter in role :id: c7ea9377-9b9e-495e-accd-3576166d504e @@ -355,14 +364,14 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -377,7 +386,7 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono assert role_taxonomies['loc'].id != filtr.location[0].id @pytest.mark.tier1 - def 
test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies): + def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat): """Update role taxonomies which applies to its non-overrided filters :id: 902dcb32-2126-4ff4-b733-3e86749ccd1e @@ -390,29 +399,29 @@ def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomie :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter(permission=dom_perm, role=role.id).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter(permission=dom_perm, role=role.id).create() assert role.id == filtr.role.id role.organization = [filter_taxonomies['org']] role.location = [filter_taxonomies['loc']] role = role.update(['organization', 'location']) # Updated Role - role = entities.Role(id=role.id).read() + role = target_sat.api.Role(id=role.id).read() assert filter_taxonomies['org'].id == role.organization[0].id assert filter_taxonomies['loc'].id == role.location[0].id # Updated Filter - filtr = entities.Filter(id=filtr.id).read() + filtr = target_sat.api.Filter(id=filtr.id).read() assert filter_taxonomies['org'].id == filtr.organization[0].id assert filter_taxonomies['loc'].id == filtr.location[0].id @pytest.mark.tier1 - def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies): + def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat): """Update role taxonomies which doesnt applies to its overrided filters :id: 9f3bf95a-f71a-4063-b51c-12610bc655f2 @@ -426,14 +435,14 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, 
filter_taxonomie :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -442,23 +451,23 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomie ).create() assert role.id == filtr.role.id # Creating new Taxonomies - org_new = entities.Organization().create() - loc_new = entities.Location().create() + org_new = target_sat.api.Organization().create() + loc_new = target_sat.api.Location().create() # Updating Taxonomies role.organization = [org_new] role.location = [loc_new] role = role.update(['organization', 'location']) # Updated Role - role = entities.Role(id=role.id).read() + role = target_sat.api.Role(id=role.id).read() assert org_new.id == role.organization[0].id assert loc_new.id == role.location[0].id # Updated Filter - filtr = entities.Filter(id=filtr.id).read() + filtr = target_sat.api.Filter(id=filtr.id).read() assert org_new.id != filtr.organization[0].id assert loc_new.id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies): + def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies, target_sat): """Unsetting override flag resets filter taxonomies :id: eaa7b921-7c12-45c5-989b-d82aa2b6e3a6 @@ -477,14 +486,14 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, 
organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -500,7 +509,7 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi assert filter_taxonomies['loc'].id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_create_org_admin_from_clone(self): + def test_positive_create_org_admin_from_clone(self, target_sat): """Create Org Admin role which has access to most of the resources within organization @@ -515,14 +524,16 @@ def test_positive_create_org_admin_from_clone(self): :BZ: 1637436 """ - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) org_admin = self.create_org_admin_role() - default_filters = entities.Role(id=default_org_admin[0].id).read().filters - orgadmin_filters = entities.Role(id=org_admin.id).read().filters + default_filters = target_sat.api.Role(id=default_org_admin[0].id).read().filters + orgadmin_filters = target_sat.api.Role(id=org_admin.id).read().filters assert len(default_filters) == len(orgadmin_filters) @pytest.mark.tier1 - def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies, target_sat): """Taxonomies can be assigned to cloned role :id: 31079015-5ede-439a-a062-e20d1ffd66df @@ -542,7 +553,7 @@ def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): org_admin = self.create_org_admin_role( orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - org_admin = 
entities.Role(id=org_admin.id).read() + org_admin = target_sat.api.Role(id=org_admin.id).read() assert role_taxonomies['org'].id == org_admin.organization[0].id assert role_taxonomies['loc'].id == org_admin.location[0].id @@ -575,7 +586,7 @@ def test_negative_access_entities_from_org_admin( sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_user( @@ -606,10 +617,10 @@ def test_negative_access_entities_from_user( sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier2 - def test_positive_override_cloned_role_filter(self, role_taxonomies): + def test_positive_override_cloned_role_filter(self, role_taxonomies, target_sat): """Cloned role filter overrides :id: 8a32ed5f-b93f-4f31-aff4-16602fbe7fab @@ -625,27 +636,27 @@ def test_positive_override_cloned_role_filter(self, role_taxonomies): :CaseLevel: Integration """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() + role = target_sat.api.Role(name=role_name).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() cloned_role_name = gen_string('alpha') - cloned_role = entities.Role(id=role.id).clone(data={'name': cloned_role_name}) + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': cloned_role_name}) assert cloned_role_name == cloned_role['name'] - filter_cloned_id = entities.Role(id=cloned_role['id']).read().filters[0].id - filter_cloned = 
entities.Filter(id=filter_cloned_id).read() + filter_cloned_id = target_sat.api.Role(id=cloned_role['id']).read().filters[0].id + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() filter_cloned.override = True filter_cloned.organization = [role_taxonomies['org']] filter_cloned.location = [role_taxonomies['loc']] filter_cloned.update(['override', 'organization', 'location']) # Updated Filter - filter_cloned = entities.Filter(id=filter_cloned_id).read() + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() assert filter_cloned.override assert role_taxonomies['org'].id == filter_cloned.organization[0].id assert role_taxonomies['loc'].id == filter_cloned.location[0].id @pytest.mark.tier2 def test_positive_emptiness_of_filter_taxonomies_on_role_clone( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """Taxonomies of filters in cloned role are set to None for filters that are overridden in parent role @@ -667,29 +678,29 @@ def test_positive_emptiness_of_filter_taxonomies_on_role_clone( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - filter_cloned = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = 
target_sat.api.Role(id=cloned_role['id']).read().filters[0] + filter_cloned = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not filter_cloned.organization assert not filter_cloned.location assert filter_cloned.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_with_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """When taxonomies assigned to cloned role, Unlimited and Override flag sets on filter for filter that is overridden in parent role @@ -710,34 +721,34 @@ def test_positive_clone_role_having_overridden_filter_with_taxonomies( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone( + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( - self, role_taxonomies + self, role_taxonomies, target_sat ): """When taxonomies assigned to cloned 
role, Neither unlimited nor override sets on filter for filter that is not overridden in parent @@ -758,27 +769,29 @@ def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_taxonomies): + def test_positive_clone_role_having_unlimited_filter_with_taxonomies( + self, role_taxonomies, target_sat + ): """When taxonomies assigned to cloned role, Neither unlimited nor override sets on filter for filter that is unlimited in parent role @@ -797,28 +810,28 @@ def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_ :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = 
entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_without_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): # noqa """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is overridden in parent role @@ -838,27 +851,29 @@ def test_positive_clone_role_having_overridden_filter_without_taxonomies( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': 
gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_non_overided_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, only unlimited but not override flag sets on filter for filter that is overridden in parent role @@ -881,23 +896,25 @@ def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_t :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override 
@pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_unlimited_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is unlimited in parent role @@ -919,18 +936,18 @@ def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxo :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override @@ -948,7 +965,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta 2. Assign an organization A and Location A to the Org Admin role 3. Create two users without assigning roles while creating them 4. Assign Organization A and Location A to both users - 5. Create an user group with above two users + 5. Create a user group with above two users 6. 
Assign Org Admin role to User Group :expectedresults: Both the user should have access to the resources of @@ -961,7 +978,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ) userone_login = gen_string('alpha') userone_pass = gen_string('alphanumeric') - user_one = entities.User( + user_one = target_sat.api.User( login=userone_login, password=userone_pass, organization=[role_taxonomies['org'].id], @@ -970,7 +987,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta assert userone_login == user_one.login usertwo_login = gen_string('alpha') usertwo_pass = gen_string('alphanumeric') - user_two = entities.User( + user_two = target_sat.api.User( login=usertwo_login, password=usertwo_pass, organization=[role_taxonomies['org'].id], @@ -978,17 +995,17 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ).create() assert usertwo_login == user_two.login ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name # Creating Subnets and Domains to verify if user really can access them - subnet = entities.Subnet( + subnet = target_sat.api.Subnet( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], ).create() - domain = entities.Domain( + domain = target_sat.api.Domain( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], @@ -998,13 +1015,13 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta auth=(login, password), url=target_sat.url, verify=settings.server.verify_ca ) try: - entities.Domain(sc).search( + target_sat.api.Domain(sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, } ) - entities.Subnet(sc).search( + 
target_sat.api.Subnet(sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1012,8 +1029,8 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ) except HTTPError as err: pytest.fail(str(err)) - assert domain.id in [dom.id for dom in entities.Domain(sc).search()] - assert subnet.id in [sub.id for sub in entities.Subnet(sc).search()] + assert domain.id in [dom.id for dom in target_sat.api.Domain(sc).search()] + assert subnet.id in [sub.id for sub in target_sat.api.Subnet(sc).search()] @pytest.mark.tier3 def test_positive_user_group_users_access_contradict_as_org_admins(self): @@ -1067,10 +1084,10 @@ def test_negative_assign_org_admin_to_user_group( org_admin = self.create_org_admin_role( orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - user_one = self.create_simple_user(filter_taxos=filter_taxonomies) - user_two = self.create_simple_user(filter_taxos=filter_taxonomies) + user_one = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) + user_two = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name @@ -1078,7 +1095,7 @@ def test_negative_assign_org_admin_to_user_group( for user in [user_one, user_two]: sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.Domain(sc, id=dom.id).read() + target_sat.api.Domain(sc, id=dom.id).read() @pytest.mark.tier2 def test_negative_assign_taxonomies_by_org_admin( @@ -1111,13 +1128,13 @@ def test_negative_assign_taxonomies_by_org_admin( ) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']] ).create() assert 
dom_name == dom.name user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1129,13 +1146,13 @@ def test_negative_assign_taxonomies_by_org_admin( auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) # Getting the domain from user1 - dom = entities.Domain(sc, id=dom.id).read() + dom = target_sat.api.Domain(sc, id=dom.id).read() dom.organization = [filter_taxonomies['org']] with pytest.raises(HTTPError): dom.update(['organization']) @pytest.mark.tier1 - def test_positive_remove_org_admin_role(self, role_taxonomies): + def test_positive_remove_org_admin_role(self, role_taxonomies, target_sat): """Super Admin user can remove Org Admin role :id: 03fac76c-22ac-43cf-9068-b96e255b3c3c @@ -1156,21 +1173,23 @@ def test_positive_remove_org_admin_role(self, role_taxonomies): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User(login=user_login, password=user_pass, role=[org_admin.id]).create() + user = target_sat.api.User( + login=user_login, password=user_pass, role=[org_admin.id] + ).create() assert user_login == user.login try: - entities.Role(id=org_admin.id).delete() + target_sat.api.Role(id=org_admin.id).delete() except HTTPError as err: pytest.fail(str(err)) # Getting updated user - user = entities.User(id=user.id).read() + user = target_sat.api.User(id=user.id).read() assert org_admin.id not in [role.id for role in user.role] @pytest.mark.tier2 def test_positive_taxonomies_control_to_superadmin_with_org_admin( self, role_taxonomies, target_sat ): - """Super Admin can access entities in taxonomies assigned to Org Admin + """Super Admin can access target_sat.api in taxonomies assigned to Org Admin :id: 37db0b40-ed35-4e70-83e8-83cff27caae2 @@ -1179,9 +1198,9 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( 1. 
Create Org Admin role and assign organization A and Location A 2. Create User and assign above Org Admin role 3. Login with SuperAdmin who created the above Org Admin role and - access entities in Organization A and Location A + access target_sat.api in Organization A and Location A - :expectedresults: Super admin should be able to access the entities in + :expectedresults: Super admin should be able to access the target_sat.api in taxonomies assigned to Org Admin :CaseLevel: Integration @@ -1190,7 +1209,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( sc, name=dom_name, organization=[role_taxonomies['org']], @@ -1198,7 +1217,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( ).create() assert dom_name == dom.name try: - entities.Subnet().search( + target_sat.api.Subnet().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1211,7 +1230,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( def test_positive_taxonomies_control_to_superadmin_without_org_admin( self, role_taxonomies, target_sat ): - """Super Admin can access entities in taxonomies assigned to Org Admin + """Super Admin can access target_sat.api in taxonomies assigned to Org Admin after deleting Org Admin role/user :id: 446f66a5-16e0-4298-b326-262913502955 @@ -1224,7 +1243,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( 4. 
Login with SuperAdmin who created the above Org Admin role and access entities in Organization A and Location A - :expectedresults: Super admin should be able to access the entities in + :expectedresults: Super admin should be able to access the target_sat.api in taxonomies assigned to Org Admin after deleting Org Admin :CaseLevel: Integration @@ -1233,22 +1252,22 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( sc, name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert dom_name == dom.name - user_role = entities.Role(id=user.role[0].id).read() - entities.Role(id=user_role.id).delete() - entities.User(id=user.id).delete() + user_role = target_sat.api.Role(id=user.role[0].id).read() + target_sat.api.Role(id=user_role.id).delete() + target_sat.api.User(id=user.id).delete() with pytest.raises(HTTPError): user_role.read() with pytest.raises(HTTPError): user.read() try: - entities.Domain().search( + target_sat.api.Domain().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1260,7 +1279,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( @pytest.mark.tier1 @pytest.mark.upgrade def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): - """Org Admin doesnt have permissions to create new roles + """Org Admin doesn't have permissions to create new roles :id: 806ecc16-0dc7-405b-90d3-0584eced27a3 @@ -1279,7 +1298,7 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1292,7 +1311,7 @@ def test_negative_create_roles_by_org_admin(self, 
role_taxonomies, target_sat): ) role_name = gen_string('alpha') with pytest.raises(HTTPError): - entities.Role( + target_sat.api.Role( sc, name=role_name, organization=[role_taxonomies['org']], @@ -1316,9 +1335,9 @@ def test_negative_modify_roles_by_org_admin(self, role_taxonomies, target_sat): existing roles """ user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) - test_role = entities.Role().create() + test_role = target_sat.api.Role().create() sc = self.user_config(user, target_sat) - test_role = entities.Role(sc, id=test_role.id).read() + test_role = target_sat.api.Role(sc, id=test_role.id).read() test_role.organization = [role_taxonomies['org']] test_role.location = [role_taxonomies['loc']] with pytest.raises(HTTPError): @@ -1346,7 +1365,7 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1358,7 +1377,7 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) with pytest.raises(HTTPError): - entities.User(sc, id=1).read() + target_sat.api.User(sc, id=1).read() @pytest.mark.tier2 @pytest.mark.upgrade @@ -1393,7 +1412,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1406,7 +1425,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( sc_user, login=user_login, password=user_pass, @@ -1418,7 +1437,7 @@ def 
test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): assert org_admin.id == user.role[0].id if not is_open('BZ:1825698'): name = gen_string('alphanumeric') - location = entities.Location(sc_user, name=name).create() + location = target_sat.api.Location(sc_user, name=name).create() assert location.name == name @pytest.mark.tier2 @@ -1446,7 +1465,7 @@ def test_positive_access_users_inside_org_admin_taxonomies(self, role_taxonomies test_user = self.create_simple_user(filter_taxos=role_taxonomies) sc = self.user_config(user, target_sat) try: - entities.User(sc, id=test_user.id).read() + target_sat.api.User(sc, id=test_user.id).read() except HTTPError as err: pytest.fail(str(err)) @@ -1473,7 +1492,7 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): """ user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, organization=[role_taxonomies['org']], @@ -1488,7 +1507,7 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) name = gen_string('alphanumeric') - location = entities.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() + location = target_sat.api.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() assert location.name == name @pytest.mark.tier2 @@ -1518,7 +1537,7 @@ def test_negative_access_users_outside_org_admin_taxonomies( test_user = self.create_simple_user(filter_taxos=filter_taxonomies) sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.User(sc, id=test_user.id).read() + target_sat.api.User(sc, id=test_user.id).read() @pytest.mark.tier1 def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_sat): @@ -1542,7 +1561,7 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s org_admin = 
self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1554,11 +1573,11 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) with pytest.raises(HTTPError): - entities.Organization(sc, name=gen_string('alpha')).create() + target_sat.api.Organization(sc, name=gen_string('alpha')).create() if not is_open("BZ:1825698"): try: loc_name = gen_string('alpha') - loc = entities.Location(sc, name=loc_name).create() + loc = target_sat.api.Location(sc, name=loc_name).create() except HTTPError as err: pytest.fail(str(err)) assert loc_name == loc.name @@ -1568,7 +1587,7 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s def test_positive_access_all_global_entities_by_org_admin( self, role_taxonomies, filter_taxonomies, target_sat ): - """Org Admin can access all global entities in any taxonomies + """Org Admin can access all global target_sat.api in any taxonomies regardless of its own assigned taxonomies :id: add5feb3-7a3f-45a1-a633-49f1141b029b @@ -1579,16 +1598,16 @@ def test_positive_access_all_global_entities_by_org_admin( 2. Create new user and assign Org A,B and Location A,B 3. Assign Org Admin role to User 4. Login with Org Admin user - 5. Attempt to create all the global entities in org B and Loc B - e.g Architectures, Operating System + 5. Attempt to create all the global target_sat.api in org B and Loc B + e.g. 
Architectures, Operating System :expectedresults: Org Admin should have access to all the global - entities in any taxonomies + target_sat.api in any taxonomies """ org_admin = self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1601,22 +1620,24 @@ def test_positive_access_all_global_entities_by_org_admin( ) try: for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: entity(sc).search() except HTTPError as err: pytest.fail(str(err)) @pytest.mark.tier3 - def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, create_ldap): + def test_negative_access_entities_from_ldap_org_admin( + self, role_taxonomies, create_ldap, target_sat + ): """LDAP User can not access resources in taxonomies assigned to role if its own taxonomies are not same as its role @@ -1650,17 +1671,19 @@ def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, cre verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = 
[target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the domain resource created in org admin role with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_ldap_user( - self, role_taxonomies, create_ldap, module_location, module_org + self, role_taxonomies, create_ldap, module_location, module_org, target_sat ): """LDAP User can not access resources within its own taxonomies if assigned role does not have permissions for same taxonomies @@ -1693,16 +1716,20 @@ def test_negative_access_entities_from_ldap_user( verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = [target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 - def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, create_ldap): + def test_positive_assign_org_admin_to_ldap_user_group( + self, role_taxonomies, create_ldap, target_sat + ): """Users in LDAP usergroup can access to the resources in taxonomies if the taxonomies of Org Admin role are same @@ -1736,7 +1763,7 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre locs=[create_ldap['authsource'].location[0].id], ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1744,9 +1771,11 @@ def 
test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to the usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1757,10 +1786,12 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre verify=settings.server.verify_ca, ) # Accessing the Domain resource - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 - def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_taxonomies): + def test_negative_assign_org_admin_to_ldap_user_group( + self, create_ldap, role_taxonomies, target_sat + ): """Users in LDAP usergroup can not have access to the resources in taxonomies if the taxonomies of Org Admin role is not same @@ -1792,7 +1823,7 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1800,9 +1831,11 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1814,7 +1847,7 @@ def 
test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta ) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() class TestRoleSearchFilter: diff --git a/tests/foreman/api/test_settings.py b/tests/foreman/api/test_settings.py index b5d0542d35d..b4fe49c23c1 100644 --- a/tests/foreman/api/test_settings.py +++ b/tests/foreman/api/test_settings.py @@ -18,7 +18,6 @@ """ import random -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -188,7 +187,7 @@ def test_negative_discover_host_with_invalid_prefix(): @pytest.mark.tier2 @pytest.mark.parametrize('download_policy', ["immediate", "on_demand"]) @pytest.mark.parametrize('setting_update', ['default_download_policy'], indirect=True) -def test_positive_custom_repo_download_policy(setting_update, download_policy): +def test_positive_custom_repo_download_policy(setting_update, download_policy, target_sat): """Check the set custom repository download policy for newly created custom repository. 
:id: d5150cce-ba85-4ea0-a8d1-6a54d0d29571 @@ -209,11 +208,11 @@ def test_positive_custom_repo_download_policy(setting_update, download_policy): :CaseLevel: Acceptance """ - org = entities.Organization().create() - prod = entities.Product(organization=org).create() + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() setting_update.value = download_policy setting_update.update({'value'}) - repo = entities.Repository(product=prod, content_type='yum', organization=org).create() + repo = target_sat.api.Repository(product=prod, content_type='yum', organization=org).create() assert repo.download_policy == download_policy repo.delete() prod.delete() diff --git a/tests/foreman/api/test_subnet.py b/tests/foreman/api/test_subnet.py index 8c1fb2f7f8e..9ac9fca0979 100644 --- a/tests/foreman/api/test_subnet.py +++ b/tests/foreman/api/test_subnet.py @@ -23,7 +23,6 @@ """ import re -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -36,7 +35,7 @@ @pytest.mark.tier1 -def test_positive_create_with_parameter(): +def test_positive_create_with_parameter(target_sat): """Subnet can be created along with parameters :id: ec581cb5-8c48-4b9c-b536-302c0b7ec30f @@ -47,14 +46,14 @@ def test_positive_create_with_parameter(): :expectedresults: The Subnet is created with parameter """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() assert subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(generate_strings_list())) -def test_positive_add_parameter(name): +def test_positive_add_parameter(name, target_sat): """Parameters can be created in subnet :id: 
c1dae6f4-45b1-45db-8529-d7918e41a99b @@ -70,15 +69,15 @@ def test_positive_add_parameter(name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - subnet_param = entities.Parameter(subnet=subnet.id, name=name, value=value).create() + subnet_param = target_sat.api.Parameter(subnet=subnet.id, name=name, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 -def test_positive_add_parameter_with_values_and_separator(): +def test_positive_add_parameter_with_values_and_separator(target_sat): """Subnet parameters can be created with values separated by comma :id: b3de6f96-7c39-4c44-b91c-a6d141f5dd6a @@ -94,10 +93,10 @@ def test_positive_add_parameter_with_values_and_separator(): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() name = gen_string('alpha') values = ', '.join(generate_strings_list()) - subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=values).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=values).create() assert subnet_param.name == name assert subnet_param.value == values @@ -106,7 +105,7 @@ def test_positive_add_parameter_with_values_and_separator(): @pytest.mark.parametrize( 'separator', **parametrized({'comma': ',', 'slash': '/', 'dash': '-', 'pipe': '|'}) ) -def test_positive_create_with_parameter_and_valid_separator(separator): +def test_positive_create_with_parameter_and_valid_separator(separator, target_sat): """Subnet parameters can be created with name with valid separators :id: d1e2d75a-a1e8-4767-93f1-0bb1b75e10a0 @@ -124,16 +123,16 @@ def test_positive_create_with_parameter_and_valid_separator(separator): :CaseImportance: Low """ name = f'{separator}'.join(generate_strings_list()) - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - 
subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=value).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_create_with_parameter_and_invalid_separator(name): +def test_negative_create_with_parameter_and_invalid_separator(name, target_sat): """Subnet parameters can not be created with name with invalid separators @@ -155,13 +154,13 @@ def test_negative_create_with_parameter_and_invalid_separator(name): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() with pytest.raises(HTTPError): - entities.Parameter(name=name, subnet=subnet.id).create() + target_sat.api.Parameter(name=name, subnet=subnet.id).create() @pytest.mark.tier1 -def test_negative_create_with_duplicated_parameters(): +def test_negative_create_with_duplicated_parameters(target_sat): """Attempt to create multiple parameters with same key name for the same subnet @@ -180,10 +179,10 @@ def test_negative_create_with_duplicated_parameters(): :CaseImportance: Low """ - subnet = entities.Subnet().create() - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() with pytest.raises(HTTPError) as context: - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() assert re.search("Name has already been taken", context.value.response.text) @@ -244,7 +243,7 @@ def test_positive_subnet_parameters_override_from_host(): @pytest.mark.tier3 -def test_positive_subnet_parameters_override_impact_on_subnet(): +def 
test_positive_subnet_parameters_override_impact_on_subnet(target_sat): """Override subnet parameter from host impact on subnet parameter :id: 6fe963ed-93a3-496e-bfd9-599bf91a61f3 @@ -266,15 +265,15 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): # Create subnet with valid parameters parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], subnet_parameters_attributes=parameter ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] # Create host with above subnet - host = entities.Host(location=loc, organization=org, subnet=org_subnet).create() + host = target_sat.api.Host(location=loc, organization=org, subnet=org_subnet).create() assert host.subnet.read().name == org_subnet.name parameter_new_value = [ { @@ -293,7 +292,7 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): @pytest.mark.tier1 -def test_positive_update_parameter(): +def test_positive_update_parameter(target_sat): """Subnet parameter can be updated :id: 8c389c3f-60ef-4856-b8fc-c5b066c67a2f @@ -309,7 +308,7 @@ def test_positive_update_parameter(): :CaseImportance: Medium """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() update_parameter = [{'name': gen_string('utf8'), 'value': gen_string('utf8')}] subnet.subnet_parameters_attributes = update_parameter up_subnet = subnet.update(['subnet_parameters_attributes']) @@ -319,7 +318,7 @@ def 
test_positive_update_parameter(): @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_update_parameter(new_name): +def test_negative_update_parameter(new_name, target_sat): """Subnet parameter can not be updated with invalid names :id: fcdbad13-ad96-4152-8e20-e023d61a2853 @@ -339,8 +338,8 @@ def test_negative_update_parameter(new_name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter( + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter( name=gen_string('utf8'), subnet=subnet.id, value=gen_string('utf8') ).create() sub_param.name = new_name @@ -377,7 +376,7 @@ def test_positive_update_subnet_parameter_host_impact(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_subnet_parameter(): +def test_positive_delete_subnet_parameter(target_sat): """Subnet parameter can be deleted :id: 972b66ec-d506-4fcb-9786-c62f2f79ac1a @@ -389,8 +388,8 @@ def test_positive_delete_subnet_parameter(): :expectedresults: The parameter should be deleted from subnet """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter(subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter(subnet=subnet.id).create() sub_param.delete() with pytest.raises(HTTPError): sub_param.read() @@ -452,7 +451,7 @@ def test_positive_delete_subnet_overridden_parameter_host_impact(): @pytest.mark.tier1 -def test_positive_list_parameters(): +def test_positive_list_parameters(target_sat): """Satellite lists all the subnet parameters :id: ce86d531-bf6b-45a9-81e3-67e1b3398f76 @@ -467,9 +466,9 @@ def test_positive_list_parameters(): parameters """ parameter = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = 
target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], ipam='DHCP', @@ -478,10 +477,10 @@ def test_positive_list_parameters(): ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter['value'] - sub_param = entities.Parameter( + sub_param = target_sat.api.Parameter( name=gen_string('alpha'), subnet=org_subnet.id, value=gen_string('alpha') ).create() - org_subnet = entities.Subnet(id=org_subnet.id).read() + org_subnet = target_sat.api.Subnet(id=org_subnet.id).read() params_list = { param['name']: param['value'] for param in org_subnet.subnet_parameters_attributes diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 377d43555dd..b27488cd5f7 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -21,7 +21,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities from nailgun.config import ServerConfig from nailgun.entity_mixins import TaskFailedError import pytest @@ -44,28 +43,30 @@ def rh_repo(module_sca_manifest_org, module_target_sat): reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @pytest.fixture(scope='module') -def custom_repo(rh_repo, module_sca_manifest_org): - custom_repo = entities.Repository( - product=entities.Product(organization=module_sca_manifest_org).create(), +def custom_repo(rh_repo, module_sca_manifest_org, module_target_sat): + custom_repo = module_target_sat.api.Repository( + product=module_target_sat.api.Product(organization=module_sca_manifest_org).create(), ).create() custom_repo.sync() return custom_repo @pytest.fixture(scope='module') -def 
module_ak(module_sca_manifest_org, rh_repo, custom_repo): +def module_ak(module_sca_manifest_org, rh_repo, custom_repo, module_target_sat): """rh_repo and custom_repo are included here to ensure their execution before the AK""" - module_ak = entities.ActivationKey( + module_ak = module_target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment( + id=module_sca_manifest_org.library.id + ), auto_attach=True, ).create() return module_ak @@ -82,12 +83,12 @@ def test_positive_create(module_entitlement_manifest, module_target_sat): :CaseImportance: Critical """ - org = entities.Organization().create() + org = module_target_sat.api.Organization().create() module_target_sat.upload_manifest(org.id, module_entitlement_manifest.content) @pytest.mark.tier1 -def test_positive_refresh(function_entitlement_manifest_org, request): +def test_positive_refresh(function_entitlement_manifest_org, request, target_sat): """Upload a manifest and refresh it afterwards. 
:id: cd195db6-e81b-42cb-a28d-ec0eb8a53341 @@ -97,7 +98,7 @@ def test_positive_refresh(function_entitlement_manifest_org, request): :CaseImportance: Critical """ org = function_entitlement_manifest_org - sub = entities.Subscription(organization=org) + sub = target_sat.api.Subscription(organization=org) request.addfinalizer(lambda: sub.delete_manifest(data={'organization_id': org.id})) sub.refresh_manifest(data={'organization_id': org.id}) assert sub.search() @@ -120,9 +121,9 @@ def test_positive_create_after_refresh( :CaseImportance: Critical """ - org_sub = entities.Subscription(organization=function_entitlement_manifest_org) - new_org = entities.Organization().create() - new_org_sub = entities.Subscription(organization=new_org) + org_sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) + new_org = target_sat.api.Organization().create() + new_org_sub = target_sat.api.Subscription(organization=new_org) try: org_sub.refresh_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert org_sub.search() @@ -133,7 +134,7 @@ def test_positive_create_after_refresh( @pytest.mark.tier1 -def test_positive_delete(function_entitlement_manifest_org): +def test_positive_delete(function_entitlement_manifest_org, target_sat): """Delete an Uploaded manifest. :id: 4c21c7c9-2b26-4a65-a304-b978d5ba34fc @@ -142,7 +143,7 @@ def test_positive_delete(function_entitlement_manifest_org): :CaseImportance: Critical """ - sub = entities.Subscription(organization=function_entitlement_manifest_org) + sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) assert sub.search() sub.delete_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert len(sub.search()) == 0 @@ -157,12 +158,12 @@ def test_negative_upload(function_entitlement_manifest, target_sat): :expectedresults: The manifest is not uploaded to the second organization. 
""" - orgs = [entities.Organization().create() for _ in range(2)] + orgs = [target_sat.api.Organization().create() for _ in range(2)] with function_entitlement_manifest as manifest: target_sat.upload_manifest(orgs[0].id, manifest.content) with pytest.raises(TaskFailedError): target_sat.upload_manifest(orgs[1].id, manifest.content) - assert len(entities.Subscription(organization=orgs[1]).search()) == 0 + assert len(target_sat.api.Subscription(organization=orgs[1]).search()) == 0 @pytest.mark.tier2 @@ -208,11 +209,11 @@ def test_positive_delete_manifest_as_another_user( ) # use the first admin to upload a manifest with function_entitlement_manifest as manifest: - entities.Subscription(sc1, organization=function_org).upload( + target_sat.api.Subscription(sc1, organization=function_org).upload( data={'organization_id': function_org.id}, files={'content': manifest.content} ) # try to search and delete the manifest with another admin - entities.Subscription(sc2, organization=function_org).delete_manifest( + target_sat.api.Subscription(sc2, organization=function_org).delete_manifest( data={'organization_id': function_org.id} ) assert len(Subscription.list({'organization-id': function_org.id})) == 0 @@ -238,7 +239,7 @@ def test_positive_subscription_status_disabled( rhel_contenthost.install_katello_ca(target_sat) rhel_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel_contenthost.subscribed - host_content = entities.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content + host_content = target_sat.api.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content assert 'Simple Content Access' in str(host_content) @@ -266,9 +267,12 @@ def test_sca_end_to_end( rhel7_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel7_contenthost.subscribed # Check to see if Organization is in SCA Mode - assert entities.Organization(id=module_sca_manifest_org.id).read().simple_content_access is True + 
assert ( + target_sat.api.Organization(id=module_sca_manifest_org.id).read().simple_content_access + is True + ) # Verify that you cannot attach a subscription to an activation key in SCA Mode - subscription = entities.Subscription(organization=module_sca_manifest_org).search( + subscription = target_sat.api.Subscription(organization=module_sca_manifest_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] with pytest.raises(HTTPError) as ak_context: @@ -276,12 +280,12 @@ def test_sca_end_to_end( assert 'Simple Content Access' in ak_context.value.response.text # Verify that you cannot attach a subscription to an Host in SCA Mode with pytest.raises(HTTPError) as host_context: - entities.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( + target_sat.api.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( data={'subscriptions': [{'id': subscription.id, 'quantity': 1}]} ) assert 'Simple Content Access' in host_context.value.response.text # Create a content view with repos and check to see that the client has access - content_view = entities.ContentView(organization=module_sca_manifest_org).create() + content_view = target_sat.api.ContentView(organization=module_sca_manifest_org).create() content_view.repository = [rh_repo, custom_repo] content_view.update(['repository']) content_view.publish() @@ -327,34 +331,34 @@ def test_positive_candlepin_events_processed_by_stomp( :CaseImportance: High """ - repo = entities.Repository( - product=entities.Product(organization=function_org).create() + repo = target_sat.api.Repository( + product=target_sat.api.Product(organization=function_org).create() ).create() repo.sync() - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=function_org.default_content_view, max_hosts=100, organization=function_org, - environment=entities.LifecycleEnvironment(id=function_org.library.id), + 
environment=target_sat.api.LifecycleEnvironment(id=function_org.library.id), auto_attach=True, ).create() rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost(function_org.name, ak.name) - host = entities.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) + host = target_sat.api.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) host_id = host[0].id - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 2 with function_entitlement_manifest as manifest: target_sat.upload_manifest(function_org.id, manifest.content) - subscription = entities.Subscription(organization=function_org).search( + subscription = target_sat.api.Subscription(organization=function_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] - entities.HostSubscription(host=host_id).add_subscriptions( + target_sat.api.HostSubscription(host=host_id).add_subscriptions( data={'subscriptions': [{'id': subscription.cp_id, 'quantity': 1}]} ) - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 0 - response = entities.Ping().search_json()['services']['candlepin_events'] + response = target_sat.api.Ping().search_json()['services']['candlepin_events'] assert response['status'] == 'ok' assert '0 Failed' in response['message'] @@ -386,11 +390,11 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte :CaseImportance: High """ - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=module_sca_manifest_org.library.id), 
auto_attach=True, ).create() # registering the content host with no content enabled/synced in the org @@ -411,7 +415,7 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() # re-registering the host should test whether Candlepin gracefully handles # registration of a host with an expired SCA cert diff --git a/tests/foreman/api/test_syncplan.py b/tests/foreman/api/test_syncplan.py index 9653322bd55..566d06d692a 100644 --- a/tests/foreman/api/test_syncplan.py +++ b/tests/foreman/api/test_syncplan.py @@ -24,7 +24,7 @@ from time import sleep from fauxfactory import gen_choice, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -101,7 +101,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.tier1 -def test_positive_get_routes(): +def test_positive_get_routes(target_sat): """Issue an HTTP GET response to both available routes. 
:id: 9e40ea7f-71ea-4ced-94ba-cde03620c654 @@ -112,8 +112,8 @@ def test_positive_get_routes(): :CaseImportance: Critical """ - org = entities.Organization().create() - entities.SyncPlan(organization=org).create() + org = target_sat.api.Organization().create() + target_sat.api.SyncPlan(organization=org).create() response1 = client.get( f'{get_url()}/katello/api/v2/sync_plans', auth=get_credentials(), @@ -144,7 +144,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan = sync_plan.read() assert sync_plan.enabled == enabled @@ -152,7 +152,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a sync plan with a random name. :id: c1263134-0d7c-425a-82fd-df5274e1f9ba @@ -163,14 +163,16 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, name=name, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan( + enabled=False, name=name, organization=module_org + ).create() sync_plan = sync_plan.read() assert sync_plan.name == name @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, description): +def test_positive_create_with_description(module_org, description, module_target_sat): """Create a sync plan with a random description. 
:id: 3e5745e8-838d-44a5-ad61-7e56829ad47c @@ -182,7 +184,7 @@ def test_positive_create_with_description(module_org, description): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=description, organization=module_org ).create() sync_plan = sync_plan.read() @@ -191,7 +193,7 @@ def test_positive_create_with_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_create_with_interval(module_org, interval): +def test_positive_create_with_interval(module_org, interval, module_target_sat): """Create a sync plan with a random interval. :id: d160ed1c-b698-42dc-be0b-67ac693c7840 @@ -202,7 +204,7 @@ def test_positive_create_with_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -235,7 +237,7 @@ def test_positive_create_with_sync_date(module_org, sync_delta, target_sat): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Create a sync plan with an invalid name. 
:id: a3a0f844-2f81-4f87-9f68-c25506c29ce2 @@ -248,12 +250,12 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(name=name, organization=module_org).create() + module_target_sat.api.SyncPlan(name=name, organization=module_org).create() @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_interval(module_org, interval): +def test_negative_create_with_invalid_interval(module_org, interval, module_target_sat): """Create a sync plan with invalid interval specified. :id: f5844526-9f58-4be3-8a96-3849a465fc02 @@ -266,11 +268,11 @@ def test_negative_create_with_invalid_interval(module_org, interval): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(interval=interval, organization=module_org).create() + module_target_sat.api.SyncPlan(interval=interval, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_empty_interval(module_org): +def test_negative_create_with_empty_interval(module_org, module_target_sat): """Create a sync plan with no interval specified. 
:id: b4686463-69c8-4538-b040-6fb5246a7b00 @@ -280,7 +282,7 @@ def test_negative_create_with_empty_interval(module_org): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(organization=module_org) + sync_plan = module_target_sat.api.SyncPlan(organization=module_org) sync_plan.create_missing() del sync_plan.interval with pytest.raises(HTTPError): @@ -300,7 +302,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=not enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=not enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan.enabled = enabled sync_plan.update(['enabled']) @@ -310,7 +312,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, name): +def test_positive_update_name(module_org, name, module_target_sat): """Create a sync plan and update its name. :id: dbfadf4f-50af-4aa8-8d7d-43988dc4528f @@ -322,7 +324,7 @@ def test_positive_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name sync_plan.update(['name']) sync_plan = sync_plan.read() @@ -331,7 +333,7 @@ def test_positive_update_name(module_org, name): @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_org, description): +def test_positive_update_description(module_org, description, module_target_sat): """Create a sync plan and update its description. 
:id: 4769fe9c-9eec-40c8-b015-1e3d7e570bec @@ -341,7 +343,7 @@ def test_positive_update_description(module_org, description): :expectedresults: A sync plan is created and its description can be updated with the specified description. """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org ).create() sync_plan.description = description @@ -352,7 +354,7 @@ def test_positive_update_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval(module_org, interval): +def test_positive_update_interval(module_org, interval, module_target_sat): """Create a sync plan and update its interval. :id: cf2eddf8-b4db-430e-a9b0-83c626b45068 @@ -364,7 +366,7 @@ def test_positive_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -384,7 +386,7 @@ def test_positive_update_interval(module_org, interval): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval_custom_cron(module_org, interval): +def test_positive_update_interval_custom_cron(module_org, interval, module_target_sat): """Create a sync plan and update its interval to custom cron. 
:id: 26c58319-cae0-4b0c-b388-2a1fe3f22344 @@ -397,7 +399,7 @@ def test_positive_update_interval_custom_cron(module_org, interval): :CaseImportance: Critical """ if interval != SYNC_INTERVAL['custom']: - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, @@ -436,7 +438,7 @@ def test_positive_update_sync_date(module_org, sync_delta, target_sat): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, name): +def test_negative_update_name(module_org, name, module_target_sat): """Try to update a sync plan with an invalid name. :id: ae502053-9d3c-4cad-aee4-821f846ceae5 @@ -448,7 +450,7 @@ def test_negative_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name with pytest.raises(HTTPError): sync_plan.update(['name']) @@ -456,7 +458,7 @@ def test_negative_update_name(module_org, name): @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_interval(module_org, interval): +def test_negative_update_interval(module_org, interval, module_target_sat): """Try to update a sync plan with invalid interval. 
:id: 8c981174-6f55-49c0-8baa-40e5c3fc598c @@ -468,7 +470,7 @@ def test_negative_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.interval = interval with pytest.raises(HTTPError): sync_plan.update(['interval']) @@ -626,14 +628,14 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, :CaseLevel: System """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=2) validate_repo_content(repo, ['erratum', 'rpm', 'package_group'], after_sync=False) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=datetime.utcnow().replace(second=0) ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -660,10 +662,10 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='hourly', @@ -706,8 +708,8 @@ def 
test_positive_synchronize_custom_product_future_sync_date(module_org, reques :BZ: 1655595, 1695733 """ delay = 2 * 60 # delay for sync date in seconds - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=1) @@ -716,7 +718,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -758,9 +760,11 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque """ # Test with multiple products and multiple repos needs more delay. 
delay = 8 * 60 # delay for sync date in seconds - products = [entities.Product(organization=module_org).create() for _ in range(2)] + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] repos = [ - entities.Repository(product=product).create() for product in products for _ in range(2) + target_sat.api.Repository(product=product).create() + for product in products + for _ in range(2) ] # Verify products have not been synced yet logger.info( @@ -774,7 +778,7 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -831,9 +835,9 @@ def test_positive_synchronize_rh_product_past_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'], organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() - sync_plan = entities.SyncPlan( + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', @@ -864,7 +868,7 @@ def test_positive_synchronize_rh_product_past_sync_date( # Add disassociate RH product from sync plan check for BZ#1879537 assert len(sync_plan.read().product) == 1 # Disable the reposet - reposet = entities.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] + reposet = target_sat.api.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] reposet.disable(data={'basearch': 'x86_64', 'releasever': None, 'product_id': product.id}) # Assert that the 
Sync Plan now has no product associated with it assert len(sync_plan.read().product) == 0 @@ -895,12 +899,12 @@ def test_positive_synchronize_rh_product_future_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'], organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -944,11 +948,11 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(days=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='daily', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -989,11 +993,11 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = 
target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(weeks=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='weekly', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -1061,7 +1065,7 @@ def test_positive_delete_products(module_org, target_sat): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product(module_org): +def test_positive_delete_synced_product(module_org, module_target_sat): """Create a sync plan with one synced product and delete it. :id: 195d8fec-1fa0-42ab-84a5-32dd81a285ca @@ -1071,9 +1075,9 @@ def test_positive_delete_synced_product(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() sync_plan.delete() @@ -1083,7 +1087,7 @@ def test_positive_delete_synced_product(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product_custom_cron(module_org): +def test_positive_delete_synced_product_custom_cron(module_org, module_target_sat): """Create a sync plan with custom cron with one synced product and delete it. 
@@ -1094,13 +1098,13 @@ def test_positive_delete_synced_product_custom_cron(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( organization=module_org, interval='custom cron', cron_expression=gen_choice(valid_cron_expressions()), ).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() product = product.read() diff --git a/tests/foreman/api/test_templatesync.py b/tests/foreman/api/test_templatesync.py index 061b8db1dd1..8e38540d395 100644 --- a/tests/foreman/api/test_templatesync.py +++ b/tests/foreman/api/test_templatesync.py @@ -19,7 +19,6 @@ import time from fauxfactory import gen_string -from nailgun import entities import pytest import requests @@ -67,7 +66,9 @@ def setUpClass(self, module_target_sat): ) @pytest.mark.tier2 - def test_positive_import_filtered_templates_from_git(self, module_org, module_location): + def test_positive_import_filtered_templates_from_git( + self, module_org, module_location, module_target_sat + ): """Assure only templates with a given filter regex are pulled from git repo. 
@@ -91,7 +92,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo :CaseImportance: High """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -105,7 +106,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(True) assert imported_count == 8 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -114,7 +115,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptemplates) == 5 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -123,7 +124,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptables) == 1 - jtemplates = entities.JobTemplate().search( + jtemplates = module_target_sat.api.JobTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -132,7 +133,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(jtemplates) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -143,7 +144,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo assert len(rtemplates) == 1 @pytest.mark.tier2 - def test_import_filtered_templates_from_git_with_negate(self, module_org): + def test_import_filtered_templates_from_git_with_negate(self, module_org, module_target_sat): """Assure templates with a 
given filter regex are NOT pulled from git repo. @@ -162,7 +163,7 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -176,15 +177,15 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(False) assert not_imported_count == 9 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptemplates) == 6 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptables) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(rtemplates) == 1 @@ -267,7 +268,7 @@ def test_positive_import_and_associate( prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir # Associate Never - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -277,7 +278,7 @@ def test_positive_import_and_associate( } ) # - Template 1 imported in X and Y taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -288,7 +289,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - 
Template 1 not imported in metadata taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -302,7 +303,7 @@ def test_positive_import_and_associate( f'cp {dir_path}/example_template.erb {dir_path}/another_template.erb && ' f'sed -ie "s/name: .*/name: another_template/" {dir_path}/another_template.erb' ) - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -312,7 +313,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -323,7 +324,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 should be imported in importing taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -334,7 +335,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # Associate Always - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -344,7 +345,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -355,7 +356,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = 
target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -367,7 +368,7 @@ def test_positive_import_and_associate( assert len(ptemplate[0].read().organization) == 1 @pytest.mark.tier2 - def test_positive_import_from_subdirectory(self, module_org): + def test_positive_import_from_subdirectory(self, module_org, module_target_sat): """Assure templates are imported from specific repositories subdirectory :id: 8ea11a1a-165e-4834-9387-7accb4c94e77 @@ -384,7 +385,7 @@ def test_positive_import_from_subdirectory(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -423,7 +424,7 @@ def test_positive_export_filtered_templates_to_localdir( :CaseImportance: Low """ dir_name, dir_path = create_import_export_local_dir - exported_temps = entities.Template().exports( + exported_temps = target_sat.api.Template().exports( data={ 'repo': FOREMAN_TEMPLATE_ROOT_DIR, 'dirname': dir_name, @@ -459,7 +460,7 @@ def test_positive_export_filtered_templates_negate( """ # Export some filtered templates to local dir _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -498,7 +499,7 @@ def test_positive_export_and_import_with_metadata( ex_template = 'example_template.erb' prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'location_ids': [module_location.id], @@ -508,7 +509,7 @@ def test_positive_export_and_import_with_metadata( ) export_file = f'{prefix.lower()}{ex_template}' # Export same template to local dir with refreshed metadata - entities.Template().exports( + 
target_sat.api.Template().exports( data={ 'metadata_export_mode': 'refresh', 'repo': dir_path, @@ -522,7 +523,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 0 # Export same template to local dir with keeping metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'keep', 'repo': dir_path, @@ -536,7 +537,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 1 # Export same template to local dir with removed metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'remove', 'repo': dir_path, @@ -553,7 +554,7 @@ def test_positive_export_and_import_with_metadata( # Take Templates out of Tech Preview Feature Tests @pytest.mark.tier3 @pytest.mark.parametrize('verbose', [True, False]) - def test_positive_import_json_output_verbose(self, module_org, verbose): + def test_positive_import_json_output_verbose(self, module_org, verbose, module_target_sat): """Assert all the required fields displayed in import output when verbose is True and False @@ -575,7 +576,7 @@ def test_positive_import_json_output_verbose(self, module_org, verbose): :CaseImportance: Low """ prefix = gen_string('alpha') - templates = entities.Template().imports( + templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -628,19 +629,19 @@ def test_positive_import_json_output_changed_key_true( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) target_sat.execute(f'echo " Updating Template data." 
>> {dir_path}/example_template.erb') - post_template = entities.Template().imports( + post_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(post_template['message']['templates'][0]['changed']) @pytest.mark.tier2 def test_positive_import_json_output_changed_key_false( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `changed` key returns `False` when template data gets updated @@ -663,11 +664,11 @@ def test_positive_import_json_output_changed_key_false( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) - post_template = entities.Template().imports( + post_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert not bool(post_template['message']['templates'][0]['changed']) @@ -697,7 +698,7 @@ def test_positive_import_json_output_name_key( target_sat.execute( f'sed -ie "s/name: .*/name: {template_name}/" {dir_path}/example_template.erb' ) - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert 'name' in template['message']['templates'][0].keys() @@ -705,7 +706,7 @@ def test_positive_import_json_output_name_key( @pytest.mark.tier2 def test_positive_import_json_output_imported_key( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `imported` key returns `True` on successful import @@ -725,13 +726,15 @@ def 
test_positive_import_json_output_imported_key( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(template['message']['templates'][0]['imported']) @pytest.mark.tier2 - def test_positive_import_json_output_file_key(self, create_import_export_local_dir, module_org): + def test_positive_import_json_output_file_key( + self, create_import_export_local_dir, module_org, module_target_sat + ): """Assert template imports output `file` key returns correct file name from where the template is imported @@ -750,7 +753,7 @@ def test_positive_import_json_output_file_key(self, create_import_export_local_d :CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert 'example_template.erb' == template['message']['templates'][0]['file'] @@ -780,7 +783,7 @@ def test_positive_import_json_output_corrupted_metadata( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/<%#/$#$#@%^$^@@RT$$/" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -791,7 +794,7 @@ def test_positive_import_json_output_corrupted_metadata( @pytest.mark.skip_if_open('BZ:1787355') @pytest.mark.tier2 def test_positive_import_json_output_filtered_skip_message( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output returns template import skipped info for templates whose name doesnt match the filter @@ 
-812,7 +815,7 @@ def test_positive_import_json_output_filtered_skip_message( :CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -850,7 +853,7 @@ def test_positive_import_json_output_no_name_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/name: .*/name: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -884,7 +887,7 @@ def test_positive_import_json_output_no_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "/model: .*/d" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -918,7 +921,7 @@ def test_positive_import_json_output_blank_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/model: .*/model: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -948,7 +951,7 @@ def test_positive_export_json_output( :CaseImportance: Low """ prefix = gen_string('alpha') - imported_templates = entities.Template().imports( + imported_templates = target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -963,7 +966,7 @@ def test_positive_export_json_output( assert imported_count == 17 # Total Count # Export some filtered templates 
to local dir _, dir_path = create_import_export_local_dir - exported_templates = entities.Template().exports( + exported_templates = target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': prefix} ) exported_count = [ @@ -1000,7 +1003,7 @@ def test_positive_import_log_to_production(self, module_org, target_sat): :CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1038,7 +1041,7 @@ def test_positive_export_log_to_production( :CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1047,7 +1050,7 @@ def test_positive_export_log_to_production( } ) _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': 'empty'} ) time.sleep(5) @@ -1076,7 +1079,7 @@ def test_positive_export_log_to_production( ids=['non_empty_repo', 'empty_repo'], ) def test_positive_export_all_templates_to_repo( - self, module_org, git_repository, git_branch, url + self, module_org, git_repository, git_branch, url, module_target_sat ): """Assure all templates are exported if no filter is specified. 
@@ -1094,7 +1097,7 @@ def test_positive_export_all_templates_to_repo( :CaseImportance: Low """ - output = entities.Template().exports( + output = module_target_sat.api.Template().exports( data={ 'repo': f'{url}/{git.username}/{git_repository["name"]}', 'branch': git_branch, @@ -1118,7 +1121,7 @@ def test_positive_export_all_templates_to_repo( assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_positive_import_all_templates_from_repo(self, module_org): + def test_positive_import_all_templates_from_repo(self, module_org, module_target_sat): """Assure all templates are imported if no filter is specified. :id: 95ac9543-d989-44f4-b4d9-18f20a0b58b9 @@ -1131,7 +1134,7 @@ def test_positive_import_all_templates_from_repo(self, module_org): :CaseImportance: Low """ - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1150,7 +1153,7 @@ def test_positive_import_all_templates_from_repo(self, module_org): assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_negative_import_locked_template(self, module_org): + def test_negative_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are not pulled from repository. 
:id: 88e21cad-448e-45e0-add2-94493a1319c5 @@ -1164,7 +1167,7 @@ def test_negative_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1176,7 +1179,7 @@ def test_negative_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # try to import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1193,13 +1196,13 @@ def test_negative_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template @pytest.mark.tier2 - def test_positive_import_locked_template(self, module_org): + def test_positive_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are pulled from repository while using force parameter. 
:id: 936c91cc-1947-45b0-8bf0-79ba4be87b97 @@ -1213,7 +1216,7 @@ def test_positive_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1225,7 +1228,7 @@ def test_positive_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # force import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1244,7 +1247,7 @@ def test_positive_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index 626604be526..29442aa3080 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -23,7 +23,6 @@ import json import re -from nailgun import entities from nailgun.config import ServerConfig import pytest from requests.exceptions import HTTPError @@ -45,9 +44,9 @@ @pytest.fixture(scope='module') -def create_user(): +def create_user(module_target_sat): """Create a user""" - return entities.User().create() + return module_target_sat.api.User().create() class TestUser: @@ -55,7 +54,7 @@ class TestUser: @pytest.mark.tier1 @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_with_username(self, username): + def test_positive_create_with_username(self, username, target_sat): """Create User for all variations of Username :id: a9827cda-7f6d-4785-86ff-3b6969c9c00a @@ 
-66,14 +65,14 @@ def test_positive_create_with_username(self, username): :CaseImportance: Critical """ - user = entities.User(login=username).create() + user = target_sat.api.User(login=username).create() assert user.login == username @pytest.mark.tier1 @pytest.mark.parametrize( 'firstname', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_firstname(self, firstname): + def test_positive_create_with_firstname(self, firstname, target_sat): """Create User for all variations of First Name :id: 036bb958-227c-420c-8f2b-c607136f12e0 @@ -86,14 +85,14 @@ def test_positive_create_with_firstname(self, firstname): """ if len(str.encode(firstname)) > 50: firstname = firstname[:20] - user = entities.User(firstname=firstname).create() + user = target_sat.api.User(firstname=firstname).create() assert user.firstname == firstname @pytest.mark.tier1 @pytest.mark.parametrize( 'lastname', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_lastname(self, lastname): + def test_positive_create_with_lastname(self, lastname, target_sat): """Create User for all variations of Last Name :id: 95d3b571-77e7-42a1-9c48-21f242e8cdc2 @@ -106,12 +105,12 @@ def test_positive_create_with_lastname(self, lastname): """ if len(str.encode(lastname)) > 50: lastname = lastname[:20] - user = entities.User(lastname=lastname).create() + user = target_sat.api.User(lastname=lastname).create() assert user.lastname == lastname @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_create_with_email(self, mail): + def test_positive_create_with_email(self, mail, target_sat): """Create User for all variations of Email :id: e68caf51-44ba-4d32-b79b-9ab9b67b9590 @@ -122,12 +121,12 @@ def test_positive_create_with_email(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = 
target_sat.api.User(mail=mail).create() assert user.mail == mail @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description): + def test_positive_create_with_description(self, description, target_sat): """Create User for all variations of Description :id: 1463d71c-b77d-4223-84fa-8370f77b3edf @@ -138,14 +137,14 @@ def test_positive_create_with_description(self, description): :CaseImportance: Critical """ - user = entities.User(description=description).create() + user = target_sat.api.User(description=description).create() assert user.description == description @pytest.mark.tier1 @pytest.mark.parametrize( 'password', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_password(self, password): + def test_positive_create_with_password(self, password, target_sat): """Create User for all variations of Password :id: 53d0a419-0730-4f7d-9170-d855adfc5070 @@ -156,13 +155,13 @@ def test_positive_create_with_password(self, password): :CaseImportance: Critical """ - user = entities.User(password=password).create() + user = target_sat.api.User(password=password).create() assert user is not None @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_delete(self, mail): + def test_positive_delete(self, mail, target_sat): """Create random users and then delete it. 
:id: df6059e7-85c5-42fa-99b5-b7f1ef809f52 @@ -173,7 +172,7 @@ def test_positive_delete(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = target_sat.api.User(mail=mail).create() user.delete() with pytest.raises(HTTPError): user.read() @@ -307,7 +306,7 @@ def test_positive_update_description(self, create_user, description): @pytest.mark.tier1 @pytest.mark.parametrize('admin_enable', [True, False]) - def test_positive_update_admin(self, admin_enable): + def test_positive_update_admin(self, admin_enable, target_sat): """Update a user and provide the ``admin`` attribute. :id: b5fedf65-37f5-43ca-806a-ac9a7979b19d @@ -318,13 +317,13 @@ def test_positive_update_admin(self, admin_enable): :CaseImportance: Critical """ - user = entities.User(admin=admin_enable).create() + user = target_sat.api.User(admin=admin_enable).create() user.admin = not admin_enable assert user.update().admin == (not admin_enable) @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(invalid_emails_list())) - def test_negative_create_with_invalid_email(self, mail): + def test_negative_create_with_invalid_email(self, mail, target_sat): """Create User with invalid Email Address :id: ebbd1f5f-e71f-41f4-a956-ce0071b0a21c @@ -336,11 +335,11 @@ def test_negative_create_with_invalid_email(self, mail): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(mail=mail).create() + target_sat.api.User(mail=mail).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_usernames_list())) - def test_negative_create_with_invalid_username(self, invalid_name): + def test_negative_create_with_invalid_username(self, invalid_name, target_sat): """Create User with invalid Username :id: aaf157a9-0375-4405-ad87-b13970e0609b @@ -352,11 +351,11 @@ def test_negative_create_with_invalid_username(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - 
entities.User(login=invalid_name).create() + target_sat.api.User(login=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_firstname(self, invalid_name): + def test_negative_create_with_invalid_firstname(self, invalid_name, target_sat): """Create User with invalid Firstname :id: cb1ca8a9-38b1-4d58-ae32-915b47b91657 @@ -368,11 +367,11 @@ def test_negative_create_with_invalid_firstname(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(firstname=invalid_name).create() + target_sat.api.User(firstname=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_lastname(self, invalid_name): + def test_negative_create_with_invalid_lastname(self, invalid_name, target_sat): """Create User with invalid Lastname :id: 59546d26-2b6b-400b-990f-0b5d1c35004e @@ -384,10 +383,10 @@ def test_negative_create_with_invalid_lastname(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(lastname=invalid_name).create() + target_sat.api.User(lastname=invalid_name).create() @pytest.mark.tier1 - def test_negative_create_with_blank_authorized_by(self): + def test_negative_create_with_blank_authorized_by(self, target_sat): """Create User with blank authorized by :id: 1fe2d1e3-728c-4d89-97ae-3890e904f413 @@ -397,7 +396,7 @@ def test_negative_create_with_blank_authorized_by(self): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(auth_source='').create() + target_sat.api.User(auth_source='').create() @pytest.mark.tier1 def test_positive_table_preferences(self, module_target_sat): @@ -413,16 +412,16 @@ def test_positive_table_preferences(self, module_target_sat): :BZ: 1757394 """ - existing_roles = entities.Role().search() + existing_roles = 
module_target_sat.api.Role().search() password = gen_string('alpha') - user = entities.User(role=existing_roles, password=password).create() + user = module_target_sat.api.User(role=existing_roles, password=password).create() name = "hosts" columns = ["power_status", "name", "comment"] sc = ServerConfig( auth=(user.login, password), url=module_target_sat.url, verify=settings.server.verify_ca ) - entities.TablePreferences(sc, user=user, name=name, columns=columns).create() - table_preferences = entities.TablePreferences(sc, user=user).search() + module_target_sat.api.TablePreferences(sc, user=user, name=name, columns=columns).create() + table_preferences = module_target_sat.api.TablePreferences(sc, user=user).search() assert len(table_preferences) == 1 tp = table_preferences[0] assert hasattr(tp, 'name') @@ -437,14 +436,14 @@ class TestUserRole: """Test associations between users and roles.""" @pytest.fixture(scope='class') - def make_roles(self): + def make_roles(self, class_target_sat): """Create two roles.""" - return [entities.Role().create() for _ in range(2)] + return [class_target_sat.api.Role().create() for _ in range(2)] @pytest.mark.tier1 @pytest.mark.build_sanity @pytest.mark.parametrize('number_of_roles', range(1, 3)) - def test_positive_create_with_role(self, make_roles, number_of_roles): + def test_positive_create_with_role(self, make_roles, number_of_roles, class_target_sat): """Create a user with the ``role`` attribute. 
:id: 32daacf1-eed4-49b1-81e1-ab0a5b0113f2 @@ -458,7 +457,7 @@ def test_positive_create_with_role(self, make_roles, number_of_roles): :CaseImportance: Critical """ chosen_roles = make_roles[:number_of_roles] - user = entities.User(role=chosen_roles).create() + user = class_target_sat.api.User(role=chosen_roles).create() assert len(user.role) == number_of_roles assert {role.id for role in user.role} == {role.id for role in chosen_roles} @@ -488,14 +487,14 @@ class TestSshKeyInUser: """Implements the SSH Key in User Tests""" @pytest.fixture(scope='class') - def create_user(self): + def create_user(self, class_target_sat): """Create an user and import different keys from data json file""" - user = entities.User().create() + user = class_target_sat.api.User().create() data_keys = json.loads(DataFile.SSH_KEYS_JSON.read_bytes()) return dict(user=user, data_keys=data_keys) @pytest.mark.tier1 - def test_positive_CRD_ssh_key(self): + def test_positive_CRD_ssh_key(self, class_target_sat): """SSH Key can be added to User :id: d00905f6-3a70-4e2f-a5ae-fcac18274bb7 @@ -511,18 +510,18 @@ def test_positive_CRD_ssh_key(self): :CaseImportance: Critical """ - user = entities.User().create() + user = class_target_sat.api.User().create() ssh_name = gen_string('alpha') ssh_key = gen_ssh_keypairs()[1] - user_sshkey = entities.SSHKey(user=user, name=ssh_name, key=ssh_key).create() + user_sshkey = class_target_sat.api.SSHKey(user=user, name=ssh_name, key=ssh_key).create() assert ssh_name == user_sshkey.name assert ssh_key == user_sshkey.key user_sshkey.delete() - result = entities.SSHKey(user=user).search() + result = class_target_sat.api.SSHKey(user=user).search() assert len(result) == 0 @pytest.mark.tier1 - def test_negative_create_ssh_key(self, create_user): + def test_negative_create_ssh_key(self, create_user, target_sat): """Invalid ssh key can not be added in User Template :id: e924ff03-8b2c-4ab9-a054-ea491413e143 @@ -542,7 +541,7 @@ def test_negative_create_ssh_key(self, 
create_user): """ invalid_sshkey = gen_string('alpha', length=256) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_sshkey ).create() assert re.search('Key is not a valid public ssh key', context.value.response.text) @@ -551,7 +550,7 @@ def test_negative_create_ssh_key(self, create_user): assert re.search('Length could not be calculated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_invalid_length_ssh_key(self, create_user): + def test_negative_create_invalid_length_ssh_key(self, create_user, target_sat): """Attempt to add SSH key that has invalid length :id: 899f0c46-c7fe-4610-80f1-1add4a9cbc26 @@ -568,14 +567,14 @@ def test_negative_create_invalid_length_ssh_key(self, create_user): """ invalid_length_key = create_user['data_keys']['ssh_keys']['invalid_ssh_key'] with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_length_key ).create() assert re.search('Length could not be calculated', context.value.response.text) assert not re.search('Fingerprint could not be generated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_ssh_key_with_invalid_name(self, create_user): + def test_negative_create_ssh_key_with_invalid_name(self, create_user, target_sat): """Attempt to add SSH key that has invalid name length :id: e1e17839-a392-45bb-bb1e-28d3cd9dba1c @@ -591,14 +590,14 @@ def test_negative_create_ssh_key_with_invalid_name(self, create_user): """ invalid_ssh_key_name = gen_string('alpha', length=300) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=invalid_ssh_key_name, key=gen_ssh_keypairs()[1] ).create() assert re.search("Name is too long", context.value.response.text) @pytest.mark.tier1 @pytest.mark.upgrade - def 
test_positive_create_multiple_ssh_key_types(self, create_user): + def test_positive_create_multiple_ssh_key_types(self, create_user, class_target_sat): """Multiple types of ssh keys can be added to user :id: d1ffa908-dc86-40c8-b6f0-20650cc67046 @@ -615,15 +614,15 @@ def test_positive_create_multiple_ssh_key_types(self, create_user): dsa = create_user['data_keys']['ssh_keys']['dsa'] ecdsa = create_user['data_keys']['ssh_keys']['ecdsa'] ed = create_user['data_keys']['ssh_keys']['ed'] - user = entities.User().create() + user = class_target_sat.api.User().create() for key in [rsa, dsa, ecdsa, ed]: - entities.SSHKey(user=user, name=gen_string('alpha'), key=key).create() - user_sshkeys = entities.SSHKey(user=user).search() + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=key).create() + user_sshkeys = class_target_sat.api.SSHKey(user=user).search() assert len(user_sshkeys) == 4 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_ssh_key_in_host_enc(self, target_sat): + def test_positive_ssh_key_in_host_enc(self, class_target_sat): """SSH key appears in host ENC output :id: 4b70a950-e777-4b2d-a83d-29279715fe6d @@ -639,13 +638,15 @@ def test_positive_ssh_key_in_host_enc(self, target_sat): :CaseLevel: Integration """ - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - user = entities.User(organization=[org], location=[loc]).create() + org = class_target_sat.api.Organization().create() + loc = class_target_sat.api.Location(organization=[org]).create() + user = class_target_sat.api.User(organization=[org], location=[loc]).create() ssh_key = gen_ssh_keypairs()[1] - entities.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() - host = entities.Host(owner=user, owner_type='User', organization=org, location=loc).create() - sshkey_updated_for_host = f'{ssh_key} {user.login}@{target_sat.hostname}' + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() + host = 
class_target_sat.api.Host( + owner=user, owner_type='User', organization=org, location=loc + ).create() + sshkey_updated_for_host = f'{ssh_key} {user.login}@{class_target_sat.hostname}' host_enc_key = host.enc()['data']['parameters']['ssh_authorized_keys'] assert sshkey_updated_for_host == host_enc_key[0] @@ -695,7 +696,7 @@ def create_ldap(self, ad_data, module_target_sat): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_in_ldap_mode(self, username, create_ldap): + def test_positive_create_in_ldap_mode(self, username, create_ldap, target_sat): """Create User in ldap mode :id: 6f8616b1-5380-40d2-8678-7c4434050cfb @@ -706,14 +707,14 @@ def test_positive_create_in_ldap_mode(self, username, create_ldap): :CaseLevel: Integration """ - user = entities.User( + user = target_sat.api.User( login=username, auth_source=create_ldap['authsource'], password='' ).create() assert user.login == username @pytest.mark.tier3 - def test_positive_ad_basic_no_roles(self, create_ldap): - """Login with LDAP Auth- AD for user with no roles/rights + def test_positive_ad_basic_no_roles(self, create_ldap, target_sat): + """Login with LDAP Auth AD for user with no roles/rights :id: 3910c6eb-6eff-4ab7-a50d-ba40f5c24c08 @@ -721,7 +722,7 @@ def test_positive_ad_basic_no_roles(self, create_ldap): :steps: Login to server with an AD user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access target_sat.api. 
:CaseLevel: System """ @@ -731,7 +732,7 @@ def test_positive_ad_basic_no_roles(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(sc).search() @pytest.mark.tier3 @pytest.mark.upgrade @@ -764,8 +765,10 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_ user.delete() role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = module_target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = module_target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -780,22 +783,22 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_ verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search( + module_target_sat.api.Architecture(sc).search() + user = module_target_sat.api.User().search( query={'search': 'login={}'.format(create_ldap['ldap_user_name'])} )[0] - user.role = [entities.Role(id=org_admin['id']).read()] + user.role = [module_target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + module_target_sat.api.Architecture, + module_target_sat.api.Audit, + module_target_sat.api.Bookmark, + module_target_sat.api.CommonParameter, + module_target_sat.api.LibvirtComputeResource, + module_target_sat.api.OVirtComputeResource, + module_target_sat.api.VMWareComputeResource, + module_target_sat.api.Errata, + module_target_sat.api.OperatingSystem, ]: 
entity(sc).search() @@ -843,7 +846,7 @@ def create_ldap(self, class_target_sat): user.delete() @pytest.mark.tier3 - def test_positive_ipa_basic_no_roles(self, create_ldap): + def test_positive_ipa_basic_no_roles(self, create_ldap, target_sat): """Login with LDAP Auth- FreeIPA for user with no roles/rights :id: 901a241d-aa76-4562-ab1a-a752e6fb7ed5 @@ -852,7 +855,7 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): :steps: Login to server with an FreeIPA user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access target_sat.api. :CaseLevel: System """ @@ -862,11 +865,11 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(sc).search() @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): + def test_positive_access_entities_from_ipa_org_admin(self, create_ldap, target_sat): """LDAP FreeIPA User can access resources within its taxonomies if assigned role has permission for same taxonomies @@ -885,8 +888,10 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): :CaseLevel: System """ role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -901,22 +906,22 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': 
'login={}'.format(create_ldap['username'])})[ - 0 - ] - user.role = [entities.Role(id=org_admin['id']).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': 'login={}'.format(create_ldap['username'])} + )[0] + user.role = [target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: entity(sc).search() diff --git a/tests/foreman/api/test_usergroup.py b/tests/foreman/api/test_usergroup.py index 01dcef27010..a01ee213a38 100644 --- a/tests/foreman/api/test_usergroup.py +++ b/tests/foreman/api/test_usergroup.py @@ -22,7 +22,6 @@ from random import randint from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -38,12 +37,12 @@ class TestUserGroup: """Tests for the ``usergroups`` path.""" @pytest.fixture - def user_group(self): - return entities.UserGroup().create() + def user_group(self, target_sat): + return target_sat.api.UserGroup().create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, target_sat, name): """Create new user group using different valid names :id: 3a2255d9-f48d-4f22-a4b9-132361bd9224 @@ -54,12 +53,12 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - user_group = entities.UserGroup(name=name).create() + user_group 
= target_sat.api.UserGroup(name=name).create() assert user_group.name == name @pytest.mark.tier1 @pytest.mark.parametrize('login', **parametrized(valid_usernames_list())) - def test_positive_create_with_user(self, login): + def test_positive_create_with_user(self, target_sat, login): """Create new user group using valid user attached to that group. :id: ab127e09-31d2-4c5b-ae6c-726e4b11a21e @@ -70,13 +69,13 @@ def test_positive_create_with_user(self, login): :CaseImportance: Critical """ - user = entities.User(login=login).create() - user_group = entities.UserGroup(user=[user]).create() + user = target_sat.api.User(login=login).create() + user_group = target_sat.api.UserGroup(user=[user]).create() assert len(user_group.user) == 1 assert user_group.user[0].read().login == login @pytest.mark.tier1 - def test_positive_create_with_users(self): + def test_positive_create_with_users(self, target_sat): """Create new user group using multiple users attached to that group. :id: b8dbbacd-b5cb-49b1-985d-96df21440652 @@ -86,15 +85,15 @@ def test_positive_create_with_users(self): :CaseImportance: Critical """ - users = [entities.User().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(user=users).create() + users = [target_sat.api.User().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(user=users).create() assert sorted(user.login for user in users) == sorted( user.read().login for user in user_group.user ) @pytest.mark.tier1 @pytest.mark.parametrize('role_name', **parametrized(valid_data_list())) - def test_positive_create_with_role(self, role_name): + def test_positive_create_with_role(self, target_sat, role_name): """Create new user group using valid role attached to that group. 
:id: c4fac71a-9dda-4e5f-a5df-be362d3cbd52 @@ -105,13 +104,13 @@ def test_positive_create_with_role(self, role_name): :CaseImportance: Critical """ - role = entities.Role(name=role_name).create() - user_group = entities.UserGroup(role=[role]).create() + role = target_sat.api.Role(name=role_name).create() + user_group = target_sat.api.UserGroup(role=[role]).create() assert len(user_group.role) == 1 assert user_group.role[0].read().name == role_name @pytest.mark.tier1 - def test_positive_create_with_roles(self): + def test_positive_create_with_roles(self, target_sat): """Create new user group using multiple roles attached to that group. :id: 5838fcfd-e256-49cf-aef8-b2bf215b3586 @@ -121,15 +120,15 @@ def test_positive_create_with_roles(self): :CaseImportance: Critical """ - roles = [entities.Role().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(role=roles).create() + roles = [target_sat.api.Role().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(role=roles).create() assert sorted(role.name for role in roles) == sorted( role.read().name for role in user_group.role ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_usergroup(self, name): + def test_positive_create_with_usergroup(self, target_sat, name): """Create new user group using another user group attached to the initial group. 
@@ -141,13 +140,13 @@ def test_positive_create_with_usergroup(self, name): :CaseImportance: Critical """ - sub_user_group = entities.UserGroup(name=name).create() - user_group = entities.UserGroup(usergroup=[sub_user_group]).create() + sub_user_group = target_sat.api.UserGroup(name=name).create() + user_group = target_sat.api.UserGroup(usergroup=[sub_user_group]).create() assert len(user_group.usergroup) == 1 assert user_group.usergroup[0].read().name == name @pytest.mark.tier2 - def test_positive_create_with_usergroups(self): + def test_positive_create_with_usergroups(self, target_sat): """Create new user group using multiple user groups attached to that initial group. @@ -158,15 +157,15 @@ def test_positive_create_with_usergroups(self): :CaseLevel: Integration """ - sub_user_groups = [entities.UserGroup().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(usergroup=sub_user_groups).create() + sub_user_groups = [target_sat.api.UserGroup().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(usergroup=sub_user_groups).create() assert sorted(usergroup.name for usergroup in sub_user_groups) == sorted( usergroup.read().name for usergroup in user_group.usergroup ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, target_sat, name): """Attempt to create user group with invalid name. :id: 1a3384dc-5d52-442c-87c8-e38048a61dfa @@ -178,10 +177,10 @@ def test_negative_create_with_name(self, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.UserGroup(name=name).create() + target_sat.api.UserGroup(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_same_name(self): + def test_negative_create_with_same_name(self, target_sat): """Attempt to create user group with a name of already existent entity. 
:id: aba0925a-d5ec-4e90-86c6-404b9b6f0179 @@ -190,9 +189,9 @@ def test_negative_create_with_same_name(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() with pytest.raises(HTTPError): - entities.UserGroup(name=user_group.name).create() + target_sat.api.UserGroup(name=user_group.name).create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @@ -212,7 +211,7 @@ def test_positive_update(self, user_group, new_name): assert new_name == user_group.name @pytest.mark.tier1 - def test_positive_update_with_new_user(self): + def test_positive_update_with_new_user(self, target_sat): """Add new user to user group :id: e11b57c3-5f86-4963-9cc6-e10e2f02468b @@ -221,14 +220,14 @@ def test_positive_update_with_new_user(self): :CaseImportance: Critical """ - user = entities.User().create() - user_group = entities.UserGroup().create() + user = target_sat.api.User().create() + user_group = target_sat.api.UserGroup().create() user_group.user = [user] user_group = user_group.update(['user']) assert user.login == user_group.user[0].read().login @pytest.mark.tier2 - def test_positive_update_with_existing_user(self): + def test_positive_update_with_existing_user(self, target_sat): """Update user that assigned to user group with another one :id: 71b78f64-867d-4bf5-9b1e-02698a17fb38 @@ -237,14 +236,14 @@ def test_positive_update_with_existing_user(self): :CaseLevel: Integration """ - users = [entities.User().create() for _ in range(2)] - user_group = entities.UserGroup(user=[users[0]]).create() + users = [target_sat.api.User().create() for _ in range(2)] + user_group = target_sat.api.UserGroup(user=[users[0]]).create() user_group.user[0] = users[1] user_group = user_group.update(['user']) assert users[1].login == user_group.user[0].read().login @pytest.mark.tier1 - def test_positive_update_with_new_role(self): + def test_positive_update_with_new_role(self, target_sat): 
"""Add new role to user group :id: 8e0872c1-ae88-4971-a6fc-cd60127d6663 @@ -253,15 +252,15 @@ def test_positive_update_with_new_role(self): :CaseImportance: Critical """ - new_role = entities.Role().create() - user_group = entities.UserGroup().create() + new_role = target_sat.api.Role().create() + user_group = target_sat.api.UserGroup().create() user_group.role = [new_role] user_group = user_group.update(['role']) assert new_role.name == user_group.role[0].read().name @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_update_with_new_usergroup(self): + def test_positive_update_with_new_usergroup(self, target_sat): """Add new user group to existing one :id: 3cb29d07-5789-4f94-9fd9-a7e494b3c110 @@ -270,8 +269,8 @@ def test_positive_update_with_new_usergroup(self): :CaseImportance: Critical """ - new_usergroup = entities.UserGroup().create() - user_group = entities.UserGroup().create() + new_usergroup = target_sat.api.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.usergroup = [new_usergroup] user_group = user_group.update(['usergroup']) assert new_usergroup.name == user_group.usergroup[0].read().name @@ -295,7 +294,7 @@ def test_negative_update(self, user_group, new_name): assert user_group.read().name != new_name @pytest.mark.tier1 - def test_negative_update_with_same_name(self): + def test_negative_update_with_same_name(self, target_sat): """Attempt to update user group with a name of already existent entity. 
:id: 14888998-9282-4d81-9e99-234d19706783 @@ -305,15 +304,15 @@ def test_negative_update_with_same_name(self): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.UserGroup(name=name).create() - new_user_group = entities.UserGroup().create() + target_sat.api.UserGroup(name=name).create() + new_user_group = target_sat.api.UserGroup().create() new_user_group.name = name with pytest.raises(HTTPError): new_user_group.update(['name']) assert new_user_group.read().name != name @pytest.mark.tier1 - def test_positive_delete(self): + def test_positive_delete(self, target_sat): """Create user group with valid name and then delete it :id: c5cfcc4a-9177-47bb-8f19-7a8930eb7ca3 @@ -322,7 +321,7 @@ def test_positive_delete(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.delete() with pytest.raises(HTTPError): user_group.read() diff --git a/tests/foreman/api/test_webhook.py b/tests/foreman/api/test_webhook.py index b806dbb24c0..a220b19af99 100644 --- a/tests/foreman/api/test_webhook.py +++ b/tests/foreman/api/test_webhook.py @@ -18,7 +18,6 @@ """ import re -from nailgun import entities import pytest from requests.exceptions import HTTPError from wait_for import TimedOutError, wait_for @@ -68,7 +67,7 @@ def assert_event_triggered(channel, event): class TestWebhook: @pytest.mark.tier2 - def test_negative_invalid_event(self): + def test_negative_invalid_event(self, target_sat): """Test negative webhook creation with an invalid event :id: 60cd456a-9943-45cb-a72e-23a83a691499 @@ -78,11 +77,11 @@ def test_negative_invalid_event(self): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.Webhooks(event='invalid_event').create() + target_sat.api.Webhooks(event='invalid_event').create() @pytest.mark.tier2 @pytest.mark.parametrize('event', **parametrized(WEBHOOK_EVENTS)) - def test_positive_valid_event(self, event): + def test_positive_valid_event(self, 
event, target_sat): """Test positive webhook creation with a valid event :id: 9b505f1b-7ee1-4362-b44c-f3107d043a05 @@ -91,11 +90,11 @@ def test_positive_valid_event(self, event): :CaseImportance: High """ - hook = entities.Webhooks(event=event).create() + hook = target_sat.api.Webhooks(event=event).create() assert event in hook.event @pytest.mark.tier2 - def test_negative_invalid_method(self): + def test_negative_invalid_method(self, target_sat): """Test negative webhook creation with an invalid HTTP method :id: 573be312-7bf3-4d9e-aca1-e5cac810d04b @@ -105,11 +104,11 @@ def test_negative_invalid_method(self): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.Webhooks(http_method='NONE').create() + target_sat.api.Webhooks(http_method='NONE').create() @pytest.mark.tier2 @pytest.mark.parametrize('method', **parametrized(WEBHOOK_METHODS)) - def test_positive_valid_method(self, method): + def test_positive_valid_method(self, method, target_sat): """Test positive webhook creation with a valid HTTP method :id: cf8f276a-d21e-44d0-92f2-657232240c7e @@ -118,12 +117,12 @@ def test_positive_valid_method(self, method): :CaseImportance: High """ - hook = entities.Webhooks(http_method=method).create() + hook = target_sat.api.Webhooks(http_method=method).create() assert hook.http_method == method @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_end_to_end(self): + def test_positive_end_to_end(self, target_sat): """Create a new webhook. 
:id: 7593a04e-cf7e-414c-9e7e-3fe2936cc32a @@ -132,7 +131,7 @@ def test_positive_end_to_end(self): :CaseImportance: Critical """ - hook = entities.Webhooks().create() + hook = target_sat.api.Webhooks().create() assert hook hook.name = "testing" hook.http_method = "GET" @@ -149,7 +148,7 @@ def test_positive_end_to_end(self): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_event_triggered(self, module_org, target_sat): + def test_positive_event_triggered(self, module_org, module_target_sat): """Create a webhook and trigger the event associated with it. @@ -160,13 +159,13 @@ def test_positive_event_triggered(self, module_org, target_sat): :CaseImportance: Critical """ - hook = entities.Webhooks( + hook = module_target_sat.api.Webhooks( event='actions.katello.repository.sync_succeeded', http_method='GET' ).create() - repo = entities.Repository( + repo = module_target_sat.api.Repository( organization=module_org, content_type='yum', url=settings.repos.yum_0.url ).create() - with target_sat.session.shell() as shell: + with module_target_sat.api.session.shell() as shell: shell.send('foreman-tail') repo.sync() assert_event_triggered(shell, hook.event) From 034dcacb0eeab0b233ab3e571056b8226ddcd2dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Nov 2023 13:23:31 +0530 Subject: [PATCH 77/96] Bump deepdiff from 6.6.1 to 6.7.0 (#13054) Bumps [deepdiff](https://github.com/seperman/deepdiff) from 6.6.1 to 6.7.0. - [Release notes](https://github.com/seperman/deepdiff/releases) - [Changelog](https://github.com/seperman/deepdiff/blob/master/docs/changelog.rst) - [Commits](https://github.com/seperman/deepdiff/commits) --- updated-dependencies: - dependency-name: deepdiff dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e401aea7641..efde8050ec6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ betelgeuse==1.10.0 broker[docker]==0.4.1 cryptography==41.0.5 -deepdiff==6.6.1 +deepdiff==6.7.0 dynaconf[vault]==3.2.4 fauxfactory==3.1.0 jinja2==3.1.2 From 783549a099f80c1c3c4296058ad09853675ec3c7 Mon Sep 17 00:00:00 2001 From: David Moore <109112035+damoore044@users.noreply.github.com> Date: Thu, 9 Nov 2023 12:07:11 -0500 Subject: [PATCH 78/96] Stream fix errata:UI (test_end_to_end) (#12978) * Updates for UI errata e2e * Check bulk generate applicability task * Cleanup between parameterized runs * Addressing comments --- robottelo/cli/factory.py | 10 +- robottelo/host_helpers/cli_factory.py | 4 +- tests/foreman/ui/test_errata.py | 296 +++++++++++++++++++------- 3 files changed, 226 insertions(+), 84 deletions(-) diff --git a/robottelo/cli/factory.py b/robottelo/cli/factory.py index 007fa7dadfd..7d1dd5b612a 100644 --- a/robottelo/cli/factory.py +++ b/robottelo/cli/factory.py @@ -1692,13 +1692,19 @@ def setup_org_for_a_custom_repo(options=None): raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') # Get the content view info cv_info = ContentView.info({'id': cv_id}) - lce_promoted = cv_info['lifecycle-environments'] + assert len(cv_info['versions']) > 0 + cv_info['versions'].sort(key=lambda version: version['id']) cvv = cv_info['versions'][-1] + lce_promoted = cv_info['lifecycle-environments'] # Promote version to next env try: if env_id not in [int(lce['id']) for lce in lce_promoted]: ContentView.version_promote( - {'id': cvv['id'], 'organization-id': org_id, 'to-lifecycle-environment-id': env_id} + { + 'id': cvv['id'], + 'organization-id': org_id, + 'to-lifecycle-environment-id': env_id, + } ) except 
CLIReturnCodeError as err: raise CLIFactoryError(f'Failed to promote version to next environment\n{err.msg}') diff --git a/robottelo/host_helpers/cli_factory.py b/robottelo/host_helpers/cli_factory.py index cf7d8a28058..d27e27ce6e4 100644 --- a/robottelo/host_helpers/cli_factory.py +++ b/robottelo/host_helpers/cli_factory.py @@ -630,8 +630,10 @@ def setup_org_for_a_custom_repo(self, options=None): raise CLIFactoryError(f'Failed to publish new version of content view\n{err.msg}') # Get the version id cv_info = self._satellite.cli.ContentView.info({'id': cv_id}) - lce_promoted = cv_info['lifecycle-environments'] + assert len(cv_info['versions']) > 0 + cv_info['versions'].sort(key=lambda version: version['id']) cvv = cv_info['versions'][-1] + lce_promoted = cv_info['lifecycle-environments'] # Promote version to next env try: if env_id not in [int(lce['id']) for lce in lce_promoted]: diff --git a/tests/foreman/ui/test_errata.py b/tests/foreman/ui/test_errata.py index 632fcefad32..b181396c047 100644 --- a/tests/foreman/ui/test_errata.py +++ b/tests/foreman/ui/test_errata.py @@ -16,6 +16,8 @@ :Upstream: No """ +from datetime import datetime + from airgun.session import Session from broker import Broker from fauxfactory import gen_string @@ -27,6 +29,7 @@ from robottelo.constants import ( DEFAULT_LOC, FAKE_1_CUSTOM_PACKAGE, + FAKE_1_CUSTOM_PACKAGE_NAME, FAKE_2_CUSTOM_PACKAGE, FAKE_3_YUM_OUTDATED_PACKAGES, FAKE_4_CUSTOM_PACKAGE, @@ -61,20 +64,6 @@ def _generate_errata_applicability(hostname): host.errata_applicability(synchronous=False) -def _install_client_package(client, package, errata_applicability=False): - """Install a package in virtual machine client. - - :param client: The Virtual machine client. - :param package: the package to install in virtual machine client. - :param errata_applicability: If True, force host to generate errata applicability. - :returns: True if package installed successfully, False otherwise. 
- """ - result = client.execute(f'yum install -y {package}') - if errata_applicability: - _generate_errata_applicability(client.hostname) - return result.status == 0 - - def _set_setting_value(setting_entity, value): """Set setting value. @@ -124,13 +113,6 @@ def function_org_with_parameter(target_sat, function_manifest): return org -@pytest.fixture(scope='module') -def module_lce(module_target_sat, module_org_with_parameter): - return module_target_sat.api.LifecycleEnvironment( - organization=module_org_with_parameter - ).create() - - @pytest.fixture def lce(target_sat, function_org_with_parameter): return target_sat.api.LifecycleEnvironment(organization=function_org_with_parameter).create() @@ -163,37 +145,132 @@ def vm(module_repos_collection_with_setup, rhel7_contenthost, target_sat): return rhel7_contenthost +@pytest.fixture +def registered_contenthost( + rhel_contenthost, + module_org, + module_lce, + module_cv, + module_target_sat, + request, + repos=[CUSTOM_REPO_URL], +): + """RHEL ContentHost registered in satellite, + Using SCA and global registration. 
+ + :param repos: list of upstream URLs for custom repositories, + default to CUSTOM_REPO_URL + """ + activation_key = module_target_sat.api.ActivationKey( + organization=module_org, + environment=module_lce, + ).create() + + custom_products = [] + custom_repos = [] + for repo_url in repos: + # Publishes a new cvv, associates org, ak, cv, with custom repo: + custom_repo_info = module_target_sat.cli_factory.setup_org_for_a_custom_repo( + { + 'url': repo_url, + 'organization-id': module_org.id, + 'lifecycle-environment-id': module_lce.id, + 'activationkey-id': activation_key.id, + 'content-view-id': module_cv.id, + } + ) + custom_products.append(custom_repo_info['product-id']) + custom_repos.append(custom_repo_info['repository-id']) + + # Promote newest version with all content + module_cv = module_cv.read() + module_cv.version.sort(key=lambda version: version.id) + module_cv.version[-1].promote(data={'environment_ids': module_lce.id}) + module_cv = module_cv.read() + + result = rhel_contenthost.register( + activation_keys=activation_key.name, + target=module_target_sat, + org=module_org, + loc=None, + ) + assert result.status == 0, f'Failed to register host:\n{result.stderr}' + assert rhel_contenthost.subscribed + + for custom_repo_id in custom_repos: + custom_repo = module_target_sat.api.Repository(id=custom_repo_id).read() + assert custom_repo + result = custom_repo.sync()['humanized'] + assert ( + len(result['errors']) == 0 + ), f'Failed to sync custom repository [id: {custom_repo_id}]:\n{str(result["errors"])}' + + yield rhel_contenthost + + @request.addfinalizer + # Cleanup for in between parameterized runs + def cleanup(): + nonlocal rhel_contenthost, module_cv, custom_repos, custom_products, activation_key + rhel_contenthost.unregister() + activation_key.delete() + # Remove CV from all lifecycle-environments + module_target_sat.cli.ContentView.remove_from_environment( + { + 'id': module_cv.id, + 'organization-id': module_org.id, + 'lifecycle-environment-id': 
module_lce.id, + } + ) + module_target_sat.cli.ContentView.remove_from_environment( + { + 'id': module_cv.id, + 'organization-id': module_org.id, + 'lifecycle-environment': 'Library', + } + ) + # Delete all CV versions + module_cv = module_cv.read() + for version in module_cv.version: + version.delete() + # Remove repos from CV, delete all custom repos and products + for repo_id in custom_repos: + module_target_sat.cli.ContentView.remove_repository( + { + 'id': module_cv.id, + 'repository-id': repo_id, + } + ) + module_target_sat.api.Repository(id=repo_id).delete() + for product_id in custom_products: + module_target_sat.api.Product(id=product_id).delete() + # Publish a new CV version with no content + module_cv = module_cv.read() + module_cv.publish() + + @pytest.mark.e2e @pytest.mark.tier3 -@pytest.mark.parametrize('setting_update', ['remote_execution_by_default'], indirect=True) -@pytest.mark.parametrize( - 'module_repos_collection_with_setup', - [ - { - 'distro': 'rhel7', - 'SatelliteToolsRepository': {}, - 'RHELAnsibleEngineRepository': {}, - 'YumRepository': {'url': CUSTOM_REPO_URL}, - } - ], - indirect=True, -) +@pytest.mark.rhel_ver_match('[^6]') @pytest.mark.no_containers def test_end_to_end( session, - module_org_with_parameter, - module_repos_collection_with_setup, - vm, - target_sat, - setting_update, + request, + module_org, + module_lce, + module_published_cv, + module_target_sat, + registered_contenthost, ): """Create all entities required for errata, set up applicable host, - read errata details and apply it to host + read errata details and apply it to host. :id: a26182fc-f31a-493f-b094-3f5f8d2ece47 + :setup: A host with content from a custom repo, + contains some outdated packages applicable errata. + :expectedresults: Errata details are the same as expected, errata - installation is successful + installation is successful. 
:parametrized: yes @@ -203,11 +280,14 @@ def test_end_to_end( :CaseLevel: System """ + ERRATA_DETAILS = { 'advisory': 'RHSA-2012:0055', 'cves': 'N/A', 'type': 'Security Advisory', 'severity': 'N/A', + 'issued': 'January 27, 2012 at 12:00 AM', + 'last_updated_on': 'January 27, 2012 at 12:00 AM', 'reboot_suggested': 'No', 'topic': '', 'description': 'Sea_Erratum', @@ -221,23 +301,41 @@ def test_end_to_end( ], 'module_stream_packages': [], } - assert _install_client_package(vm, FAKE_1_CUSTOM_PACKAGE) + _UTC_format = '%Y-%m-%d %H:%M:%S UTC' + # Capture newest product and repository with the desired content + product_list = module_target_sat.api.Product(organization=module_org).search() + assert len(product_list) > 0 + product_list.sort(key=lambda product: product.id) + _product = product_list[-1].read() + assert len(_product.repository) == 1 + _repository = _product.repository[0].read() + # Remove custom package if present, install outdated version + registered_contenthost.execute(f'yum remove -y {FAKE_1_CUSTOM_PACKAGE_NAME}') + result = registered_contenthost.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}') + assert result.status == 0, f'Failed to install package {FAKE_1_CUSTOM_PACKAGE}.' + applicable_errata = registered_contenthost.applicable_errata_count + assert ( + applicable_errata == 1 + ), f'Expected 1 applicable errata: {CUSTOM_REPO_ERRATA_ID}, after setup. 
Got {applicable_errata}' + with session: - property_value = 'Yes' - session.settings.update(f'name = {setting_update.name}', property_value) # BZ 2029192 + + datetime_utc_start = datetime.utcnow() # Check selection box function for BZ#1688636 session.location.select(loc_name=DEFAULT_LOC) - assert session.errata.search(CUSTOM_REPO_ERRATA_ID, applicable=True)[0]['Errata ID'] - assert session.errata.search(CUSTOM_REPO_ERRATA_ID, installable=True)[0]['Errata ID'] - # Check all tabs of Errata Details page + session.organization.select(org_name=module_org.name) + assert session.errata.search_content_hosts( + CUSTOM_REPO_ERRATA_ID, registered_contenthost.hostname, environment=module_lce.name + ), 'Errata ID not found on registered contenthost or the host lifecycle-environment.' errata = session.errata.read(CUSTOM_REPO_ERRATA_ID) - # We ignore issued date and updated date in ERRATA_DETAILS, so we don't perform an - # equality check here. - # TODO: Find a way to account for browser time zone, so that the errata dates displayed - # in the UI can be compared to the UTC values in ERRATA_DETAILS. + assert errata['repositories']['table'][-1]['Name'] == _repository.name + assert errata['repositories']['table'][-1]['Product'] == _product.name + # Check all tabs of Errata Details page assert ( not ERRATA_DETAILS.items() - errata['details'].items() ), 'Errata details do not match expected values.' 
+ assert errata['details']['issued'] == ERRATA_DETAILS['issued'] + assert errata['details']['last_updated_on'] == ERRATA_DETAILS['last_updated_on'] assert set(errata['packages']['independent_packages']) == set( ERRATA_PACKAGES['independent_packages'] ) @@ -245,22 +343,63 @@ def test_end_to_end( errata['packages']['module_stream_packages'] == ERRATA_PACKAGES['module_stream_packages'] ) - assert ( - errata['repositories']['table'][-1]['Name'] - == module_repos_collection_with_setup.custom_repos_info[-1]['name'] + + # Apply Errata, find REX install task + session.host_new.apply_erratas( + entity_name=registered_contenthost.hostname, + search=f"errata_id == {CUSTOM_REPO_ERRATA_ID}", + ) + results = module_target_sat.wait_for_tasks( + search_query=( + f'"Install errata errata_id == {CUSTOM_REPO_ERRATA_ID}' + f' on {registered_contenthost.hostname}"' + ), + search_rate=2, + max_tries=60, ) + results.sort(key=lambda res: res.id) + task_status = module_target_sat.api.ForemanTask(id=results[-1].id).poll() assert ( - errata['repositories']['table'][-1]['Product'] - == module_repos_collection_with_setup.custom_product['name'] + task_status['result'] == 'success' + ), f'Errata Installation task failed:\n{task_status}' + assert ( + registered_contenthost.applicable_errata_count == 0 + ), 'Unexpected applicable errata found after install.' 
+ # UTC timing for install task and session + install_start = datetime.strptime(task_status['started_at'], _UTC_format) + install_end = datetime.strptime(task_status['ended_at'], _UTC_format) + assert (install_end - install_start).total_seconds() <= 60 + assert (install_end - datetime_utc_start).total_seconds() <= 600 + + # Find bulk generate applicability task + results = module_target_sat.wait_for_tasks( + search_query=( + f'Bulk generate applicability for host {registered_contenthost.hostname}' + ), + search_rate=2, + max_tries=60, ) - status = session.contenthost.install_errata( - vm.hostname, CUSTOM_REPO_ERRATA_ID, install_via='rex' + results.sort(key=lambda res: res.id) + task_status = module_target_sat.api.ForemanTask(id=results[-1].id).poll() + assert ( + task_status['result'] == 'success' + ), f'Bulk Generate Errata Applicability task failed:\n{task_status}' + # UTC timing for generate applicability task + bulk_gen_start = datetime.strptime(task_status['started_at'], _UTC_format) + bulk_gen_end = datetime.strptime(task_status['ended_at'], _UTC_format) + assert (bulk_gen_start - install_end).total_seconds() <= 30 + assert (bulk_gen_end - bulk_gen_start).total_seconds() <= 60 + + # Errata should still be visible on satellite, but not on contenthost + assert session.errata.read(CUSTOM_REPO_ERRATA_ID) + assert not session.errata.search_content_hosts( + CUSTOM_REPO_ERRATA_ID, registered_contenthost.hostname, environment=module_lce.name ) - assert status['overview']['job_status'] == 'Success' - assert status['overview']['job_status_progress'] == '100%' - _generate_errata_applicability(vm.hostname) - vm = vm.nailgun_host.read() - assert vm.applicable_errata_count == 0 + # Check package version was updated on contenthost + _package_version = registered_contenthost.execute( + f'rpm -q {FAKE_1_CUSTOM_PACKAGE_NAME}' + ).stdout + assert FAKE_2_CUSTOM_PACKAGE in _package_version @pytest.mark.tier2 @@ -312,7 +451,7 @@ def 
test_content_host_errata_page_pagination(session, function_org_with_paramete client.add_rex_key(satellite=target_sat) # Add repo and install packages that need errata repos_collection.setup_virtual_machine(client) - assert _install_client_package(client, pkgs) + assert client.execute(f'yum install -y {pkgs}').status == 0 with session: # Go to content host's Errata tab and read the page's pagination widgets session.organization.select(org_name=org.name) @@ -486,7 +625,7 @@ def test_positive_apply_for_all_hosts( for client in clients: module_repos_collection_with_setup.setup_virtual_machine(client) client.add_rex_key(satellite=target_sat) - assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE) + assert client.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 with session: session.location.select(loc_name=DEFAULT_LOC) for client in clients: @@ -581,7 +720,7 @@ def test_positive_filter_by_environment( ) as clients: for client in clients: module_repos_collection_with_setup.setup_virtual_machine(client) - assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) + assert client.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 # Promote the latest content view version to a new lifecycle environment content_view = entities.ContentView( id=module_repos_collection_with_setup.setup_content_data['content_view']['id'] @@ -654,7 +793,7 @@ def test_positive_content_host_previous_env( """ module_org = module_org_with_parameter hostname = vm.hostname - assert _install_client_package(vm, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) + assert vm.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 # Promote the latest content view version to a new lifecycle environment content_view = entities.ContentView( id=module_repos_collection_with_setup.setup_content_data['content_view']['id'] @@ -711,7 +850,7 @@ def test_positive_content_host_library(session, module_org_with_parameter, vm): :CaseLevel: System """ 
hostname = vm.hostname - assert _install_client_package(vm, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) + assert vm.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 with session: session.location.select(loc_name=DEFAULT_LOC) content_host_erratum = session.contenthost.search_errata( @@ -753,7 +892,7 @@ def test_positive_content_host_search_type(session, erratatype_vm): """ pkgs = ' '.join(FAKE_9_YUM_OUTDATED_PACKAGES) - assert _install_client_package(erratatype_vm, pkgs, errata_applicability=True) + assert erratatype_vm.execute(f'yum install -y {pkgs}').status == 0 with session: session.location.select(loc_name=DEFAULT_LOC) @@ -839,7 +978,7 @@ def test_positive_show_count_on_content_host_page( assert int(installable_errata[errata_type]) == 0 pkgs = ' '.join(FAKE_9_YUM_OUTDATED_PACKAGES) - assert _install_client_package(vm, pkgs, errata_applicability=True) + assert vm.execute(f'yum install -y {pkgs}').status == 0 content_host_values = session.contenthost.search(hostname) assert content_host_values[0]['Name'] == hostname @@ -892,7 +1031,7 @@ def test_positive_show_count_on_content_host_details_page( assert int(content_host_values['details'][errata_type]) == 0 pkgs = ' '.join(FAKE_9_YUM_OUTDATED_PACKAGES) - assert _install_client_package(vm, pkgs, errata_applicability=True) + assert vm.execute(f'yum install -y {pkgs}').status == 0 # navigate to content host main page by making a search, to refresh the details page session.contenthost.search(hostname) @@ -955,7 +1094,7 @@ def test_positive_filtered_errata_status_installable_param( repos_collection.setup_content(org.id, lce.id) with Broker(nick=repos_collection.distro, host_class=ContentHost) as client: repos_collection.setup_virtual_machine(client) - assert _install_client_package(client, FAKE_1_CUSTOM_PACKAGE, errata_applicability=True) + assert client.execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 # Adding content view filter and content view filter rule to exclude errata for the # 
installed package. content_view = entities.ContentView( @@ -992,9 +1131,7 @@ def test_positive_filtered_errata_status_installable_param( assert expected_values[key] in actual_values[key], 'Expected text not found' property_value = 'Yes' session.settings.update(f'name = {setting_update.name}', property_value) - assert _install_client_package( - client, FAKE_9_YUM_OUTDATED_PACKAGES[1], errata_applicability=True - ) + assert client.execute(f'yum install -y {FAKE_9_YUM_OUTDATED_PACKAGES[1]}').status == 0 expected_values = { 'Status': 'Error', 'Errata': 'Security errata installable', @@ -1059,13 +1196,10 @@ def test_content_host_errata_search_commands( for client in clients: module_repos_collection_with_setup.setup_virtual_machine(client) # Install pkg walrus-0.71-1.noarch to create need for RHSA on client 1 - assert _install_client_package( - clients[0], FAKE_1_CUSTOM_PACKAGE, errata_applicability=False - ) + assert clients[0].execute(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}').status == 0 # Install pkg kangaroo-0.1-1.noarch to create need for RHBA on client 2 - assert _install_client_package( - clients[1], FAKE_4_CUSTOM_PACKAGE, errata_applicability=False - ) + assert clients[1].execute(f'yum install -y {FAKE_4_CUSTOM_PACKAGE}').status == 0 + with session: session.location.select(loc_name=DEFAULT_LOC) # Search for hosts needing RHSA security errata From 339bf1c94c749bddb12f2610aace676ac5be1cb1 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Fri, 10 Nov 2023 16:59:45 +0530 Subject: [PATCH 79/96] [6.15] Add test coverage for setting display_fqdn_for_hosts (#13056) Add test coverage for setting display_fqdn_for_hosts Signed-off-by: Gaurav Talreja --- tests/foreman/ui/test_settings.py | 36 +++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/foreman/ui/test_settings.py b/tests/foreman/ui/test_settings.py index 923d542443d..d207bed36f0 100644 --- a/tests/foreman/ui/test_settings.py +++ b/tests/foreman/ui/test_settings.py @@ -549,3 +549,39 
@@ def test_positive_entries_per_page(session, setting_update): total_pages_str = page_content["Pagination"]['_items'].split()[-2] total_pages = math.ceil(int(total_pages_str.split()[-1]) / property_value) assert str(total_pages) == page_content["Pagination"]['_total_pages'].split()[-1] + + +@pytest.mark.tier2 +@pytest.mark.stream +def test_positive_setting_display_fqdn_for_hosts(session, target_sat): + """Verify setting display_fqdn_for_hosts set as Yes/No, and FQDN is used for host's name + if it's set to Yes else not, according to setting set. + + :id: b1a51594-43e6-49d8-918b-9bc306f3a1a4 + + :steps: + 1. Navigate to Monitor -> Dashboard + 2. Verify NewHosts table view contains host_name is w/ or w/o FQDN value + 3. Navigate to Hosts -> All Hosts -> details page + 4. Verify host_name in breadcrumbs is w/ or w/o FQDN value + + :expectedresults: FQDN is used for hostname if setting is set to Yes(default), + else hostname is present without FQDN. + """ + host_name, domain_name = target_sat.hostname.split('.', 1) + default_value = target_sat.update_setting('display_fqdn_for_hosts', 'No') + with target_sat.ui_session() as session: + dashboard_hosts = session.dashboard.read('NewHosts') + assert host_name in [h['Host'] for h in dashboard_hosts['hosts'] if h['Host'] == host_name] + + values = session.host_new.get_details(host_name, widget_names='breadcrumb') + assert values['breadcrumb'] == host_name + + # Verify with display_fqdn_for_hosts=Yes + target_sat.update_setting('display_fqdn_for_hosts', default_value) + full_name = '.'.join((host_name, domain_name)) + dashboard_hosts = session.dashboard.read('NewHosts') + assert full_name in [h['Host'] for h in dashboard_hosts['hosts'] if h['Host'] == full_name] + + values = session.host_new.get_details(target_sat.hostname, widget_names='breadcrumb') + assert values['breadcrumb'] == full_name From 1c05f470703ff69c5cbc6b675c7fff277676afca Mon Sep 17 00:00:00 2001 From: Peter Ondrejka Date: Fri, 10 Nov 2023 14:30:14 +0100 
Subject: [PATCH 80/96] improved webhook event trigger test (#13048) * improved webhook event trigger test * Update tests/foreman/api/test_webhook.py Co-authored-by: Omkar Khatavkar --------- Co-authored-by: Omkar Khatavkar --- tests/foreman/api/test_webhook.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/foreman/api/test_webhook.py b/tests/foreman/api/test_webhook.py index a220b19af99..019ac42484c 100644 --- a/tests/foreman/api/test_webhook.py +++ b/tests/foreman/api/test_webhook.py @@ -148,7 +148,9 @@ def test_positive_end_to_end(self, target_sat): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_event_triggered(self, module_org, module_target_sat): + @pytest.mark.e2e + @pytest.mark.parametrize('setting_update', ['safemode_render=False'], indirect=True) + def test_positive_event_triggered(self, module_org, target_sat, setting_update): """Create a webhook and trigger the event associated with it. 
@@ -159,13 +161,14 @@ def test_positive_event_triggered(self, module_org, module_target_sat): :CaseImportance: Critical """ - hook = module_target_sat.api.Webhooks( + hook = target_sat.api.Webhooks( event='actions.katello.repository.sync_succeeded', http_method='GET' ).create() - repo = module_target_sat.api.Repository( + repo = target_sat.api.Repository( organization=module_org, content_type='yum', url=settings.repos.yum_0.url ).create() - with module_target_sat.api.session.shell() as shell: + with target_sat.session.shell() as shell: shell.send('foreman-tail') repo.sync() assert_event_triggered(shell, hook.event) + target_sat.wait_for_tasks(f'Deliver webhook {hook.name}') From 5f0c2b0520d54a02cbae97a756b3a6c287cb85a6 Mon Sep 17 00:00:00 2001 From: omkarkhatavkar Date: Fri, 10 Nov 2023 16:29:14 +0530 Subject: [PATCH 81/96] updating the fedora image to 38 as latest using python 3.12 --- Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index de7b12ac71d..2b88a306431 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM fedora +FROM fedora:38 MAINTAINER https://github.com/SatelliteQE RUN dnf install -y gcc git make cmake libffi-devel openssl-devel python3-devel \ @@ -6,7 +6,6 @@ RUN dnf install -y gcc git make cmake libffi-devel openssl-devel python3-devel \ COPY / /robottelo/ WORKDIR /robottelo -RUN curl https://raw.githubusercontent.com/SatelliteQE/broker/master/broker_settings.yaml.example -o broker_settings.yaml ENV PYCURL_SSL_LIBRARY=openssl RUN pip install -r requirements.txt From 1a7b56644dcd0e28bc99fb13f6ca1bd62c2f9305 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 22:39:19 -0500 Subject: [PATCH 82/96] Bump actions/github-script from 6 to 7 (#13078) --- .github/workflows/auto_cherry_pick.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/auto_cherry_pick.yml 
b/.github/workflows/auto_cherry_pick.yml index 170cb4cc2a9..4cef50417b0 100644 --- a/.github/workflows/auto_cherry_pick.yml +++ b/.github/workflows/auto_cherry_pick.yml @@ -88,7 +88,7 @@ jobs: - name: is autoMerging enabled for Auto CherryPicked PRs ? if: ${{ always() && steps.cherrypick.outcome == 'success' && contains(github.event.pull_request.labels.*.name, 'AutoMerge_Cherry_Picked') }} - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: github-token: ${{ secrets.CHERRYPICK_PAT }} script: | From 2d3cd63d6b34b9d2449c25c5404e03f6d5d0447a Mon Sep 17 00:00:00 2001 From: Jake Callahan Date: Tue, 14 Nov 2023 10:11:58 -0500 Subject: [PATCH 83/96] Add SharedResource class to robottelo.utils (#12341) This class will be used to manage shared resources between multiple processes or threads. The most common case will likely be a shared upgrade satellite between multiple xdist workers. I also handled an issue I encountered with the config_helpers script. --- robottelo/utils/shared_resource.py | 199 ++++++++++++++++++++++++ scripts/config_helpers.py | 5 +- tests/robottelo/test_shared_resource.py | 61 ++++++++ 3 files changed, 263 insertions(+), 2 deletions(-) create mode 100644 robottelo/utils/shared_resource.py create mode 100644 tests/robottelo/test_shared_resource.py diff --git a/robottelo/utils/shared_resource.py b/robottelo/utils/shared_resource.py new file mode 100644 index 00000000000..0ad0bd92e46 --- /dev/null +++ b/robottelo/utils/shared_resource.py @@ -0,0 +1,199 @@ +"""Allow multiple processes to communicate status on a single shared resource. + +This is useful for cases where multiple processes need to wait for all other processes to be ready +before continuing with some common action. The most common use case in this framework will likely +be to wait for all pre-upgrade setups to be ready before performing the upgrade. + +The system works by creating a file in /tmp with the name of the resource.
This is a common file +where each process can communicate its status. The first process to register will be the main +watcher. The main watcher will wait for all other processes to be ready, then perform the action. +If the main watcher fails to complete the action, and the action is recoverable, another process +will take over as the main watcher and attempt to perform the action. If the action is not +recoverable, the main watcher will fail and release all other processes. + +It is recommended to use this class as a context manager, as it will automatically register and +report when the process is done. + +Example: + >>> with SharedResource("target_sat.hostname", upgrade_action, **upgrade_kwargs) as resource: + ... # Do pre-upgrade setup steps + ... resource.ready() # tell the other processes that we are ready + ... yield target_sat # give the upgraded satellite to the test + ... # Do post-upgrade cleanup steps if any +""" +import json +from pathlib import Path +import time +from uuid import uuid4 + +from broker.helpers import FileLock + + +class SharedResource: + """A class representing a shared resource. + + Attributes: + action (function): The function to be executed when the resource is ready. + action_args (tuple): The arguments to be passed to the action function. + action_kwargs (dict): The keyword arguments to be passed to the action function. + action_is_recoverable (bool): Whether the action is recoverable or not. + id (str): The unique identifier of the shared resource. + resource_file (Path): The path to the file representing the shared resource. + is_main (bool): Whether the current instance is the main watcher or not. + is_recovering (bool): Whether the current instance is recovering from an error or not. + """ + + def __init__(self, resource_name, action, *action_args, **action_kwargs): + """Initializes a new instance of the SharedResource class. + + Args: + resource_name (str): The name of the shared resource.
+ action (function): The function to be executed when the resource is ready. + action_args (tuple): The arguments to be passed to the action function. + action_kwargs (dict): The keyword arguments to be passed to the action function. + """ + self.resource_file = Path(f"/tmp/{resource_name}.shared") + self.lock_file = FileLock(self.resource_file) + self.id = str(uuid4().fields[-1]) + self.action = action + self.action_is_recoverable = action_kwargs.pop("action_is_recoverable", False) + self.action_args = action_args + self.action_kwargs = action_kwargs + self.is_recovering = False + + def _update_status(self, status): + """Updates the status of the shared resource. + + Args: + status (str): The new status of the shared resource. + """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + curr_data["statuses"][self.id] = status + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def _update_main_status(self, status): + """Updates the main status of the shared resource. + + Args: + status (str): The new main status of the shared resource. + """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + curr_data["main_status"] = status + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def _check_all_status(self, status): + """Checks if all watchers have the specified status. + + Args: + status (str): The status to check for. + + Returns: + bool: True if all watchers have the specified status, False otherwise. + """ + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + for watcher_id in curr_data["watchers"]: + if curr_data["statuses"].get(watcher_id) != status: + return False + return True + + def _wait_for_status(self, status): + """Waits until all watchers have the specified status. + + Args: + status (str): The status to wait for. 
+ """ + while not self._check_all_status(status): + time.sleep(1) + + def _wait_for_main_watcher(self): + """Waits for the main watcher to finish.""" + while True: + curr_data = json.loads(self.resource_file.read_text()) + if curr_data["main_status"] != "done": + time.sleep(60) + elif curr_data["main_status"] == "action_error": + self._try_take_over() + elif curr_data["main_status"] == "error": + raise Exception(f"Error in main watcher: {curr_data['main_watcher']}") + else: + break + + def _try_take_over(self): + """Tries to take over as the main watcher.""" + with self.lock_file: + curr_data = json.loads(self.resource_file.read_text()) + if curr_data["main_status"] in ("action_error", "error"): + curr_data["main_status"] = "recovering" + curr_data["main_watcher"] = self.id + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + self.is_main = True + self.is_recovering = True + self.wait() + + def register(self): + """Registers the current process as a watcher.""" + with self.lock_file: + if self.resource_file.exists(): + curr_data = json.loads(self.resource_file.read_text()) + self.is_main = False + else: # First watcher to register, becomes the main watcher, and creates the file + curr_data = { + "watchers": [], + "statuses": {}, + "main_watcher": self.id, + "main_status": "waiting", + } + self.is_main = True + curr_data["watchers"].append(self.id) + curr_data["statuses"][self.id] = "pending" + self.resource_file.write_text(json.dumps(curr_data, indent=4)) + + def ready(self): + """Marks the current process as ready to perform the action.""" + self._update_status("ready") + self.wait() + + def done(self): + """Marks the current process as done performing post actions.""" + self._update_status("done") + + def act(self): + """Attempt to perform the action.""" + try: + self.action(*self.action_args, **self.action_kwargs) + except Exception as err: + self._update_main_status("error") + raise err + + def wait(self): + """Top-level wait function, separating 
behavior between main and non-main watchers.""" + if self.is_main and not (self.is_recovering and not self.action_is_recoverable): + self._wait_for_status("ready") + self._update_main_status("acting") + self.act() + self._update_main_status("done") + else: + self._wait_for_main_watcher() + + def __enter__(self): + """Registers the current process as a watcher and returns the instance.""" + self.register() + return self + + def __exit__(self, exc_type, exc_value, traceback): + """Marks the current process as done and updates the main watcher if needed.""" + if exc_type is FileNotFoundError: + raise exc_value + if exc_type is None: + self.done() + if self.is_main: + self._wait_for_status("done") + self.resource_file.unlink() + else: + self._update_status("error") + if self.is_main: + self._update_main_status("error") + raise exc_value diff --git a/scripts/config_helpers.py b/scripts/config_helpers.py index 85283bea05e..feb37c9bd62 100644 --- a/scripts/config_helpers.py +++ b/scripts/config_helpers.py @@ -14,8 +14,8 @@ def merge_nested_dictionaries(original, new, overwrite=False): # if the key is not in the original, add it if key not in original: original[key] = value - # if the key is in the original, and the value is a dictionary, recurse - elif isinstance(value, dict): + # if the key is in the original, and original[key] and value are dictionaries, recurse + elif isinstance(original[key], dict) and isinstance(value, dict): # use deepdiff to check if the dictionaries are the same if deepdiff.DeepDiff(original[key], value): original[key] = merge_nested_dictionaries(original[key], value, overwrite) @@ -24,6 +24,7 @@ def merge_nested_dictionaries(original, new, overwrite=False): # if the key is in the original, and the value is a list, ask the user elif overwrite == "ask": choice_prompt = ( + "-------------------------\n" f"The current value for {key} is {original[key]}.\n" "Please choose an option:\n" "1. 
Keep the current value\n" diff --git a/tests/robottelo/test_shared_resource.py b/tests/robottelo/test_shared_resource.py new file mode 100644 index 00000000000..ff2146cd80a --- /dev/null +++ b/tests/robottelo/test_shared_resource.py @@ -0,0 +1,61 @@ +import multiprocessing +from pathlib import Path +import random +from threading import Thread +import time + +from robottelo.utils.shared_resource import SharedResource + + +def upgrade_action(*args, **kwargs): + print(f"Upgrading satellite with {args=} and {kwargs=}") + time.sleep(1) + print("Satellite upgraded!") + + +def run_resource(resource_name): + time.sleep(random.random() * 5) # simulate random pre-setup + with SharedResource(resource_name, upgrade_action) as resource: + assert Path(f"/tmp/{resource_name}.shared").exists() + time.sleep(5) # simulate setup actions + resource.ready() + time.sleep(1) # simulate cleanup actions + + +def test_shared_resource(): + """Test the SharedResource class.""" + with SharedResource("test_resource", upgrade_action, 1, 2, 3, foo="bar") as resource: + assert Path("/tmp/test_resource.shared").exists() + assert resource.is_main + assert not resource.is_recovering + assert resource.action == upgrade_action + assert resource.action_args == (1, 2, 3) + assert resource.action_kwargs == {"foo": "bar"} + assert not resource.action_is_recoverable + + resource.ready() + assert resource._check_all_status("ready") + + assert not Path("/tmp/test_resource.shared").exists() + + +def test_shared_resource_multiprocessing(): + """Test the SharedResource class with multiprocessing.""" + with multiprocessing.Pool(2) as pool: + pool.map(run_resource, ["test_resource_mp", "test_resource_mp"]) + + assert not Path("/tmp/test_resource_mp.shared").exists() + + +def test_shared_resource_multithreading(): + """Test the SharedResource class with multithreading.""" + t1 = Thread(target=run_resource, args=("test_resource_th",)) + t2 = Thread(target=run_resource, args=("test_resource_th",)) + + t1.start() + 
t2.start() + + t1.join() + t2.join() + + assert not Path("/tmp/test_resource_th.shared").exists() From adf6b7e970b0de5c2003fb413c8e37ec55c404f9 Mon Sep 17 00:00:00 2001 From: vsedmik <46570670+vsedmik@users.noreply.github.com> Date: Tue, 14 Nov 2023 17:43:00 +0100 Subject: [PATCH 84/96] Add API tests for capsule content counts (#13068) This PR adds test cases to verify capsule content counts via API for unfiltered and filtered mixed-content CVs and few additions to support it. --- requirements.txt | 1 + robottelo/constants/__init__.py | 2 +- robottelo/host_helpers/repository_mixins.py | 34 ++++ tests/foreman/api/test_capsulecontent.py | 186 +++++++++++++++++++- 4 files changed, 220 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index efde8050ec6..87bdbc306fe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,7 @@ productmd==1.37 pyotp==2.9.0 python-box==7.1.1 pytest==7.4.3 +pytest-order==1.1.0 pytest-services==2.2.1 pytest-mock==3.12.0 pytest-reportportal==5.3.0 diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index 2cf9b98c887..f85dc863727 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -222,7 +222,7 @@ class Colored(Box): 'yum': "yum", 'ostree': "ostree", 'docker': "docker", - 'ansible_collection': "ansible collection", + 'ansible_collection': "ansible_collection", 'file': "file", } diff --git a/robottelo/host_helpers/repository_mixins.py b/robottelo/host_helpers/repository_mixins.py index d5d532b89f9..6af681ff104 100644 --- a/robottelo/host_helpers/repository_mixins.py +++ b/robottelo/host_helpers/repository_mixins.py @@ -123,6 +123,12 @@ class YumRepository(BaseRepository): _type = constants.REPO_TYPE['yum'] +class FileRepository(BaseRepository): + """Custom File repository""" + + _type = constants.REPO_TYPE['file'] + + class DockerRepository(BaseRepository): """Custom Docker repository""" @@ -151,6 +157,34 @@ def create(self, organization_id, 
product_id, download_policy=None, synchronize= return repo_info +class AnsibleRepository(BaseRepository): + """Custom Ansible Collection repository""" + + _type = constants.REPO_TYPE['ansible_collection'] + + def __init__(self, url=None, distro=None, requirements=None): + self._requirements = requirements + super().__init__(url=url, distro=distro) + + @property + def requirements(self): + return self._requirements + + def create(self, organization_id, product_id, download_policy=None, synchronize=True): + repo_info = self.satellite.cli_factory.make_repository( + { + 'product-id': product_id, + 'content-type': self.content_type, + 'url': self.url, + 'ansible-collection-requirements': f'{{collections: {self.requirements}}}', + } + ) + self._repo_info = repo_info + if synchronize: + self.synchronize() + return repo_info + + class OSTreeRepository(BaseRepository): """Custom OSTree repository""" diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index 2e72bc8998a..f809dda6cc9 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -27,8 +27,12 @@ from robottelo import constants from robottelo.config import settings -from robottelo.constants import DataFile -from robottelo.constants.repos import ANSIBLE_GALAXY +from robottelo.constants import ( + CONTAINER_REGISTRY_HUB, + CONTAINER_UPSTREAM_NAME, + DataFile, +) +from robottelo.constants.repos import ANSIBLE_GALAXY, CUSTOM_FILE_REPO from robottelo.content_info import ( get_repo_files_by_url, get_repomd, @@ -1453,3 +1457,181 @@ def test_positive_capsule_sync_openstack_container_repos( assert len(cvv.environment) == 2 module_capsule_configured.wait_for_sync() + + @pytest.mark.stream + @pytest.mark.parametrize( + 'repos_collection', + [ + { + 'distro': 'rhel8', + 'YumRepository': {'url': settings.repos.module_stream_1.url}, + 'FileRepository': {'url': CUSTOM_FILE_REPO}, + 'DockerRepository': { + 'url': CONTAINER_REGISTRY_HUB, + 
'upstream_name': CONTAINER_UPSTREAM_NAME, + }, + 'AnsibleRepository': { + 'url': ANSIBLE_GALAXY, + 'requirements': [ + {'name': 'theforeman.foreman', 'version': '2.1.0'}, + {'name': 'theforeman.operations', 'version': '0.1.0'}, + ], + }, + } + ], + indirect=True, + ) + @pytest.mark.parametrize('filtered', [False, True], ids=['unfiltered', 'filtered']) + def test_positive_content_counts_for_mixed_cv( + self, + target_sat, + module_capsule_configured, + repos_collection, + function_org, + function_lce, + function_lce_library, + filtered, + ): + """Verify the content counts for a mixed-content CV + + :id: d8a0dea1-d30c-4c30-b3b1-46316de4ff29 + + :parametrized: yes + + :setup: + 1. A content view with repos of all content types (currently yum, file, docker, AC) + published into (unfiltered and filtered) CVV and promoted to an LCE. + + :steps: + 1. Assign the Capsule with Library and the LCE where the setup CVV is promoted to. + 2. Check the capsule doesn't provide any content counts for the setup CVV until synced. + 3. Sync the Capsule and get the content counts again. We should get counts for every + repo in the CVV multiplied by shared LCEs (LCEs where the CVV is promoted to and + synced to the Capsule, including Library). + 4. Get the content counts from Satellite side and compare them with Capsule. + + :expectedresults: + 1. Capsule doesn't return any counts for CVV until it is synced. + 2. After sync, content counts from Capsule match those from Satellite. 
+ """ + expected_keys = { + 'yum': {'rpm', 'package_group', 'module_stream', 'erratum'}, + 'file': {'file'}, + 'docker': {'docker_tag', 'docker_manifest', 'docker_manifest_list'}, + 'ansible_collection': {'ansible_collection'}, + } + + repos_collection.setup_content(function_org.id, function_lce.id, upload_manifest=False) + cv_id = repos_collection.setup_content_data['content_view']['id'] + cv = target_sat.api.ContentView(id=cv_id).read() + + if filtered: + for filter_type in ['rpm', 'docker']: + cvf = target_sat.api.AbstractContentViewFilter( + type=filter_type, + content_view=cv, + inclusion=True, + ).create() + target_sat.api.ContentViewFilterRule( + content_view_filter=cvf, name='cat' if filter_type == 'rpm' else 'latest' + ).create() + cv.publish() + cv = cv.read() + cv.version.sort(key=lambda version: version.id) + + cvv = cv.version[-1].read() + + # Assign the Capsule with both content LCEs + module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( + data={'environment_id': [function_lce.id, function_lce_library.id]} + ) + capsule_lces = module_capsule_configured.nailgun_capsule.content_lifecycle_environments()[ + 'results' + ] + assert len(capsule_lces) + assert {function_lce.id, function_lce_library.id}.issubset( + [lce['id'] for lce in capsule_lces] + ) + + # Check the counts for CVV are not present at the Capsule side before sync. + caps_counts = module_capsule_configured.nailgun_capsule.content_counts() + assert caps_counts is None or cvv.id not in caps_counts['content_view_versions'].keys() + + # Sync, wait for counts to be updated and get them from the Capsule. + sync_status = module_capsule_configured.nailgun_capsule.content_sync() + assert sync_status['result'] == 'success', 'Capsule sync task failed.' 
+ + target_sat.wait_for_tasks( + search_query=('label = Actions::Katello::CapsuleContent::UpdateContentCounts'), + search_rate=5, + max_tries=10, + ) + + caps_counts = module_capsule_configured.nailgun_capsule.content_counts()[ + 'content_view_versions' + ] + assert str(cvv.id) in caps_counts.keys(), 'CVV is missing in content counts.' + caps_counts = caps_counts[str(cvv.id)] + + # Every "environment repo" (the one promoted to an LCE and synced to the Capsule) + # is shown in the content_counts, so we get N-times more for every shared lce. + shared_lces = {env.id for env in cvv.environment} & {env['id'] for env in capsule_lces} + assert len(caps_counts['repositories']) == len(cvv.repository) * len( + shared_lces + ), 'Repositories count does not match.' + + # Read the environment repos from Satellite side and compare the counts with Capsule. + sat_repos = [ + target_sat.api.Repository(id=repo).read() for repo in caps_counts['repositories'] + ] + for repo in sat_repos: + cnt = caps_counts['repositories'][str(repo.id)] + assert repo.content_type == cnt['metadata']['content_type'] + common_keys = set(repo.content_counts.keys()) & set(cnt['counts'].keys()) + assert len(common_keys), f'No common keys found for type "{repo.content_type}".' + assert expected_keys[repo.content_type].issubset(common_keys), ( + 'Some fields are missing: expected ' + f'{expected_keys[repo.content_type]} but found {common_keys}' + ) + assert all( + [repo.content_counts.get(key) == cnt['counts'].get(key) for key in common_keys] + ) + + @pytest.mark.stream + @pytest.mark.order(1) + def test_positive_content_counts_blank_update( + self, + target_sat, + module_capsule_configured, + ): + """Verify the content counts and update endpoint for a blank Capsule. + + :id: da9c993e-258e-4215-9d8f-f0feced412d0 + + :setup: + 1. A blank unsynced Capsule. + + :steps: + 1. Get content counts from a blank capsule. + 2. Run content counts update via API. + 3. Check no content counts yet. 
+ + :expectedresults: + 1. Capsule returns None for content counts. + 2. Content update task is created and succeeds. + 3. Capsule keeps returning None or empty list for content counts. + + :CaseImportance: Medium + """ + counts = module_capsule_configured.nailgun_capsule.content_counts() + assert counts is None + + task = module_capsule_configured.nailgun_capsule.content_update_counts() + assert task, 'No task was created for content update.' + assert 'Actions::Katello::CapsuleContent::UpdateContentCounts' in task['label'] + assert 'success' in task['result'] + + counts = module_capsule_configured.nailgun_capsule.content_counts() + assert ( + counts is None or len(counts['content_view_versions']) == 0 + ), f"No content counts expected, but got:\n{counts['content_view_versions']}." From 1ba42180868398530451d1dc25e76d16e74ff5ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 13:16:02 +0530 Subject: [PATCH 85/96] Bump deepdiff from 6.7.0 to 6.7.1 (#13091) Bumps [deepdiff](https://github.com/seperman/deepdiff) from 6.7.0 to 6.7.1. - [Release notes](https://github.com/seperman/deepdiff/releases) - [Changelog](https://github.com/seperman/deepdiff/blob/master/docs/changelog.rst) - [Commits](https://github.com/seperman/deepdiff/commits/6.7.1) --- updated-dependencies: - dependency-name: deepdiff dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 87bdbc306fe..533993a1fc9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ betelgeuse==1.10.0 broker[docker]==0.4.1 cryptography==41.0.5 -deepdiff==6.7.0 +deepdiff==6.7.1 dynaconf[vault]==3.2.4 fauxfactory==3.1.0 jinja2==3.1.2 From adcc7d01a1a139341cdb70f0d9a42914aeb59211 Mon Sep 17 00:00:00 2001 From: Griffin Sullivan Date: Wed, 8 Nov 2023 14:41:44 -0500 Subject: [PATCH 86/96] Add test for satellite package removal --- tests/foreman/destructive/test_packages.py | 32 ++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/tests/foreman/destructive/test_packages.py b/tests/foreman/destructive/test_packages.py index 05c44bf1501..4797588c696 100644 --- a/tests/foreman/destructive/test_packages.py +++ b/tests/foreman/destructive/test_packages.py @@ -20,6 +20,8 @@ import pytest +from robottelo.hosts import Satellite + pytestmark = pytest.mark.destructive @@ -58,3 +60,33 @@ def test_positive_all_packages_update(target_sat): assert matches is None # No packages available to update assert 'FAIL' not in result.stdout assert result.status == 0 + + +@pytest.mark.include_capsule +def test_negative_remove_satellite_packages(target_sat): + """Ensure user can't remove satellite or its dependent packages + + :id: af150302-418a-4d42-8d01-bb0e6b90f81f + + :steps: + 1. 
yum remove + + :expectedresults: removal should fail due to protecting the satellite package + + :BZ: 1884395 + + :customerscenario: true + """ + # Packages include satellite direct dependencies like foreman, + # but also dependency of dependencies like wget for foreman + if isinstance(target_sat, Satellite): + package_list = ['foreman', 'foreman-proxy', 'katello', 'wget', 'satellite'] + else: + package_list = ['foreman-proxy', 'satellite-capsule'] + for package in package_list: + result = target_sat.execute(f'yum remove {package}') + assert result.status != 0 + assert ( + 'Problem: The operation would result in removing the following protected packages: satellite' + in result.stdout + ) From 5405d12dfb0e66ec879707b1af85c11bb9afc901 Mon Sep 17 00:00:00 2001 From: Griffin Sullivan Date: Wed, 8 Nov 2023 15:01:07 -0500 Subject: [PATCH 87/96] Change target_sat to sat_maintain in destructive/test_packages --- tests/foreman/destructive/test_packages.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/foreman/destructive/test_packages.py b/tests/foreman/destructive/test_packages.py index 4797588c696..4e7fb07c9a6 100644 --- a/tests/foreman/destructive/test_packages.py +++ b/tests/foreman/destructive/test_packages.py @@ -26,7 +26,7 @@ @pytest.mark.include_capsule -def test_positive_all_packages_update(target_sat): +def test_positive_all_packages_update(sat_maintain): """Verify update and check-update work as expected. 
:id: eb8a5611-b1a8-4a18-b80e-56b045c0d2f6 @@ -44,15 +44,15 @@ def test_positive_all_packages_update(target_sat): :customerscenario: true """ # Register to CDN for package updates - target_sat.register_to_cdn() + sat_maintain.register_to_cdn() # Update packages with yum - result = target_sat.execute('yum update -y --disableplugin=foreman-protector') + result = sat_maintain.execute('yum update -y --disableplugin=foreman-protector') assert result.status == 0 # Reboot - if target_sat.execute('needs-restarting -r').status == 1: - target_sat.power_control(state='reboot') + if sat_maintain.execute('needs-restarting -r').status == 1: + sat_maintain.power_control(state='reboot') # Run check-update again to verify there are no more packages available to update - result = target_sat.cli.Packages.check_update() + result = sat_maintain.cli.Packages.check_update() # Regex to match if there are packages available to update # Matches lines like '\n\nwalrus.noarch 5.21-1 custom_repo\n' pattern = '(\\n){1,2}(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\n)' @@ -63,7 +63,7 @@ def test_positive_all_packages_update(target_sat): @pytest.mark.include_capsule -def test_negative_remove_satellite_packages(target_sat): +def test_negative_remove_satellite_packages(sat_maintain): """Ensure user can't remove satellite or its dependent packages :id: af150302-418a-4d42-8d01-bb0e6b90f81f @@ -79,14 +79,14 @@ def test_negative_remove_satellite_packages(target_sat): """ # Packages include satellite direct dependencies like foreman, # but also dependency of dependencies like wget for foreman - if isinstance(target_sat, Satellite): + if isinstance(sat_maintain, Satellite): package_list = ['foreman', 'foreman-proxy', 'katello', 'wget', 'satellite'] else: package_list = ['foreman-proxy', 'satellite-capsule'] for package in package_list: - result = target_sat.execute(f'yum remove {package}') + result = sat_maintain.execute(f'yum remove {package}') assert result.status != 0 assert ( 'Problem: The operation would result 
in removing the following protected packages: satellite' - in result.stdout + in str(result.stderr[1]) ) From bc53d944e9f7869efb43d722f60884e10ab61a55 Mon Sep 17 00:00:00 2001 From: Jacob Callahan Date: Wed, 1 Nov 2023 10:57:33 -0400 Subject: [PATCH 88/96] Remove BROKER_DIRECTORY from robottelo config This is something that broker handles on its own and, due to import timing, isn't handled correctly in robottelo anyway. Removing the field and config mechanism should eliminate confusion around this behavior. --- conf/broker.yaml.template | 4 +--- robottelo/config/__init__.py | 9 --------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/conf/broker.yaml.template b/conf/broker.yaml.template index 5f5067c321a..0c039df13bb 100644 --- a/conf/broker.yaml.template +++ b/conf/broker.yaml.template @@ -1,7 +1,5 @@ BROKER: - # The path where your broker settings and inventory are located - # If you leave it blank, the default is the output of `broker --version` - BROKER_DIRECTORY: + # Broker has its own config which you can find by running `broker --version` HOST_WORKFLOWS: POWER_CONTROL: vm-power-operation EXTEND: extend-vm diff --git a/robottelo/config/__init__.py b/robottelo/config/__init__.py index e078d2fcc63..2d66d00054b 100644 --- a/robottelo/config/__init__.py +++ b/robottelo/config/__init__.py @@ -48,15 +48,6 @@ def get_settings(): settings = get_settings() - - -if not os.getenv('BROKER_DIRECTORY'): - # set the BROKER_DIRECTORY envar so broker knows where to operate from - if _broker_dir := settings.robottelo.get('BROKER_DIRECTORY'): - logger.debug(f'Setting BROKER_DIRECTORY to {_broker_dir}') - os.environ['BROKER_DIRECTORY'] = _broker_dir - - robottelo_tmp_dir = Path(settings.robottelo.tmp_dir) robottelo_tmp_dir.mkdir(parents=True, exist_ok=True) From f10676ef421b3dbc69e18b86e0a2850e95f28c62 Mon Sep 17 00:00:00 2001 From: Jacob Callahan Date: Wed, 27 Sep 2023 16:01:47 -0400 Subject: [PATCH 89/96] Add a mechanism to swap nailgun versions on demand 
Satellite._swap_nailgun("x.y.z") can be used to change out the operating version of nailgun. --- robottelo/hosts.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/robottelo/hosts.py b/robottelo/hosts.py index d8838a9606b..42edd4c446e 100644 --- a/robottelo/hosts.py +++ b/robottelo/hosts.py @@ -1727,6 +1727,18 @@ def __init__(self, hostname=None, **kwargs): self._api = type('api', (), {'_configured': False}) self._cli = type('cli', (), {'_configured': False}) + def _swap_nailgun(self, new_version): + """Install a different version of nailgun from GitHub and invalidate the module cache.""" + import sys + + from pip._internal import main as pip_main + + pip_main(['uninstall', '-y', 'nailgun']) + pip_main(['install', f'https://github.com/SatelliteQE/nailgun/archive/{new_version}.zip']) + self._api = type('api', (), {'_configured': False}) + to_clear = [k for k in sys.modules.keys() if 'nailgun' in k] + [sys.modules.pop(k) for k in to_clear] + @property def api(self): """Import all nailgun entities and wrap them under self.api""" @@ -1734,7 +1746,7 @@ def api(self): self._api = type('api', (), {'_configured': False}) if self._api._configured: return self._api - + from nailgun import entities as _entities # use a private import from nailgun.config import ServerConfig from nailgun.entity_mixins import Entity @@ -1754,7 +1766,7 @@ class DecClass(cls): verify=settings.server.verify_ca, ) # add each nailgun entity to self.api, injecting our server config - for name, obj in entities.__dict__.items(): + for name, obj in _entities.__dict__.items(): try: if Entity in obj.mro(): # create a copy of the class and inject our server config into the __init__ From 406f80297824a2aeacb17ecb7a0b5828abf35683 Mon Sep 17 00:00:00 2001 From: Vladimir Sedmik Date: Mon, 13 Nov 2023 15:07:50 +0100 Subject: [PATCH 90/96] Add test for SCA subscription report --- tests/foreman/api/test_reporttemplates.py | 86 +++++++++++++++++++++++ 1 file changed, 86 
insertions(+) diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py index 7d6bf4d2e1d..bdc65fd2592 100644 --- a/tests/foreman/api/test_reporttemplates.py +++ b/tests/foreman/api/test_reporttemplates.py @@ -16,6 +16,8 @@ :Upstream: No """ +import re + from broker import Broker from fauxfactory import gen_string import pytest @@ -24,6 +26,7 @@ from robottelo.config import settings from robottelo.constants import ( + DEFAULT_ARCHITECTURE, DEFAULT_SUBSCRIPTION_NAME, FAKE_1_CUSTOM_PACKAGE, FAKE_1_CUSTOM_PACKAGE_NAME, @@ -836,3 +839,86 @@ def test_positive_installable_errata( installable_errata = report[0] assert FAKE_1_CUSTOM_PACKAGE_NAME in installable_errata['Packages'] assert installable_errata['Erratum'] == ERRATUM_ID + + +@pytest.mark.tier2 +@pytest.mark.rhel_ver_match(r'^(?!6$)\d+$') +def test_positive_installed_products( + target_sat, + rhel_contenthost, + default_location, + function_sca_manifest_org, +): + """Generate 'Host - Installed Products' report for an SCA host. + + :id: d290daa2-aaba-4f4d-8eee-d8a540415320 + + :parametrized: yes + + :setup: + 1. RH content published in a CV, promoted to LCE, AK created. + All inside an SCA-enabled organization. + 2. A RHEL content host. + + :steps: + 1. Register the content host using the AK. + 2. Generate 'Host - Installed Products' report. + 3. Verify the report generated from the template. + + :expectedresults: + 1. Report is generated with correct values. 
+ + :CaseImportance: Medium + """ + org = function_sca_manifest_org + lce_name = gen_string('alpha') + cv_name = gen_string('alpha') + + rh_repo = { + 'basearch': DEFAULT_ARCHITECTURE, + 'product': REPOS['rhae2']['product'], + 'name': REPOS['rhae2']['name'], + 'reposet': REPOS['rhae2']['reposet'], + 'releasever': None, + } + repo_id = target_sat.api_factory.enable_sync_redhat_repo(rh_repo, org.id) + cv = target_sat.api_factory.cv_publish_promote(cv_name, lce_name, repo_id, org.id) + ak = target_sat.api.ActivationKey( + content_view=cv, organization=org, environment=cv.environment[-1] + ).create() + + rhel_contenthost.register(org, default_location, ak.name, target_sat) + assert rhel_contenthost.subscribed, 'Host registration failed.' + + input_data = { + 'organization_id': org.id, + 'report_format': "json", + 'input_values': { + 'hosts': rhel_contenthost.hostname, + }, + } + report = ( + target_sat.api.ReportTemplate() + .search(query={'search': 'name="Host - Installed Products"'})[0] + .read() + .generate(data=input_data) + ) + assert report + assert report[0]['Host Name'] == rhel_contenthost.hostname, 'Incorrect host was reported.' + assert report[0]['Organization'] == org.name, 'Incorrect org was reported.' + assert report[0]['Lifecycle Environment'] == lce_name, 'Incorrect LCE was reported.' + assert report[0]['Content View'] == cv_name, 'Incorrect CV was reported.' + + # Get the installed products via rake and compare them with report + rake = target_sat.execute( + f'echo "Host.find_by(name: \'{rhel_contenthost.hostname}\').' + 'subscription_facet.installed_products" | foreman-rake console' + ) + assert rake.status == 0, f'Rake call failed with this output:\n({rake.stdout}).' + + pattern = re.compile(r'name: "(.*?)".*?cp_product_id: "(.*?)"') + matches = pattern.findall(rake.stdout) + products = [f"{match[0]} ({match[1]})" for match in matches] + assert len(products), 'No installed products to compare.' 
+ + assert set(products) == set(report[0]['Products']), 'Reported products do not match.' From d1ad648ace712cb9c653e58f3bd27fccc1d5d7f8 Mon Sep 17 00:00:00 2001 From: Vladimir Sedmik Date: Tue, 14 Nov 2023 18:41:15 +0100 Subject: [PATCH 91/96] Add checks for Role and Usage syspurpose tags --- tests/foreman/api/test_reporttemplates.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py index bdc65fd2592..b8f42245150 100644 --- a/tests/foreman/api/test_reporttemplates.py +++ b/tests/foreman/api/test_reporttemplates.py @@ -873,6 +873,12 @@ def test_positive_installed_products( org = function_sca_manifest_org lce_name = gen_string('alpha') cv_name = gen_string('alpha') + sys_tags = {'role': gen_string('alpha'), 'usage': gen_string('alpha')} + + for key, val in sys_tags.items(): + assert ( + rhel_contenthost.execute(f'subscription-manager {key} --set {val}').status == 0 + ), f'Setting of {key} failed.' rh_repo = { 'basearch': DEFAULT_ARCHITECTURE, @@ -903,11 +909,13 @@ def test_positive_installed_products( .read() .generate(data=input_data) ) - assert report + assert report, 'No report generated.' assert report[0]['Host Name'] == rhel_contenthost.hostname, 'Incorrect host was reported.' assert report[0]['Organization'] == org.name, 'Incorrect org was reported.' assert report[0]['Lifecycle Environment'] == lce_name, 'Incorrect LCE was reported.' assert report[0]['Content View'] == cv_name, 'Incorrect CV was reported.' + assert report[0]['Role'] == sys_tags['role'], 'Incorrect role was reported.' + assert report[0]['Usage'] == sys_tags['usage'], 'Incorrect usage was reported.' 
# Get the installed products via rake and compare them with report rake = target_sat.execute( From 3e1f600d6353dd1bb400e9f670979788d15480ec Mon Sep 17 00:00:00 2001 From: Vladimir Sedmik Date: Wed, 15 Nov 2023 11:25:47 +0100 Subject: [PATCH 92/96] Address comments --- tests/foreman/api/test_reporttemplates.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py index b8f42245150..610f9cf6eea 100644 --- a/tests/foreman/api/test_reporttemplates.py +++ b/tests/foreman/api/test_reporttemplates.py @@ -861,9 +861,10 @@ def test_positive_installed_products( 2. A RHEL content host. :steps: - 1. Register the content host using the AK. - 2. Generate 'Host - Installed Products' report. - 3. Verify the report generated from the template. + 1. Set syspurpose tags of the content host via subman. + 2. Register the content host using the AK. + 3. Generate 'Host - Installed Products' report. + 4. Verify the report generated from the template. :expectedresults: 1. Report is generated with correct values. @@ -917,7 +918,7 @@ def test_positive_installed_products( assert report[0]['Role'] == sys_tags['role'], 'Incorrect role was reported.' assert report[0]['Usage'] == sys_tags['usage'], 'Incorrect usage was reported.' - # Get the installed products via rake and compare them with report + # Get the installed products via rake and compare them with generated report rake = target_sat.execute( f'echo "Host.find_by(name: \'{rhel_contenthost.hostname}\').' 'subscription_facet.installed_products" | foreman-rake console' @@ -926,7 +927,7 @@ def test_positive_installed_products( pattern = re.compile(r'name: "(.*?)".*?cp_product_id: "(.*?)"') matches = pattern.findall(rake.stdout) - products = [f"{match[0]} ({match[1]})" for match in matches] + products = [f'{match[0]} ({match[1]})' for match in matches] assert len(products), 'No installed products to compare.' 
assert set(products) == set(report[0]['Products']), 'Reported products do not match.' From 0cd07ec9a216d88fe77e7461ea7797e3a59ed4a5 Mon Sep 17 00:00:00 2001 From: Gaurav Talreja Date: Thu, 16 Nov 2023 19:01:15 +0530 Subject: [PATCH 93/96] Bump el9 kickstart version to 9.3 (#13069) Signed-off-by: Gaurav Talreja --- robottelo/constants/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py index f85dc863727..fc37d74a22f 100644 --- a/robottelo/constants/__init__.py +++ b/robottelo/constants/__init__.py @@ -546,16 +546,16 @@ class Colored(Box): }, 'rhel9_bos': { 'id': 'rhel-9-for-x86_64-baseos-kickstart', - 'name': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS Kickstart 9.2', - 'version': '9.2', + 'name': 'Red Hat Enterprise Linux 9 for x86_64 - BaseOS Kickstart 9.3', + 'version': '9.3', 'reposet': REPOSET['kickstart']['rhel9'], 'product': PRDS['rhel9'], 'distro': 'rhel9', }, 'rhel9_aps': { 'id': 'rhel-9-for-x86_64-appstream-kickstart', - 'name': 'Red Hat Enterprise Linux 9 for x86_64 - AppStream Kickstart 9.2', - 'version': '9.2', + 'name': 'Red Hat Enterprise Linux 9 for x86_64 - AppStream Kickstart 9.3', + 'version': '9.3', 'reposet': REPOSET['kickstart']['rhel9_aps'], 'product': PRDS['rhel9'], 'distro': 'rhel9', From f6024657f357d0101efa7181d018732abd30f503 Mon Sep 17 00:00:00 2001 From: Shubham Ganar <67952129+shubhamsg199@users.noreply.github.com> Date: Thu, 16 Nov 2023 19:04:38 +0530 Subject: [PATCH 94/96] Test fix for discovery reboot_all scenario (#13102) Fix discovery test for reboot all scenario --- tests/foreman/api/test_discoveredhost.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/foreman/api/test_discoveredhost.py b/tests/foreman/api/test_discoveredhost.py index 70beeba274e..7d260b3d5e8 100644 --- a/tests/foreman/api/test_discoveredhost.py +++ b/tests/foreman/api/test_discoveredhost.py @@ -414,7 +414,6 @@ def test_positive_reboot_all_pxe_hosts( 
provision_multiple_hosts, provisioning_hostgroup, pxe_loader, - count, ): """Rebooting all pxe-based discovered hosts From e208153bfb11ab1c5a9f1079164971e2457f9ad5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Nov 2023 20:06:03 +0530 Subject: [PATCH 95/96] Bump pytest-xdist from 3.3.1 to 3.4.0 (#13076) Bumps [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) from 3.3.1 to 3.4.0. - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v3.3.1...v3.4.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 533993a1fc9..86f7aafb96d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ pytest-order==1.1.0 pytest-services==2.2.1 pytest-mock==3.12.0 pytest-reportportal==5.3.0 -pytest-xdist==3.3.1 +pytest-xdist==3.4.0 pytest-ibutsu==2.2.4 PyYAML==6.0.1 requests==2.31.0 From 6e804fa7bbdd1e89ec49f719fe3eb49636f305b4 Mon Sep 17 00:00:00 2001 From: David Moore <109112035+damoore044@users.noreply.github.com> Date: Thu, 16 Nov 2023 14:37:14 -0500 Subject: [PATCH 96/96] Stream fix for e2e api http_proxy failure (#13064) * Stream fix for e2e http_proxy failure * Lastest prt failure, now seeing same issue with rh_repo * Repo discovery fix --- tests/foreman/api/test_http_proxy.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index 0b4446069bb..1e6da4dfcf6 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -69,6 +69,7 @@ def 
test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifes reposet=constants.REPOSET['rhae2'], releasever=None, ) + module_target_sat.api.Repository(id=rh_repo_id).sync() rh_repo = module_target_sat.api.Repository( id=rh_repo_id, http_proxy_policy=http_proxy_policy, @@ -96,12 +97,13 @@ def test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifes # Use global_default_http_proxy repo_options['http_proxy_policy'] = 'global_default_http_proxy' repo_2 = module_target_sat.api.Repository(**repo_options).create() + repo_2.sync() assert repo_2.http_proxy_policy == 'global_default_http_proxy' # Update to selected_http_proxy repo_2.http_proxy_policy = 'none' repo_2.update(['http_proxy_policy']) - assert repo_2.http_proxy_policy == 'none' + assert repo_2.read().http_proxy_policy == 'none' # test scenario for yum type repo discovery. repo_name = 'fakerepo01' @@ -116,16 +118,16 @@ def test_positive_end_to_end(setup_http_proxy, module_target_sat, module_manifes assert yum_repo['output'][0] == f'{settings.repos.repo_discovery.url}/{repo_name}/' # test scenario for docker type repo discovery. - yum_repo = module_target_sat.api.Organization(id=module_manifest_org.id).repo_discover( + docker_repo = module_target_sat.api.Organization(id=module_manifest_org.id).repo_discover( data={ "id": module_manifest_org.id, "url": 'quay.io', "content_type": "docker", - "search": 'quay/busybox', + "search": 'foreman/foreman', } ) - assert len(yum_repo['output']) >= 1 - assert 'quay/busybox' in yum_repo['output'] + assert len(docker_repo['output']) > 0 + assert docker_repo['result'] == 'success' @pytest.mark.upgrade