diff --git a/tests/foreman/api/test_activationkey.py b/tests/foreman/api/test_activationkey.py index 96da8a8f4c8..8c6f9ea194d 100644 --- a/tests/foreman/api/test_activationkey.py +++ b/tests/foreman/api/test_activationkey.py @@ -19,7 +19,7 @@ import http from fauxfactory import gen_integer, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -46,7 +46,7 @@ def _bad_max_hosts(): @pytest.mark.tier1 -def test_positive_create_unlimited_hosts(): +def test_positive_create_unlimited_hosts(target_sat): """Create a plain vanilla activation key. :id: 1d73b8cc-a754-4637-8bae-d9d2aaf89003 @@ -56,12 +56,12 @@ def test_positive_create_unlimited_hosts(): :CaseImportance: Critical """ - assert entities.ActivationKey().create().unlimited_hosts is True + assert target_sat.api.ActivationKey().create().unlimited_hosts is True @pytest.mark.tier1 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_create_limited_hosts(max_host): +def test_positive_create_limited_hosts(max_host, target_sat): """Create an activation key with limited hosts. :id: 9bbba620-fd98-4139-a44b-af8ce330c7a4 @@ -73,14 +73,14 @@ def test_positive_create_limited_hosts(max_host): :parametrized: yes """ - act_key = entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + act_key = target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() assert act_key.max_hosts == max_host assert act_key.unlimited_hosts is False @pytest.mark.tier1 @pytest.mark.parametrize('key_name', **parametrized(valid_data_list())) -def test_positive_create_with_name(key_name): +def test_positive_create_with_name(key_name, target_sat): """Create an activation key providing the initial name. :id: 749e0d28-640e-41e5-89d6-b92411ce73a3 @@ -91,13 +91,13 @@ def test_positive_create_with_name(key_name): :parametrized: yes """ - act_key = entities.ActivationKey(name=key_name).create() + act_key = target_sat.api.ActivationKey(name=key_name).create() assert key_name == act_key.name @pytest.mark.tier2 @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create an activation key and provide a description. :id: 64d93726-6f96-4a2e-ab29-eb5bfa2ff8ff @@ -106,12 +106,12 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - act_key = entities.ActivationKey(description=desc).create() + act_key = target_sat.api.ActivationKey(description=desc).create() assert desc == act_key.description @pytest.mark.tier2 -def test_negative_create_with_no_host_limit(): +def test_negative_create_with_no_host_limit(target_sat): """Create activation key without providing limitation for hosts number :id: a9e756e1-886d-4f0d-b685-36ce4247517d @@ -121,12 +121,12 @@ def test_negative_create_with_no_host_limit(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ActivationKey(unlimited_hosts=False).create() + target_sat.api.ActivationKey(unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_create_with_invalid_host_limit(max_host): +def test_negative_create_with_invalid_host_limit(max_host, target_sat): """Create activation key with invalid limit values for hosts number. 
:id: c018b177-2074-4f1a-a7e0-9f38d6c9a1a6 @@ -138,12 +138,12 @@ def test_negative_create_with_invalid_host_limit(max_host): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() + target_sat.api.ActivationKey(max_hosts=max_host, unlimited_hosts=False).create() @pytest.mark.tier3 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create activation key providing an invalid name. :id: 5f7051be-0320-4d37-9085-6904025ad909 @@ -155,12 +155,12 @@ def test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ActivationKey(name=name).create() + target_sat.api.ActivationKey(name=name).create() @pytest.mark.tier2 @pytest.mark.parametrize('max_host', **parametrized(_good_max_hosts())) -def test_positive_update_limited_host(max_host): +def test_positive_update_limited_host(max_host, target_sat): """Create activation key then update it to limited hosts. :id: 34ca8303-8135-4694-9cf7-b20f8b4b0a1e @@ -170,7 +170,7 @@ def test_positive_update_limited_host(max_host): :parametrized: yes """ # unlimited_hosts defaults to True. - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': max_host, 'unlimited_hosts': False} for key, value in want.items(): setattr(act_key, key, value) @@ -181,7 +181,7 @@ def test_positive_update_limited_host(max_host): @pytest.mark.tier2 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat): """Create activation key providing the initial name, then update its name to another valid name. @@ -192,14 +192,14 @@ def test_positive_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() - updated = entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) + act_key = target_sat.api.ActivationKey().create() + updated = target_sat.api.ActivationKey(id=act_key.id, name=new_name).update(['name']) assert new_name == updated.name @pytest.mark.tier3 @pytest.mark.parametrize('max_host', **parametrized(_bad_max_hosts())) -def test_negative_update_limit(max_host): +def test_negative_update_limit(max_host, target_sat): """Create activation key then update its limit to invalid value. :id: 0f857d2f-81ed-4b8b-b26e-34b4f294edbc @@ -214,7 +214,7 @@ def test_negative_update_limit(max_host): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() want = {'max_hosts': act_key.max_hosts, 'unlimited_hosts': act_key.unlimited_hosts} act_key.max_hosts = max_host act_key.unlimited_hosts = False @@ -227,7 +227,7 @@ def test_negative_update_limit(max_host): @pytest.mark.tier3 @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat): """Create activation key then update its name to an invalid name. 
:id: da85a32c-942b-4ab8-a133-36b028208c4d @@ -239,16 +239,16 @@ def test_negative_update_name(new_name): :parametrized: yes """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, name=new_name).update(['name']) - new_key = entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id, name=new_name).update(['name']) + new_key = target_sat.api.ActivationKey(id=act_key.id).read() assert new_key.name != new_name assert new_key.name == act_key.name @pytest.mark.tier3 -def test_negative_update_max_hosts(): +def test_negative_update_max_hosts(target_sat): """Create an activation key with ``max_hosts == 1``, then update that field with a string value. @@ -258,14 +258,14 @@ def test_negative_update_max_hosts(): :CaseImportance: Low """ - act_key = entities.ActivationKey(max_hosts=1).create() + act_key = target_sat.api.ActivationKey(max_hosts=1).create() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id, max_hosts='foo').update(['max_hosts']) + target_sat.api.ActivationKey(id=act_key.id, max_hosts='foo').update(['max_hosts']) assert act_key.read().max_hosts == 1 @pytest.mark.tier2 -def test_positive_get_releases_status_code(): +def test_positive_get_releases_status_code(target_sat): """Get an activation key's releases. Check response format. :id: e1ea4797-8d92-4bec-ae6b-7a26599825ab @@ -275,7 +275,7 @@ def test_positive_get_releases_status_code(): :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() path = act_key.path('releases') response = client.get(path, auth=get_credentials(), verify=False) status_code = http.client.OK @@ -284,7 +284,7 @@ def test_positive_get_releases_status_code(): @pytest.mark.tier2 -def test_positive_get_releases_content(): +def test_positive_get_releases_content(target_sat): """Get an activation key's releases. Check response contents. :id: 2fec3d71-33e9-40e5-b934-90b03afc26a1 @@ -293,14 +293,14 @@ def test_positive_get_releases_content(): :CaseLevel: Integration """ - act_key = entities.ActivationKey().create() + act_key = target_sat.api.ActivationKey().create() response = client.get(act_key.path('releases'), auth=get_credentials(), verify=False).json() assert 'results' in response.keys() assert type(response['results']) == list @pytest.mark.tier2 -def test_positive_add_host_collections(module_org): +def test_positive_add_host_collections(module_org, module_target_sat): """Associate an activation key with several host collections. :id: 1538808c-621e-4cf9-9b9b-840c5dd54644 @@ -318,23 +318,27 @@ def test_positive_add_host_collections(module_org): :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 # Give activation key one host collection. - act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 1 # Give activation key second host collection. 
- act_key.host_collection.append(entities.HostCollection(organization=module_org).create()) + act_key.host_collection.append( + module_target_sat.api.HostCollection(organization=module_org).create() + ) act_key = act_key.update(['host_collection']) assert len(act_key.host_collection) == 2 @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_host_collection(module_org): +def test_positive_remove_host_collection(module_org, module_target_sat): """Disassociate host collection from the activation key :id: 31992ac4-fe55-45bb-bd17-a191928ec2ab @@ -353,10 +357,10 @@ def test_positive_remove_host_collection(module_org): :CaseImportance: Critical """ # An activation key has no host collections by default. - act_key = entities.ActivationKey(organization=module_org).create() + act_key = module_target_sat.api.ActivationKey(organization=module_org).create() assert len(act_key.host_collection) == 0 - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() # Associate host collection with activation key. act_key.add_host_collection(data={'host_collection_ids': [host_collection.id]}) @@ -368,7 +372,7 @@ def test_positive_remove_host_collection(module_org): @pytest.mark.tier1 -def test_positive_update_auto_attach(): +def test_positive_update_auto_attach(target_sat): """Create an activation key, then update the auto_attach field with the inverse boolean value. @@ -378,17 +382,17 @@ def test_positive_update_auto_attach(): :CaseImportance: Critical """ - act_key = entities.ActivationKey().create() - act_key_2 = entities.ActivationKey(id=act_key.id, auto_attach=(not act_key.auto_attach)).update( - ['auto_attach'] - ) + act_key = target_sat.api.ActivationKey().create() + act_key_2 = target_sat.api.ActivationKey( + id=act_key.id, auto_attach=(not act_key.auto_attach) + ).update(['auto_attach']) assert act_key.auto_attach != act_key_2.auto_attach @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete(name): +def test_positive_delete(name, target_sat): """Create activation key and then delete it. :id: aa28d8fb-e07d-45fa-b43a-fc90c706d633 @@ -399,10 +403,10 @@ def test_positive_delete(name): :parametrized: yes """ - act_key = entities.ActivationKey(name=name).create() + act_key = target_sat.api.ActivationKey(name=name).create() act_key.delete() with pytest.raises(HTTPError): - entities.ActivationKey(id=act_key.id).read() + target_sat.api.ActivationKey(id=act_key.id).read() @pytest.mark.tier2 @@ -503,7 +507,7 @@ def test_positive_add_future_subscription(): @pytest.mark.tier1 -def test_positive_search_by_org(): +def test_positive_search_by_org(target_sat): """Search for all activation keys in an organization. 
:id: aedba598-2e47-44a8-826c-4dc304ba00be @@ -513,8 +517,8 @@ def test_positive_search_by_org(): :CaseImportance: Critical """ - org = entities.Organization().create() - act_key = entities.ActivationKey(organization=org).create() - keys = entities.ActivationKey(organization=org).search() + org = target_sat.api.Organization().create() + act_key = target_sat.api.ActivationKey(organization=org).create() + keys = target_sat.api.ActivationKey(organization=org).search() assert len(keys) == 1 assert act_key.id == keys[0].id diff --git a/tests/foreman/api/test_architecture.py b/tests/foreman/api/test_architecture.py index f88c8adba18..f0b7c428b0f 100644 --- a/tests/foreman/api/test_architecture.py +++ b/tests/foreman/api/test_architecture.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_choice -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -29,7 +28,7 @@ @pytest.mark.tier1 -def test_positive_CRUD(default_os): +def test_positive_CRUD(default_os, target_sat): """Create a new Architecture with several attributes, update the name and delete the Architecture itself. @@ -43,13 +42,13 @@ def test_positive_CRUD(default_os): # Create name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(name=name, operatingsystem=[default_os]).create() + arch = target_sat.api.Architecture(name=name, operatingsystem=[default_os]).create() assert {default_os.id} == {os.id for os in arch.operatingsystem} assert name == arch.name # Update name = gen_choice(list(valid_data_list().values())) - arch = entities.Architecture(id=arch.id, name=name).update(['name']) + arch = target_sat.api.Architecture(id=arch.id, name=name).update(['name']) assert name == arch.name # Delete @@ -60,7 +59,7 @@ def test_positive_CRUD(default_os): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create architecture providing an invalid initial name. :id: 0fa6377d-063a-4e24-b606-b342e0d9108b @@ -74,12 +73,12 @@ def test_negative_create_with_invalid_name(name): :BZ: 1401519 """ with pytest.raises(HTTPError): - entities.Architecture(name=name).create() + target_sat.api.Architecture(name=name).create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) -def test_negative_update_with_invalid_name(name, module_architecture): +def test_negative_update_with_invalid_name(name, module_architecture, module_target_sat): """Update architecture's name to an invalid name. 
:id: cb27b69b-14e0-42d0-9e44-e09d68324803 @@ -91,6 +90,6 @@ def test_negative_update_with_invalid_name(name, module_architecture): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Architecture(id=module_architecture.id, name=name).update(['name']) - arch = entities.Architecture(id=module_architecture.id).read() + module_target_sat.api.Architecture(id=module_architecture.id, name=name).update(['name']) + arch = module_target_sat.api.Architecture(id=module_architecture.id).read() assert arch.name != name diff --git a/tests/foreman/api/test_audit.py b/tests/foreman/api/test_audit.py index 008ebbc7e1f..198228927fe 100644 --- a/tests/foreman/api/test_audit.py +++ b/tests/foreman/api/test_audit.py @@ -16,14 +16,13 @@ :Upstream: No """ -from nailgun import entities import pytest from robottelo.utils.datafactory import gen_string @pytest.mark.tier1 -def test_positive_create_by_type(): +def test_positive_create_by_type(target_sat): """Create entities of different types and check audit logs for these events using entity type as search criteria @@ -39,45 +38,49 @@ def test_positive_create_by_type(): :CaseImportance: Medium """ for entity_item in [ - {'entity': entities.Architecture()}, + {'entity': target_sat.api.Architecture()}, { - 'entity': entities.AuthSourceLDAP(), + 'entity': target_sat.api.AuthSourceLDAP(), 'entity_type': 'auth_source', 'value_template': 'LDAP-{entity.name}', }, - {'entity': entities.ComputeProfile(), 'entity_type': 'compute_profile'}, + {'entity': target_sat.api.ComputeProfile(), 'entity_type': 'compute_profile'}, { - 'entity': entities.LibvirtComputeResource(), + 'entity': target_sat.api.LibvirtComputeResource(), 'entity_type': 'compute_resource', 'value_template': '{entity.name} (Libvirt)', }, - {'entity': entities.Domain()}, - {'entity': entities.Host()}, - {'entity': entities.HostGroup()}, - {'entity': entities.Image(compute_resource=entities.LibvirtComputeResource().create())}, - {'entity': entities.Location()}, - {'entity': entities.Media(), 'entity_type': 'medium'}, + {'entity': target_sat.api.Domain()}, + {'entity': target_sat.api.Host()}, + {'entity': target_sat.api.HostGroup()}, { - 'entity': entities.OperatingSystem(), + 'entity': target_sat.api.Image( + compute_resource=target_sat.api.LibvirtComputeResource().create() + ) + }, + {'entity': target_sat.api.Location()}, + {'entity': target_sat.api.Media(), 'entity_type': 'medium'}, + { + 'entity': target_sat.api.OperatingSystem(), 'entity_type': 'os', 'value_template': '{entity.name} {entity.major}', }, - {'entity': entities.PartitionTable(), 'entity_type': 'ptable'}, - {'entity': entities.Role()}, + {'entity': target_sat.api.PartitionTable(), 'entity_type': 'ptable'}, + {'entity': target_sat.api.Role()}, { - 'entity': entities.Subnet(), + 'entity': target_sat.api.Subnet(), 'value_template': '{entity.name} ({entity.network}/{entity.cidr})', }, - {'entity': entities.ProvisioningTemplate(), 'entity_type': 'provisioning_template'}, - {'entity': entities.User(), 'value_template': '{entity.login}'}, - {'entity': entities.UserGroup()}, - {'entity': entities.ContentView(), 'entity_type': 'katello/content_view'}, - {'entity': entities.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, - {'entity': entities.ActivationKey(), 'entity_type': 'katello/activation_key'}, - {'entity': entities.HostCollection(), 'entity_type': 'katello/host_collection'}, - {'entity': entities.Product(), 'entity_type': 'katello/product'}, + {'entity': target_sat.api.ProvisioningTemplate(), 'entity_type': 
'provisioning_template'}, + {'entity': target_sat.api.User(), 'value_template': '{entity.login}'}, + {'entity': target_sat.api.UserGroup()}, + {'entity': target_sat.api.ContentView(), 'entity_type': 'katello/content_view'}, + {'entity': target_sat.api.LifecycleEnvironment(), 'entity_type': 'katello/kt_environment'}, + {'entity': target_sat.api.ActivationKey(), 'entity_type': 'katello/activation_key'}, + {'entity': target_sat.api.HostCollection(), 'entity_type': 'katello/host_collection'}, + {'entity': target_sat.api.Product(), 'entity_type': 'katello/product'}, { - 'entity': entities.SyncPlan(organization=entities.Organization(id=1)), + 'entity': target_sat.api.SyncPlan(organization=target_sat.api.Organization(id=1)), 'entity_type': 'katello/sync_plan', }, ]: @@ -85,7 +88,7 @@ def test_positive_create_by_type(): entity_type = entity_item.get('entity_type', created_entity.__class__.__name__.lower()) value_template = entity_item.get('value_template', '{entity.name}') entity_value = value_template.format(entity=created_entity) - audits = entities.Audit().search(query={'search': f'type={entity_type}'}) + audits = target_sat.api.Audit().search(query={'search': f'type={entity_type}'}) entity_audits = [entry for entry in audits if entry.auditable_name == entity_value] assert entity_audits, ( f'audit not found by name "{entity_value}" for entity: ' @@ -98,7 +101,7 @@ def test_positive_create_by_type(): @pytest.mark.tier1 -def test_positive_update_by_type(): +def test_positive_update_by_type(target_sat): """Update some entities of different types and check audit logs for these events using entity type as search criteria @@ -110,19 +113,19 @@ def test_positive_update_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + target_sat.api.Architecture(), + target_sat.api.Domain(), + target_sat.api.HostGroup(), + target_sat.api.Location(), + target_sat.api.Role(), + target_sat.api.UserGroup(), ]: created_entity = entity.create() name = created_entity.name new_name = gen_string('alpha') created_entity.name = new_name created_entity = created_entity.update(['name']) - audits = entities.Audit().search( + audits = target_sat.api.Audit().search( query={'search': f'type={created_entity.__class__.__name__.lower()}'} ) entity_audits = [entry for entry in audits if entry.auditable_name == name] @@ -135,7 +138,7 @@ def test_positive_update_by_type(): @pytest.mark.tier1 -def test_positive_delete_by_type(): +def test_positive_delete_by_type(target_sat): """Delete some entities of different types and check audit logs for these events using entity type as search criteria @@ -147,17 +150,17 @@ def test_positive_delete_by_type(): :CaseImportance: Medium """ for entity in [ - entities.Architecture(), - entities.Domain(), - entities.Host(), - entities.HostGroup(), - entities.Location(), - entities.Role(), - entities.UserGroup(), + target_sat.api.Architecture(), + target_sat.api.Domain(), + target_sat.api.Host(), + target_sat.api.HostGroup(), + target_sat.api.Location(), + target_sat.api.Role(), + target_sat.api.UserGroup(), ]: created_entity = entity.create() created_entity.delete() - audits = entities.Audit().search( + audits = target_sat.api.Audit().search( query={'search': f'type={created_entity.__class__.__name__.lower()}'} ) entity_audits = [entry for entry in audits if entry.auditable_name == created_entity.name] diff --git a/tests/foreman/api/test_bookmarks.py 
b/tests/foreman/api/test_bookmarks.py index 71ae30391ab..6ae674f7699 100644 --- a/tests/foreman/api/test_bookmarks.py +++ b/tests/foreman/api/test_bookmarks.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -32,7 +31,7 @@ @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_name(controller): +def test_positive_create_with_name(controller, target_sat): """Create a bookmark :id: aeef0944-379a-4a27-902d-aa5969dbd441 @@ -51,14 +50,14 @@ def test_positive_create_with_name(controller): :CaseImportance: Critical """ name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, name=name, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, name=name, public=False).create() assert bm.controller == controller assert bm.name == name @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_with_query(controller): +def test_positive_create_with_query(controller, target_sat): """Create a bookmark :id: 9fb6d485-92b5-43ea-b776-012c13734100 @@ -77,7 +76,7 @@ def test_positive_create_with_query(controller): :CaseImportance: Critical """ query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, query=query).create() + bm = target_sat.api.Bookmark(controller=controller, query=query).create() assert bm.controller == controller assert bm.query == query @@ -85,7 +84,7 @@ def test_positive_create_with_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('public', (True, False)) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_create_public(controller, public): +def test_positive_create_public(controller, public, target_sat): """Create a public bookmark :id: 511b9bcf-0661-4e44-b1bc-475a1c207aa9 @@ -103,14 +102,14 @@ def test_positive_create_public(controller, public): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller, public=public).create() + bm = target_sat.api.Bookmark(controller=controller, public=public).create() assert bm.controller == controller assert bm.public == public @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_with_invalid_name(controller): +def test_negative_create_with_invalid_name(controller, target_sat): """Create a bookmark with invalid name :id: 9a79c561-8225-43fc-8ec7-b6858e9665e2 @@ -131,14 +130,14 @@ def test_negative_create_with_invalid_name(controller): """ name = random.choice(invalid_values_list()) with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=False).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, public=False).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_empty_query(controller): +def test_negative_create_empty_query(controller, target_sat): """Create a bookmark with empty query :id: 674d569f-6f86-43ba-b9cc-f43e05e8ab1c @@ -159,14 +158,14 @@ def test_negative_create_empty_query(controller): """ name = gen_string('alpha') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, query='').create() - result = 
entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, query='').create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_same_name(controller): +def test_negative_create_same_name(controller, target_sat): """Create bookmarks with the same names :id: f78f6e97-da77-4a61-95c2-622c439d325d @@ -187,16 +186,16 @@ def test_negative_create_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() + target_sat.api.Bookmark(controller=controller, name=name).create() with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 1 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_create_null_public(controller): +def test_negative_create_null_public(controller, target_sat): """Create a bookmark omitting the public parameter :id: 0a4cb5ea-912b-445e-a874-b345e43d3eac @@ -220,14 +219,14 @@ def test_negative_create_null_public(controller): """ name = gen_string('alphanumeric') with pytest.raises(HTTPError): - entities.Bookmark(controller=controller, name=name, public=None).create() - result = entities.Bookmark().search(query={'search': f'name="{name}"'}) + target_sat.api.Bookmark(controller=controller, name=name, public=None).create() + result = target_sat.api.Bookmark().search(query={'search': f'name="{name}"'}) assert len(result) == 0 @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_name(controller): +def test_positive_update_name(controller, target_sat): """Update a bookmark :id: 1cde270a-26fb-4cff-bdff-89fef17a7624 @@ -246,7 +245,7 @@ def test_positive_update_name(controller): :CaseImportance: Critical """ new_name = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name bm = bm.update(['name']) assert bm.name == new_name @@ -254,7 +253,7 @@ def test_positive_update_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_same_name(controller): +def test_negative_update_same_name(controller, target_sat): """Update a bookmark with name already taken :id: 6becf121-2bea-4f7e-98f4-338bd88b8f4b @@ -274,8 +273,8 @@ def test_negative_update_same_name(controller): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Bookmark(controller=controller, name=name).create() - bm = entities.Bookmark(controller=controller).create() + target_sat.api.Bookmark(controller=controller, name=name).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.name = name with pytest.raises(HTTPError): bm.update(['name']) @@ -285,7 +284,7 @@ def test_negative_update_same_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_invalid_name(controller): +def test_negative_update_invalid_name(controller, target_sat): """Update a bookmark with an invalid name 
:id: 479795bb-aeed-45b3-a7e3-d3449c808087 @@ -304,7 +303,7 @@ def test_negative_update_invalid_name(controller): :CaseImportance: Critical """ new_name = random.choice(invalid_values_list()) - bm = entities.Bookmark(controller=controller, public=False).create() + bm = target_sat.api.Bookmark(controller=controller, public=False).create() bm.name = new_name with pytest.raises(HTTPError): bm.update(['name']) @@ -314,7 +313,7 @@ def test_negative_update_invalid_name(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_query(controller): +def test_positive_update_query(controller, target_sat): """Update a bookmark query :id: 92a31de2-bebf-4396-94f5-adf59f8d66a5 @@ -333,7 +332,7 @@ def test_positive_update_query(controller): :CaseImportance: Critical """ new_query = random.choice(list(valid_data_list().values())) - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = new_query bm = bm.update(['query']) assert bm.query == new_query @@ -341,7 +340,7 @@ def test_positive_update_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('controller', CONTROLLERS) -def test_negative_update_empty_query(controller): +def test_negative_update_empty_query(controller, target_sat): """Update a bookmark with an empty query :id: 948602d3-532a-47fe-b313-91e3fab809bf @@ -359,7 +358,7 @@ def test_negative_update_empty_query(controller): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller).create() + bm = target_sat.api.Bookmark(controller=controller).create() bm.query = '' with pytest.raises(HTTPError): bm.update(['query']) @@ -370,7 +369,7 @@ def test_negative_update_empty_query(controller): @pytest.mark.tier1 @pytest.mark.parametrize('public', (True, False)) @pytest.mark.parametrize('controller', CONTROLLERS) -def test_positive_update_public(controller, public): +def test_positive_update_public(controller, public, target_sat): """Update a bookmark public state to private and vice versa :id: 2717360d-37c4-4bb9-bce1-b1edabdf11b3 @@ -389,7 +388,7 @@ def test_positive_update_public(controller, public): :CaseImportance: Critical """ - bm = entities.Bookmark(controller=controller, public=not public).create() + bm = target_sat.api.Bookmark(controller=controller, public=not public).create() assert bm.public != public bm.public = public bm = bm.update(['public']) diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index ed436479831..720003ff4e3 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -21,7 +21,7 @@ import re from time import sleep -from nailgun import client, entities +from nailgun import client from nailgun.entity_mixins import call_entity_method_with_timeout import pytest @@ -43,9 +43,13 @@ class TestCapsuleContentManagement: interactions and use capsule. 
""" - def update_capsule_download_policy(self, module_capsule_configured, download_policy): + def update_capsule_download_policy( + self, module_capsule_configured, download_policy, module_target_sat + ): """Updates capsule's download policy to desired value""" - proxy = entities.SmartProxy(id=module_capsule_configured.nailgun_capsule.id).read() + proxy = module_target_sat.api.SmartProxy( + id=module_capsule_configured.nailgun_capsule.id + ).read() proxy.download_policy = download_policy proxy.update(['download_policy']) @@ -78,7 +82,12 @@ def test_positive_insights_puppet_package_availability(self, module_capsule_conf @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_uploaded_content_library_sync( - self, module_capsule_configured, function_org, function_product, function_lce_library + self, + module_capsule_configured, + function_org, + function_product, + function_lce_library, + target_sat, ): """Ensure custom repo with no upstream url and manually uploaded content after publishing to Library is synchronized to capsule @@ -92,7 +101,7 @@ def test_positive_uploaded_content_library_sync( :expectedresults: custom content is present on external capsule """ - repo = entities.Repository(product=function_product, url=None).create() + repo = target_sat.api.Repository(product=function_product, url=None).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce_library.id} @@ -103,7 +112,7 @@ def test_positive_uploaded_content_library_sync( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Upload custom content into the repo with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -134,7 +143,7 @@ def test_positive_uploaded_content_library_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_checksum_sync( - self, module_capsule_configured, function_org, function_product, function_lce + self, module_capsule_configured, function_org, function_product, function_lce, target_sat ): """Synchronize repository to capsule, update repository's checksum type, trigger capsule sync and make sure checksum type was updated on @@ -152,7 +161,7 @@ def test_positive_checksum_sync( :CaseImportance: Critical """ # Create repository with sha256 checksum type - repo = entities.Repository( + repo = target_sat.api.Repository( product=function_product, checksum_type='sha256', mirroring_policy='additive', @@ -168,7 +177,7 @@ def test_positive_checksum_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync, publish and promote a repo - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() repo.sync() repo = repo.read() cv.publish() @@ -227,7 +236,12 @@ def test_positive_checksum_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_updated_repo( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Sync a 
custom repo with no upstream url but uploaded content to the Capsule via promoted CV, update content of the repo, publish and promote the CV again, resync @@ -255,7 +269,7 @@ def test_positive_sync_updated_repo( :BZ: 2025494 """ - repo = entities.Repository(url=None, product=function_product).create() + repo = target_sat.api.Repository(url=None, product=function_product).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -273,7 +287,7 @@ def test_positive_sync_updated_repo( assert repo.read().content_counts['rpm'] == 1 # Create, publish and promote CV with the repository to the Capsule's LCE - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 @@ -327,7 +341,12 @@ def test_positive_sync_updated_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_capsule_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create repository, add it to lifecycle environment, assign lifecycle environment with a capsule, sync repository, sync it once again, update @@ -351,7 +370,7 @@ def test_positive_capsule_sync( capsule """ repo_url = settings.repos.yum_1.url - repo = entities.Repository(product=function_product, url=repo_url).create() + repo = target_sat.api.Repository(product=function_product, url=repo_url).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': function_lce.id} @@ -362,7 +381,7 @@ def test_positive_capsule_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -489,12 +508,12 @@ def test_positive_iso_library_sync( reposet=constants.REPOSET['rhsc7_iso'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization - lce = entities.LifecycleEnvironment(organization=module_entitlement_manifest_org).search( - query={'search': f'name={constants.ENVIRONMENT}'} - )[0] + lce = module_target_sat.api.LifecycleEnvironment( + organization=module_entitlement_manifest_org + ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -506,7 +525,7 @@ def test_positive_iso_library_sync( assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView( + cv = module_target_sat.api.ContentView( organization=module_entitlement_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view @@ -534,7 +553,12 @@ def test_positive_iso_library_sync( @pytest.mark.tier4 
@pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_on_demand_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with 'on_demand' policy, add it to a CV, promote to an 'on_demand' Capsule's LCE, download a published package, @@ -553,7 +577,7 @@ def test_positive_on_demand_sync( repo_url = settings.repos.yum_3.url packages_count = constants.FAKE_3_YUM_REPOS_COUNT package = constants.FAKE_3_YUM_REPO_RPMS[0] - repo = entities.Repository( + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, @@ -572,7 +596,7 @@ def test_positive_on_demand_sync( self.update_capsule_download_policy(module_capsule_configured, 'on_demand') # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -614,7 +638,12 @@ def test_positive_on_demand_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') def test_positive_update_with_immediate_sync( - self, target_sat, module_capsule_configured, function_org, function_product, function_lce + self, + target_sat, + module_capsule_configured, + function_org, + function_product, + function_lce, ): """Create a repository with on_demand download policy, associate it with capsule, sync repo, update download policy to immediate, sync once @@ -631,7 +660,7 @@ def test_positive_update_with_immediate_sync( """ repo_url = settings.repos.yum_1.url packages_count = constants.FAKE_1_YUM_REPOS_COUNT - repo = entities.Repository( + repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', product=function_product, @@ -649,7 +678,7 @@ def test_positive_update_with_immediate_sync( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create a content view with the repository - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() # Sync repository repo.sync() repo = repo.read() @@ -762,8 +791,10 @@ def test_positive_sync_kickstart_repo( repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - repo = entities.Repository(id=repo_id).read() - lce = entities.LifecycleEnvironment(organization=function_entitlement_manifest_org).create() + repo = target_sat.api.Repository(id=repo_id).read() + lce = target_sat.api.LifecycleEnvironment( + organization=function_entitlement_manifest_org + ).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -777,7 +808,7 @@ def test_positive_sync_kickstart_repo( self.update_capsule_download_policy(module_capsule_configured, 'on_demand') # Create a content view with the repository - cv = entities.ContentView( + cv = target_sat.api.ContentView( organization=function_entitlement_manifest_org, repository=[repo] ).create() # Sync repository @@ -865,7 +896,7 @@ def test_positive_sync_container_repo_end_to_end( repos = [] for ups_name in upstream_names: - repo = entities.Repository( + repo = 
target_sat.api.Repository( content_type='docker', docker_upstream_name=ups_name, product=function_product, @@ -883,7 +914,7 @@ def test_positive_sync_container_repo_end_to_end( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories - cv = entities.ContentView(organization=function_org, repository=repos).create() + cv = target_sat.api.ContentView(organization=function_org, repository=repos).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 @@ -992,7 +1023,7 @@ def test_positive_sync_collection_repo( - name: theforeman.operations version: "0.1.0" ''' - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='ansible_collection', ansible_collection_requirements=requirements, product=function_product, @@ -1063,7 +1094,7 @@ def test_positive_sync_file_repo( :BZ: 1985122 """ - repo = entities.Repository( + repo = target_sat.api.Repository( content_type='file', product=function_product, url=constants.FAKE_FILE_LARGE_URL, @@ -1083,7 +1114,7 @@ def test_positive_sync_file_repo( assert function_lce.id in [capsule_lce['id'] for capsule_lce in result['results']] # Create and publish a content view with all repositories - cv = entities.ContentView(organization=function_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=function_org, repository=[repo]).create() cv.publish() cv = cv.read() assert len(cv.version) == 1 diff --git a/tests/foreman/api/test_computeprofile.py b/tests/foreman/api/test_computeprofile.py index d0ee003c7a3..dffe47620b4 100644 --- a/tests/foreman/api/test_computeprofile.py +++ b/tests/foreman/api/test_computeprofile.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -29,7 +28,7 @@ @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create new Compute Profile using different names :id: 97d04911-9368-4674-92c7-1e3ff114bc18 @@ -42,13 +41,13 @@ def test_positive_create_with_name(name): :parametrized: yes """ - profile = entities.ComputeProfile(name=name).create() + profile = target_sat.api.ComputeProfile(name=name).create() assert name == profile.name @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create(name): +def test_negative_create(name, target_sat): """Attempt to create Compute Profile using invalid names only :id: 2d34a1fd-70a5-4e59-b2e2-86fbfe8e31ab @@ -62,12 +61,12 @@ def test_negative_create(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.ComputeProfile(name=name).create() + target_sat.api.ComputeProfile(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(new_name): +def test_positive_update_name(new_name, target_sat): """Update selected Compute Profile entity using proper names :id: c79193d7-2e0f-4ed9-b947-05feeddabfda @@ -80,15 +79,15 @@ def test_positive_update_name(new_name): :parametrized: yes """ - profile = entities.ComputeProfile().create() - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + profile = target_sat.api.ComputeProfile().create() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = 
target_sat.api.ComputeProfile(id=profile.id).read() assert new_name == updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(new_name): +def test_negative_update_name(new_name, target_sat): """Attempt to update Compute Profile entity using invalid names only :id: 042b40d5-a78b-4e65-b5cb-5b270b800b37 @@ -101,16 +100,16 @@ def test_negative_update_name(new_name): :parametrized: yes """ - profile = entities.ComputeProfile().create() + profile = target_sat.api.ComputeProfile().create() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id, name=new_name).update(['name']) - updated_profile = entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id, name=new_name).update(['name']) + updated_profile = target_sat.api.ComputeProfile(id=profile.id).read() assert new_name != updated_profile.name @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_delete(new_name): +def test_positive_delete(new_name, target_sat): """Delete Compute Profile entity :id: 0a620e23-7ba6-4178-af7a-fd1e332f478f @@ -123,7 +122,7 @@ def test_positive_delete(new_name): :parametrized: yes """ - profile = entities.ComputeProfile(name=new_name).create() + profile = target_sat.api.ComputeProfile(name=new_name).create() profile.delete() with pytest.raises(HTTPError): - entities.ComputeProfile(id=profile.id).read() + target_sat.api.ComputeProfile(id=profile.id).read() diff --git a/tests/foreman/api/test_contentcredentials.py b/tests/foreman/api/test_contentcredentials.py index 81602c953e4..aaad0322976 100644 --- a/tests/foreman/api/test_contentcredentials.py +++ b/tests/foreman/api/test_contentcredentials.py @@ -19,7 +19,6 @@ from copy import copy from fauxfactory import gen_string -from nailgun import entities import pytest from requests import HTTPError @@ -35,7 +34,7 @@ @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a GPG key with valid name. :id: 741d969b-28ef-481f-bcf7-ed4cd920b030 @@ -46,12 +45,12 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, name=name).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert name == gpg_key.name @pytest.mark.tier1 -def test_positive_create_with_content(module_org): +def test_positive_create_with_content(module_org, module_target_sat): """Create a GPG key with valid name and valid gpg key text. :id: cfa6690e-fed7-49cf-94f9-fd2deed941c0 @@ -60,13 +59,13 @@ def test_positive_create_with_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() assert key_content == gpg_key.content @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_name(module_org, name): +def test_negative_create_name(module_org, name, module_target_sat): """Attempt to create GPG key with invalid names only. 
:id: 904a3ed0-7d50-495e-a700-b4f1ae913599 @@ -78,13 +77,13 @@ def test_negative_create_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(module_org, module_target_sat): """Attempt to create a GPG key providing a name of already existent entity @@ -95,15 +94,15 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, name=name).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.tier1 -def test_negative_create_with_content(module_org): +def test_negative_create_with_content(module_org, module_target_sat): """Attempt to create GPG key with empty content. :id: fc79c840-6bcb-4d97-9145-c0008d5b028d @@ -113,14 +112,14 @@ def test_negative_create_with_content(module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.GPGKey(organization=module_org, content='').create() + module_target_sat.api.GPGKey(organization=module_org, content='').create() assert error.value.response.status_code == 422 assert 'Validation failed:' in error.value.response.text @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Update GPG key name to another valid name. :id: 9868025d-5346-42c9-b850-916ce37a9541 @@ -131,14 +130,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name gpg_key = gpg_key.update(['name']) assert new_name == gpg_key.name @pytest.mark.tier1 -def test_positive_update_content(module_org): +def test_positive_update_content(module_org, module_target_sat): """Update GPG key content text to another valid one. 
:id: 62fdaf55-c931-4be6-9857-68cc816046ad @@ -147,7 +146,7 @@ def test_positive_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -158,7 +157,7 @@ def test_positive_update_content(module_org): @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, new_name): +def test_negative_update_name(module_org, new_name, module_target_sat): """Attempt to update GPG key name to invalid one :id: 1a43f610-8969-4f08-967f-fb6af0fca31b @@ -169,7 +168,7 @@ def test_negative_update_name(module_org, new_name): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.name = new_name with pytest.raises(HTTPError) as error: gpg_key.update(['name']) @@ -178,7 +177,7 @@ def test_negative_update_name(module_org, new_name): @pytest.mark.tier1 -def test_negative_update_same_name(module_org): +def test_negative_update_same_name(module_org, module_target_sat): """Attempt to update GPG key name to the name of existing GPG key entity @@ -189,8 +188,8 @@ def test_negative_update_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alpha') - entities.GPGKey(organization=module_org, name=name).create() - new_gpg_key = entities.GPGKey(organization=module_org).create() + module_target_sat.api.GPGKey(organization=module_org, name=name).create() + new_gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() new_gpg_key.name = name with pytest.raises(HTTPError) as error: new_gpg_key.update(['name']) @@ -199,7 +198,7 @@ def test_negative_update_same_name(module_org): @pytest.mark.tier1 -def test_negative_update_content(module_org): +def test_negative_update_content(module_org, module_target_sat): """Attempt to update GPG key content to invalid one :id: fee30ef8-370a-4fdd-9e45-e7ab95dade8b @@ -208,7 +207,7 @@ def test_negative_update_content(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org, content=key_content).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org, content=key_content).create() gpg_key.content = '' with pytest.raises(HTTPError) as error: gpg_key.update(['content']) @@ -218,7 +217,7 @@ def test_negative_update_content(module_org): @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Create a GPG key with different names and then delete it. 
:id: b06d211f-2827-40f7-b627-8b1fbaee2eb4 @@ -227,7 +226,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - gpg_key = entities.GPGKey(organization=module_org).create() + gpg_key = module_target_sat.api.GPGKey(organization=module_org).create() gpg_key.delete() with pytest.raises(HTTPError): gpg_key.read() diff --git a/tests/foreman/api/test_contentview.py b/tests/foreman/api/test_contentview.py index f1805bd06d0..3e1c418588c 100644 --- a/tests/foreman/api/test_contentview.py +++ b/tests/foreman/api/test_contentview.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_integer, gen_string, gen_utf8 -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -47,8 +46,8 @@ @pytest.fixture(scope='class') -def class_cv(module_org): - return entities.ContentView(organization=module_org).create() +def class_cv(module_org, class_target_sat): + return class_target_sat.api.ContentView(organization=module_org).create() @pytest.fixture(scope='class') @@ -64,22 +63,22 @@ def class_promoted_cv(class_published_cv, module_lce): @pytest.fixture(scope='class') -def class_cloned_cv(class_cv): - copied_cv_id = entities.ContentView(id=class_cv.id).copy( +def class_cloned_cv(class_cv, class_target_sat): + copied_cv_id = class_target_sat.api.ContentView(id=class_cv.id).copy( data={'name': gen_string('alpha', gen_integer(3, 30))} )['id'] - return entities.ContentView(id=copied_cv_id).read() + return class_target_sat.api.ContentView(id=copied_cv_id).read() @pytest.fixture(scope='class') -def class_published_cloned_cv(class_cloned_cv): +def class_published_cloned_cv(class_cloned_cv, class_target_sat): class_cloned_cv.publish() - return entities.ContentView(id=class_cloned_cv.id).read() + return class_target_sat.api.ContentView(id=class_cloned_cv.id).read() @pytest.fixture -def content_view(module_org): - return entities.ContentView(organization=module_org).create() +def content_view(module_org, module_target_sat): + return module_target_sat.api.ContentView(organization=module_org).create() def apply_package_filter(content_view, repo, package, target_sat, inclusion=True): @@ -92,7 +91,7 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True :return list : list of content view versions """ - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[repo] ).create() cv_filter_rule = target_sat.api.ContentViewFilterRule( @@ -108,7 +107,9 @@ def apply_package_filter(content_view, repo, package, target_sat, inclusion=True class TestContentView: @pytest.mark.upgrade @pytest.mark.tier3 - def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, module_org): + def test_positive_subscribe_host( + self, class_cv, class_promoted_cv, module_lce, module_org, module_target_sat + ): """Subscribe a host to a content view :id: b5a08369-bf92-48ab-b9aa-10f5b9774b79 @@ -129,7 +130,7 @@ def test_positive_subscribe_host(self, class_cv, class_promoted_cv, module_lce, # Check that no host associated to just created content view assert class_cv.content_host_count == 0 assert len(class_promoted_cv.version) == 1 - host = entities.Host( + host = module_target_sat.api.Host( content_facet_attributes={ 'content_view_id': class_cv.id, 'lifecycle_environment_id': module_lce.id, @@ -159,7 +160,9 @@ def test_positive_clone_within_same_env(self, class_published_cloned_cv, module_ @pytest.mark.upgrade @pytest.mark.tier2 - def 
test_positive_clone_with_diff_env(self, module_org, class_published_cloned_cv): + def test_positive_clone_with_diff_env( + self, module_org, class_published_cloned_cv, module_target_sat + ): """Attempt to create, publish and promote a new content view based on an existing view but promoted to a different environment @@ -173,11 +176,11 @@ def test_positive_clone_with_diff_env(self, module_org, class_published_cloned_c :CaseImportance: Medium """ - le_clone = entities.LifecycleEnvironment(organization=module_org).create() + le_clone = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() class_published_cloned_cv.read().version[0].promote(data={'environment_ids': le_clone.id}) @pytest.mark.tier2 - def test_positive_add_custom_content(self, module_product, module_org): + def test_positive_add_custom_content(self, module_product, module_org, module_target_sat): """Associate custom content in a view :id: db452e0c-0c17-40f2-bab4-8467e7a875f1 @@ -188,9 +191,9 @@ def test_positive_add_custom_content(self, module_product, module_org): :CaseImportance: Critical """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() - content_view = entities.ContentView(organization=module_org.id).create() + content_view = module_target_sat.api.ContentView(organization=module_org.id).create() assert len(content_view.repository) == 0 content_view.repository = [yum_repo] content_view = content_view.update(['repository']) @@ -201,7 +204,9 @@ def test_positive_add_custom_content(self, module_product, module_org): @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_add_custom_module_streams(self, content_view, module_product, module_org): + def test_positive_add_custom_module_streams( + self, content_view, module_product, module_org, module_target_sat + ): """Associate custom content (module streams) in a view :id: 9e4821cb-293a-4d84-bd1f-bb9fff36b143 @@ -212,7 +217,7 @@ def test_positive_add_custom_module_streams(self, content_view, module_product, :CaseImportance: High """ - yum_repo = entities.Repository( + yum_repo = module_target_sat.api.Repository( product=module_product, url=settings.repos.module_stream_1.url ).create() yum_repo.sync() @@ -225,7 +230,9 @@ def test_positive_add_custom_module_streams(self, content_view, module_product, assert repo.content_counts['module_stream'] == 7 @pytest.mark.tier2 - def test_negative_add_dupe_repos(self, content_view, module_product, module_org): + def test_negative_add_dupe_repos( + self, content_view, module_product, module_org, module_target_sat + ): """Attempt to associate the same repo multiple times within a content view @@ -237,7 +244,7 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) :CaseImportance: Low """ - yum_repo = entities.Repository(product=module_product).create() + yum_repo = module_target_sat.api.Repository(product=module_product).create() yum_repo.sync() assert len(content_view.repository) == 0 with pytest.raises(HTTPError): @@ -250,7 +257,7 @@ def test_negative_add_dupe_repos(self, content_view, module_product, module_org) @pytest.mark.skipif( (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) - def test_positive_add_sha512_rpm(self, content_view, module_org): + def test_positive_add_sha512_rpm(self, content_view, module_org, module_target_sat): """Associate sha512 RPM content in a view :id:
1f473b02-5e2b-41ff-a706-c0635abc2476 @@ -269,8 +276,10 @@ def test_positive_add_sha512_rpm(self, content_view, module_org): :BZ: 1639406 """ - product = entities.Product(organization=module_org).create() - yum_sha512_repo = entities.Repository(product=product, url=CUSTOM_RPM_SHA_512).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_sha512_repo = module_target_sat.api.Repository( + product=product, url=CUSTOM_RPM_SHA_512 + ).create() yum_sha512_repo.sync() repo_content = yum_sha512_repo.read() # Assert that the repository content was properly synced @@ -294,7 +303,7 @@ class TestContentViewCreate: @pytest.mark.parametrize('composite', [True, False]) @pytest.mark.tier1 - def test_positive_create_composite(self, composite): + def test_positive_create_composite(self, composite, target_sat): """Create composite and non-composite content views. :id: 4a3b616d-53ab-4396-9a50-916d6c42a401 @@ -306,11 +315,11 @@ def test_positive_create_composite(self, composite): :CaseImportance: Critical """ - assert entities.ContentView(composite=composite).create().composite == composite + assert target_sat.api.ContentView(composite=composite).create().composite == composite @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, name, target_sat): """Create empty content-view with random names. :id: 80d36498-2e71-4aa9-b696-f0a45e86267f @@ -321,11 +330,11 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - assert entities.ContentView(name=name).create().name == name + assert target_sat.api.ContentView(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_create_with_description(self, desc): + def test_positive_create_with_description(self, desc, target_sat): """Create empty content view with random description. :id: 068e3e7c-34ac-47cb-a1bb-904d12c74cc7 @@ -336,10 +345,10 @@ def test_positive_create_with_description(self, desc): :CaseImportance: High """ - assert entities.ContentView(description=desc).create().description == desc + assert target_sat.api.ContentView(description=desc).create().description == desc @pytest.mark.tier1 - def test_positive_clone(self, content_view, module_org): + def test_positive_clone(self, content_view, module_org, module_target_sat): """Create a content view by copying an existing one :id: ee03dc63-e2b0-4a89-a828-2910405279ff @@ -348,7 +357,7 @@ def test_positive_clone(self, content_view, module_org): :CaseImportance: Critical """ - cloned_cv = entities.ContentView( + cloned_cv = module_target_sat.api.ContentView( id=content_view.copy(data={'name': gen_string('alpha', gen_integer(3, 30))})['id'] ).read_json() cv_origin = content_view.read_json() @@ -360,7 +369,7 @@ def test_positive_clone(self, content_view, module_org): @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Create content view providing an invalid name. 
:id: 261376ca-7d12-41b6-9c36-5f284865243e @@ -372,7 +381,7 @@ def test_negative_create_with_invalid_name(self, name): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.ContentView(name=name).create() + target_sat.api.ContentView(name=name).create() class TestContentViewPublishPromote: @@ -382,16 +391,18 @@ class TestContentViewPublishPromote: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.fixture(scope='class', autouse=True) - def class_setup(self, request, module_product): + def class_setup(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.yum_repo = entities.Repository(product=module_product).create() + request.cls.yum_repo = class_target_sat.api.Repository(product=module_product).create() self.yum_repo.sync() - request.cls.swid_repo = entities.Repository( + request.cls.swid_repo = class_target_sat.api.Repository( product=module_product, url=settings.repos.swid_tag.url ).create() self.swid_repo.sync() - def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): + def add_content_views_to_composite( + self, module_target_sat, composite_cv, module_org, cv_amount=1 + ): """Add necessary number of content views to the composite one :param composite_cv: Composite content view object @@ -399,7 +410,7 @@ def add_content_views_to_composite(self, composite_cv, module_org, cv_amount=1): """ cv_versions = [] for _ in range(cv_amount): - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() cv_versions.append(content_view.read().version[0]) composite_cv.component = cv_versions @@ -434,7 +445,7 @@ def test_positive_publish_with_content_multiple(self, content_view, module_org): assert cvv.read_json()['package_count'] > 0 @pytest.mark.tier2 - def test_positive_publish_composite_multiple_content_once(self, module_org): + def test_positive_publish_composite_multiple_content_once(self, module_org, module_target_sat): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view once. @@ -448,16 +459,20 @@ def test_positive_publish_composite_multiple_content_once(self, module_org): :CaseImportance: Critical """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() - self.add_content_views_to_composite(composite_cv, module_org, random.randint(2, 3)) + self.add_content_views_to_composite( + module_target_sat, composite_cv, module_org, random.randint(2, 3) + ) composite_cv.publish() assert len(composite_cv.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_composite_multiple_content_multiple(self, module_org): + def test_positive_publish_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that publish that composite content view several times. 
@@ -471,7 +486,7 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -482,7 +497,7 @@ def test_positive_publish_composite_multiple_content_multiple(self, module_org): assert len(composite_cv.read().version) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_yum_multiple(self, content_view, module_org): + def test_positive_promote_with_yum_multiple(self, content_view, module_org, module_target_sat): """Give a content view a yum repo, publish it once and promote the content view version ``REPEAT + 1`` times. @@ -503,7 +518,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): # Promote the content view version. for _ in range(REPEAT): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id}) # Everything's done - check some content view attributes... @@ -517,7 +532,7 @@ def test_positive_promote_with_yum_multiple(self, content_view, module_org): assert cvv_attrs['package_count'] > 0 @pytest.mark.tier2 - def test_positive_add_to_composite(self, content_view, module_org): + def test_positive_add_to_composite(self, content_view, module_org, module_target_sat): """Create normal content view, publish and add it to a new composite content view @@ -535,7 +550,7 @@ def test_positive_add_to_composite(self, content_view, module_org): content_view.publish() content_view = content_view.read() - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -548,7 +563,9 @@ def test_positive_add_to_composite(self, content_view, module_org): assert composite_cv.component[0].read().content_view.id == content_view.id @pytest.mark.tier2 - def test_negative_add_components_to_composite(self, content_view, module_org): + def test_negative_add_components_to_composite( + self, content_view, module_org, module_target_sat + ): """Attempt to associate components in a non-composite content view @@ -564,7 +581,7 @@ def test_negative_add_components_to_composite(self, content_view, module_org): content_view.update(['repository']) content_view.publish() content_view = content_view.read() - non_composite_cv = entities.ContentView( + non_composite_cv = module_target_sat.api.ContentView( composite=False, organization=module_org, ).create() @@ -575,7 +592,9 @@ def test_negative_add_components_to_composite(self, content_view, module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_once(self, module_lce, module_org): + def test_positive_promote_composite_multiple_content_once( + self, module_lce, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view once. 
@@ -589,7 +608,7 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -602,7 +621,9 @@ def test_positive_promote_composite_multiple_content_once(self, module_lce, modu @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_composite_multiple_content_multiple(self, module_org): + def test_positive_promote_composite_multiple_content_multiple( + self, module_org, module_target_sat + ): """Create empty composite view and assign random number of normal content views to it. After that promote that composite content view ``Library + random`` times. @@ -616,7 +637,7 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): :CaseImportance: High """ - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -626,7 +647,7 @@ def test_positive_promote_composite_multiple_content_multiple(self, module_org): envs_amount = random.randint(2, 3) for _ in range(envs_amount): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() composite_cv.version[0].promote(data={'environment_ids': lce.id}) composite_cv = composite_cv.read() assert len(composite_cv.version) == 1 @@ -670,7 +691,7 @@ def test_positive_promote_out_of_sequence(self, content_view, module_org): @pytest.mark.tier3 @pytest.mark.pit_server - def test_positive_publish_multiple_repos(self, content_view, module_org): + def test_positive_publish_multiple_repos(self, content_view, module_org, module_target_sat): """Attempt to publish a content view with multiple YUM repos. 
:id: 5557a33b-7a6f-45f5-9fe4-23a704ed9e21 @@ -689,9 +710,9 @@ def test_positive_publish_multiple_repos(self, content_view, module_org): :BZ: 1651930 """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(10): - repo = entities.Repository(product=product).create() + repo = module_target_sat.api.Repository(product=product).create() repo.sync() content_view.repository.append(repo) content_view = content_view.update(['repository']) @@ -720,13 +741,13 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): :CaseImportance: Medium """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository( content_type='yum', product=product, url=settings.repos.module_stream_1.url ).create() repo.sync() - content_view_1 = entities.ContentView(organization=module_org).create() - content_view_2 = entities.ContentView(organization=module_org).create() + content_view_1 = target_sat.api.ContentView(organization=module_org).create() + content_view_2 = target_sat.api.ContentView(organization=module_org).create() # create content views with same repo and different filter for content_view, package in [(content_view_1, 'camel'), (content_view_2, 'cow')]: @@ -736,7 +757,7 @@ def test_composite_content_view_with_same_repos(self, module_org, target_sat): assert content_view_info.package_count == 35 # create composite content view with these two published content views - comp_content_view = entities.ContentView( + comp_content_view = target_sat.api.ContentView( composite=True, organization=module_org, ).create() @@ -778,7 +799,7 @@ def test_positive_update_attributes(self, module_cv, key, value): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_update_name(self, module_cv, new_name): + def test_positive_update_name(self, module_cv, new_name, module_target_sat): """Create content view providing the initial name, then update its name to another valid name. @@ -792,12 +813,12 @@ def test_positive_update_name(self, module_cv, new_name): """ module_cv.name = new_name module_cv.update(['name']) - updated = entities.ContentView(id=module_cv.id).read() + updated = module_target_sat.api.ContentView(id=module_cv.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_name', **parametrized(invalid_names_list())) @pytest.mark.tier1 - def test_negative_update_name(self, module_cv, new_name): + def test_negative_update_name(self, module_cv, new_name, module_target_sat): """Create content view then update its name to an invalid name. @@ -821,7 +842,7 @@ class TestContentViewDelete: @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 - def test_positive_delete(self, content_view, name): + def test_positive_delete(self, content_view, name, target_sat): """Create content view and then delete it. 
:id: d582f1b3-8118-4e78-a639-237c6f9d27c6 @@ -834,7 +855,7 @@ def test_positive_delete(self, content_view, name): """ content_view.delete() with pytest.raises(HTTPError): - entities.ContentView(id=content_view.id).read() + target_sat.api.ContentView(id=content_view.id).read() @pytest.mark.run_in_one_thread @@ -843,11 +864,11 @@ class TestContentViewRedHatContent: @pytest.fixture(scope='class', autouse=True) def initiate_testclass( - self, request, module_cv, module_entitlement_manifest_org, module_target_sat + self, request, module_cv, module_entitlement_manifest_org, class_target_sat ): """Set up organization, product and repositories for tests.""" - repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( + repo_id = class_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_entitlement_manifest_org.id, product=PRDS['rhel'], @@ -855,7 +876,7 @@ def initiate_testclass( reposet=REPOSET['rhst7'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() module_cv.repository = [self.repo] module_cv.update(['repository']) @@ -877,7 +898,7 @@ def test_positive_add_rh(self): assert self.yumcv.repository[0].read().name == REPOS['rhst7']['name'] @pytest.mark.tier2 - def test_positive_add_rh_custom_spin(self): + def test_positive_add_rh_custom_spin(self, target_sat): """Associate Red Hat content in a view and filter it using rule :id: 30c3103d-9503-4501-8117-1f2d25353215 @@ -890,7 +911,7 @@ def test_positive_add_rh_custom_spin(self): :CaseImportance: High """ # content_view ← cv_filter - cv_filter = entities.RPMContentViewFilter( + cv_filter = target_sat.api.RPMContentViewFilter( content_view=self.yumcv, inclusion='true', name=gen_string('alphanumeric'), @@ -898,13 +919,13 @@ def test_positive_add_rh_custom_spin(self): assert self.yumcv.id == cv_filter.content_view.id # content_view ← cv_filter ← cv_filter_rule - cv_filter_rule = entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, name=gen_string('alphanumeric'), version='1.0' ).create() assert cv_filter.id == cv_filter_rule.content_view_filter.id @pytest.mark.tier2 - def test_positive_update_rh_custom_spin(self): + def test_positive_update_rh_custom_spin(self, target_sat): """Edit content views for a custom rh spin. For example, modify a filter @@ -917,12 +938,12 @@ def test_positive_update_rh_custom_spin(self): :CaseImportance: High """ - cvf = entities.ErratumContentViewFilter( + cvf = target_sat.api.ErratumContentViewFilter( content_view=self.yumcv, ).create() assert self.yumcv.id == cvf.content_view.id - cv_filter_rule = entities.ContentViewFilterRule( + cv_filter_rule = target_sat.api.ContentViewFilterRule( content_view_filter=cvf, types=[FILTER_ERRATA_TYPE['enhancement']] ).create() assert cv_filter_rule.types == [FILTER_ERRATA_TYPE['enhancement']] @@ -949,7 +970,7 @@ def test_positive_publish_rh(self, module_org, content_view): assert len(content_view.read().version) == 1 @pytest.mark.tier2 - def test_positive_publish_rh_custom_spin(self, module_org, content_view): + def test_positive_publish_rh_custom_spin(self, module_org, content_view, module_target_sat): """Attempt to publish a content view containing Red Hat spin - i.e., contains filters. 
@@ -963,7 +984,7 @@ def test_positive_publish_rh_custom_spin(self, module_org, content_view): """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() @@ -994,7 +1015,7 @@ def test_positive_promote_rh(self, module_org, content_view, module_lce): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_rh_custom_spin(self, content_view, module_lce): + def test_positive_promote_rh_custom_spin(self, content_view, module_lce, module_target_sat): """Attempt to promote a content view containing Red Hat spin - i.e., contains filters. @@ -1008,7 +1029,7 @@ def test_positive_promote_rh_custom_spin(self, content_view, module_lce): """ content_view.repository = [self.repo] content_view = content_view.update(['repository']) - entities.RPMContentViewFilter( + module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion='true', name=gen_string('alphanumeric') ).create() content_view.publish() @@ -1170,7 +1191,7 @@ def test_negative_readonly_user_actions( # create a role with content views read only permissions target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( filters={'name': 'view_content_views'}, query={'search': 'resource_type="Katello::ContentView"'}, ), @@ -1179,7 +1200,7 @@ def test_negative_readonly_user_actions( # create environment permissions and assign it to our role target_sat.api.Filter( organization=[module_org], - permission=entities.Permission().search( + permission=target_sat.api.Permission().search( query={'search': 'resource_type="Katello::KTEnvironment"'} ), role=function_role, @@ -1284,9 +1305,9 @@ class TestOstreeContentView: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.fixture(scope='class', autouse=True) - def initiate_testclass(self, request, module_product): + def initiate_testclass(self, request, module_product, class_target_sat): """Set up organization, product and repositories for tests.""" - request.cls.ostree_repo = entities.Repository( + request.cls.ostree_repo = class_target_sat.api.Repository( product=module_product, content_type='ostree', url=FEDORA_OSTREE_REPO, @@ -1294,13 +1315,13 @@ def initiate_testclass(self, request, module_product): ).create() self.ostree_repo.sync() # Create new yum repository - request.cls.yum_repo = entities.Repository( + request.cls.yum_repo = class_target_sat.api.Repository( url=settings.repos.yum_1.url, product=module_product, ).create() self.yum_repo.sync() # Create new docker repository - request.cls.docker_repo = entities.Repository( + request.cls.docker_repo = class_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product, @@ -1408,7 +1429,7 @@ def initiate_testclass(self, request, module_entitlement_manifest_org, class_tar reposet=REPOSET['rhaht'], releasever=None, ) - request.cls.repo = entities.Repository(id=repo_id) + request.cls.repo = class_target_sat.api.Repository(id=repo_id) self.repo.sync() @pytest.mark.tier2 @@ -1494,7 +1515,7 @@ def test_positive_publish_promote_with_RH_ostree_and_other( releasever=None, ) # Sync repository - rpm_repo = entities.Repository(id=repo_id) + rpm_repo = module_target_sat.api.Repository(id=repo_id) rpm_repo.sync() content_view.repository = [self.repo, rpm_repo] 
content_view = content_view.update(['repository']) diff --git a/tests/foreman/api/test_contentviewfilter.py b/tests/foreman/api/test_contentviewfilter.py index e37290422c5..42d34d54206 100644 --- a/tests/foreman/api/test_contentviewfilter.py +++ b/tests/foreman/api/test_contentviewfilter.py @@ -24,7 +24,7 @@ from random import randint from fauxfactory import gen_integer, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -38,15 +38,15 @@ @pytest.fixture(scope='module') -def sync_repo(module_product): - repo = entities.Repository(product=module_product).create() +def sync_repo(module_product, module_target_sat): + repo = module_target_sat.api.Repository(product=module_product).create() repo.sync() return repo @pytest.fixture(scope='module') -def sync_repo_module_stream(module_product): - repo = entities.Repository( +def sync_repo_module_stream(module_product, module_target_sat): + repo = module_target_sat.api.Repository( content_type='yum', product=module_product, url=settings.repos.module_stream_1.url ).create() repo.sync() @@ -54,13 +54,15 @@ def sync_repo_module_stream(module_product): @pytest.fixture -def content_view(module_org, sync_repo): - return entities.ContentView(organization=module_org, repository=[sync_repo]).create() +def content_view(module_org, sync_repo, module_target_sat): + return module_target_sat.api.ContentView( + organization=module_org, repository=[sync_repo] + ).create() @pytest.fixture -def content_view_module_stream(module_org, sync_repo_module_stream): - return entities.ContentView( +def content_view_module_stream(module_org, sync_repo_module_stream, module_target_sat): + return module_target_sat.api.ContentView( organization=module_org, repository=[sync_repo_module_stream] ).create() @@ -69,7 +71,7 @@ class TestContentViewFilter: """Tests for content view filters.""" @pytest.mark.tier2 - def test_negative_get_with_no_args(self): + def test_negative_get_with_no_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. :id: da29fd90-cd96-49f9-b94e-71d4e3a35a57 @@ -82,14 +84,14 @@ def test_negative_get_with_no_args(self): :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, ) assert response.status_code == http.client.OK @pytest.mark.tier2 - def test_negative_get_with_bad_args(self): + def test_negative_get_with_bad_args(self, target_sat): """Issue an HTTP GET to the base content view filters path. 
:id: e6fea726-930b-4b74-b784-41528811994f @@ -102,7 +104,7 @@ def test_negative_get_with_bad_args(self): :CaseImportance: Low """ response = client.get( - entities.AbstractContentViewFilter().path(), + target_sat.api.AbstractContentViewFilter().path(), auth=get_credentials(), verify=False, data={'foo': 'bar'}, @@ -111,7 +113,7 @@ def test_negative_get_with_bad_args(self): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_erratum_with_name(self, name, content_view): + def test_positive_create_erratum_with_name(self, name, content_view, target_sat): """Create new erratum content filter using different inputs as a name :id: f78a133f-441f-4fcc-b292-b9eed228d755 @@ -123,13 +125,13 @@ def test_positive_create_erratum_with_name(self, name, content_view): :CaseLevel: Integration """ - cvf = entities.ErratumContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.ErratumContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'erratum' @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_pkg_group_with_name(self, name, content_view): + def test_positive_create_pkg_group_with_name(self, name, content_view, target_sat): """Create new package group content filter using different inputs as a name :id: f9bfb6bf-a879-4f1a-970d-8f4df533cd59 @@ -143,7 +145,7 @@ def test_positive_create_pkg_group_with_name(self, name, content_view): :CaseImportance: Medium """ - cvf = entities.PackageGroupContentViewFilter( + cvf = target_sat.api.PackageGroupContentViewFilter( content_view=content_view, name=name, ).create() @@ -152,7 +154,7 @@ def test_positive_create_pkg_group_with_name(self, name, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_rpm_with_name(self, name, content_view): + def test_positive_create_rpm_with_name(self, name, content_view, target_sat): """Create new RPM content filter using different inputs as a name :id: f1c88e72-7993-47ac-8fbc-c749d32bc768 @@ -166,13 +168,13 @@ def test_positive_create_rpm_with_name(self, name, content_view): :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() assert cvf.name == name assert cvf.type == 'rpm' @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def test_positive_create_with_inclusion(self, inclusion, content_view): + def test_positive_create_with_inclusion(self, inclusion, content_view, target_sat): """Create new content view filter with different inclusion values :id: 81130dc9-ae33-48bc-96a7-d54d3e99448e @@ -184,12 +186,14 @@ def test_positive_create_with_inclusion(self, inclusion, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view, inclusion=inclusion).create() + cvf = target_sat.api.RPMContentViewFilter( + content_view=content_view, inclusion=inclusion + ).create() assert cvf.inclusion == inclusion @pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description, content_view): + def test_positive_create_with_description(self, description, content_view, target_sat): """Create new content filter using different inputs as a description :id: 
e057083f-e69d-46e7-b336-45faaf67fa52 @@ -203,14 +207,14 @@ def test_positive_create_with_description(self, description, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, description=description, ).create() assert cvf.description == description @pytest.mark.tier2 - def test_positive_create_with_repo(self, content_view, sync_repo): + def test_positive_create_with_repo(self, content_view, sync_repo, target_sat): """Create new content filter with repository assigned :id: 7207d4cf-3ccf-4d63-a50a-1373b16062e2 @@ -220,7 +224,7 @@ def test_positive_create_with_repo(self, content_view, sync_repo): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -230,7 +234,7 @@ def test_positive_create_with_repo(self, content_view, sync_repo): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) def test_positive_create_with_original_packages( - self, original_packages, content_view, sync_repo + self, original_packages, content_view, sync_repo, target_sat ): """Create new content view filter with different 'original packages' option values @@ -246,7 +250,7 @@ def test_positive_create_with_original_packages( :CaseImportance: Medium """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -255,7 +259,9 @@ def test_positive_create_with_original_packages( assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_create_with_docker_repos(self, module_product, sync_repo, content_view): + def test_positive_create_with_docker_repos( + self, module_product, sync_repo, content_view, module_target_sat + ): """Create new docker repository and add to content view that has yum repo already assigned to it. Create new content view filter and assign it to the content view. 
@@ -267,7 +273,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont :CaseLevel: Integration """ - docker_repository = entities.Repository( + docker_repository = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -276,7 +282,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont content_view.repository = [sync_repo, docker_repository] content_view.update(['repository']) - cvf = entities.RPMContentViewFilter( + cvf = module_target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo, docker_repository], @@ -290,7 +296,7 @@ def test_positive_create_with_docker_repos(self, module_product, sync_repo, cont (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) def test_positive_create_with_module_streams( - self, module_product, sync_repo, sync_repo_module_stream, content_view + self, module_product, sync_repo, sync_repo_module_stream, content_view, target_sat ): """Verify Include and Exclude Filters creation for modulemd (module streams) @@ -304,7 +310,7 @@ def test_positive_create_with_module_streams( content_view.repository += [sync_repo_module_stream] content_view.update(['repository']) for inclusion in (True, False): - cvf = entities.ModuleStreamContentViewFilter( + cvf = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=inclusion, repository=[sync_repo, sync_repo_module_stream], @@ -316,7 +322,7 @@ def test_positive_create_with_module_streams( @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_name(self, name, content_view): + def test_negative_create_with_invalid_name(self, name, content_view, target_sat): """Try to create content view filter using invalid names only :id: 8cf4227b-75c4-4d6f-b94f-88e4eb586435 @@ -330,10 +336,10 @@ def test_negative_create_with_invalid_name(self, name, content_view): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=content_view, name=name).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() @pytest.mark.tier2 - def test_negative_create_with_same_name(self, content_view): + def test_negative_create_with_same_name(self, content_view, target_sat): """Try to create content view filter using same name twice :id: 73a64ca7-07a3-49ee-8921-0474a16a23ff @@ -345,12 +351,12 @@ def test_negative_create_with_same_name(self, content_view): :CaseImportance: Low """ kwargs = {'content_view': content_view, 'name': gen_string('alpha')} - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() with pytest.raises(HTTPError): - entities.RPMContentViewFilter(**kwargs).create() + target_sat.api.RPMContentViewFilter(**kwargs).create() @pytest.mark.tier2 - def test_negative_create_without_cv(self): + def test_negative_create_without_cv(self, target_sat): """Try to create content view filter without providing content view @@ -363,10 +369,10 @@ def test_negative_create_without_cv(self): :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter(content_view=None).create() + target_sat.api.RPMContentViewFilter(content_view=None).create() @pytest.mark.tier2 - def test_negative_create_with_invalid_repo_id(self, content_view): + def test_negative_create_with_invalid_repo_id(self, content_view, target_sat): 
"""Try to create content view filter using incorrect repository id @@ -379,13 +385,13 @@ def test_negative_create_with_invalid_repo_id(self, content_view): :CaseImportance: Low """ with pytest.raises(HTTPError): - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=content_view, repository=[gen_integer(10000, 99999)], ).create() @pytest.mark.tier2 - def test_positive_delete_by_id(self, content_view): + def test_positive_delete_by_id(self, content_view, target_sat): """Delete content view filter :id: 07caeb9d-419d-43f8-996b-456b0cc0f70d @@ -396,14 +402,14 @@ def test_positive_delete_by_id(self, content_view): :CaseImportance: Critical """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.delete() with pytest.raises(HTTPError): cvf.read() @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_update_name(self, name, content_view): + def test_positive_update_name(self, name, content_view, target_sat): """Update content view filter with new name :id: f310c161-00d2-4281-9721-6e45cbc5e4ec @@ -415,13 +421,13 @@ def test_positive_update_name(self, name, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name assert cvf.update(['name']).name == name @pytest.mark.tier2 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_update_description(self, description, content_view): + def test_positive_update_description(self, description, content_view, target_sat): """Update content view filter with new description :id: f2c5db28-0163-4cf3-929a-16ba1cb98c34 @@ -435,14 +441,14 @@ def test_positive_update_description(self, description, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.description = description cvf = cvf.update(['description']) assert cvf.description == description @pytest.mark.tier2 @pytest.mark.parametrize('inclusion', [True, False]) - def test_positive_update_inclusion(self, inclusion, content_view): + def test_positive_update_inclusion(self, inclusion, content_view, target_sat): """Update content view filter with new inclusion value :id: 0aedd2d6-d020-4a90-adcd-01694b47c0b0 @@ -454,13 +460,13 @@ def test_positive_update_inclusion(self, inclusion, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.inclusion = inclusion cvf = cvf.update(['inclusion']) assert cvf.inclusion == inclusion @pytest.mark.tier2 - def test_positive_update_repo(self, module_product, sync_repo, content_view): + def test_positive_update_repo(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with new repository :id: 329ef155-c2d0-4aa2-bac3-79087ae49bdf @@ -470,12 +476,12 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + 
new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() content_view.repository = [new_repo] content_view.update(['repository']) @@ -485,7 +491,7 @@ def test_positive_update_repo(self, module_product, sync_repo, content_view): assert cvf.repository[0].id == new_repo.id @pytest.mark.tier2 - def test_positive_update_repos(self, module_product, sync_repo, content_view): + def test_positive_update_repos(self, module_product, sync_repo, content_view, target_sat): """Update content view filter with multiple repositories :id: 478fbb1c-fa1d-4fcd-93d6-3a7f47092ed3 @@ -497,12 +503,14 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - repos = [entities.Repository(product=module_product).create() for _ in range(randint(3, 5))] + repos = [ + target_sat.api.Repository(product=module_product).create() for _ in range(randint(3, 5)) + ] for repo in repos: repo.sync() content_view.repository = repos @@ -513,7 +521,9 @@ def test_positive_update_repos(self, module_product, sync_repo, content_view): @pytest.mark.tier2 @pytest.mark.parametrize('original_packages', [True, False]) - def test_positive_update_original_packages(self, original_packages, sync_repo, content_view): + def test_positive_update_original_packages( + self, original_packages, sync_repo, content_view, target_sat + ): """Update content view filter with new 'original packages' option value :id: 0c41e57a-afa3-479e-83ba-01f09f0fd2b6 @@ -525,7 +535,7 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], @@ -535,7 +545,9 @@ def test_positive_update_original_packages(self, original_packages, sync_repo, c assert cvf.original_packages == original_packages @pytest.mark.tier2 - def test_positive_update_repo_with_docker(self, module_product, sync_repo, content_view): + def test_positive_update_repo_with_docker( + self, module_product, sync_repo, content_view, target_sat + ): """Update existing content view filter which has yum repository assigned with new docker repository @@ -546,12 +558,12 @@ def test_positive_update_repo_with_docker(self, module_product, sync_repo, conte :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - docker_repository = entities.Repository( + docker_repository = target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=module_product.id, @@ -567,7 +579,7 @@ def test_positive_update_repo_with_docker(self, module_product, sync_repo, conte @pytest.mark.tier2 @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) - def test_negative_update_name(self, name, content_view): + def test_negative_update_name(self, name, content_view, target_sat): """Try to update content view filter using invalid names only :id: 9799648a-3900-4186-8271-6b2dedb547ab @@ -580,13 +592,13 @@ def test_negative_update_name(self, name, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() 
cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_same_name(self, content_view): + def test_negative_update_same_name(self, content_view, target_sat): """Try to update content view filter's name to already used one :id: b68569f1-9f7b-4a95-9e2a-a5da348abff7 @@ -598,14 +610,14 @@ def test_negative_update_same_name(self, content_view): :CaseImportance: Low """ name = gen_string('alpha', 8) - entities.RPMContentViewFilter(content_view=content_view, name=name).create() - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.RPMContentViewFilter(content_view=content_view, name=name).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.name = name with pytest.raises(HTTPError): cvf.update(['name']) @pytest.mark.tier2 - def test_negative_update_cv_by_id(self, content_view): + def test_negative_update_cv_by_id(self, content_view, target_sat): """Try to update content view filter using incorrect content view ID @@ -615,13 +627,13 @@ def test_negative_update_cv_by_id(self, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter(content_view=content_view).create() + cvf = target_sat.api.RPMContentViewFilter(content_view=content_view).create() cvf.content_view.id = gen_integer(10000, 99999) with pytest.raises(HTTPError): cvf.update(['content_view']) @pytest.mark.tier2 - def test_negative_update_repo_by_id(self, sync_repo, content_view): + def test_negative_update_repo_by_id(self, sync_repo, content_view, target_sat): """Try to update content view filter using incorrect repository ID @@ -631,7 +643,7 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): :CaseLevel: Integration """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, repository=[sync_repo], ).create() @@ -640,7 +652,7 @@ def test_negative_update_repo_by_id(self, sync_repo, content_view): cvf.update(['repository']) @pytest.mark.tier2 - def test_negative_update_repo(self, module_product, sync_repo, content_view): + def test_negative_update_repo(self, module_product, sync_repo, content_view, target_sat): """Try to update content view filter with new repository which doesn't belong to filter's content view @@ -652,12 +664,12 @@ def test_negative_update_repo(self, module_product, sync_repo, content_view): :CaseImportance: Low """ - cvf = entities.RPMContentViewFilter( + cvf = target_sat.api.RPMContentViewFilter( content_view=content_view, inclusion=True, repository=[sync_repo], ).create() - new_repo = entities.Repository(product=module_product).create() + new_repo = target_sat.api.Repository(product=module_product).create() new_repo.sync() cvf.repository = [new_repo] with pytest.raises(HTTPError): @@ -668,7 +680,7 @@ class TestContentViewFilterSearch: """Tests that search through content view filters.""" @pytest.mark.tier1 - def test_positive_search_erratum(self, content_view): + def test_positive_search_erratum(self, content_view, target_sat): """Search for an erratum content view filter's rules. 
:id: 6a86060f-6b4f-4688-8ea9-c198e0aeb3f6 @@ -679,11 +691,11 @@ def test_positive_search_erratum(self, content_view): :BZ: 1242534 """ - cv_filter = entities.ErratumContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.ErratumContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_package_group(self, content_view): + def test_positive_search_package_group(self, content_view, target_sat): """Search for a package group content view filter's rules. :id: 832c50cc-c2c8-48c9-9a23-80956baf5f3c @@ -692,11 +704,11 @@ def test_positive_search_package_group(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.PackageGroupContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.PackageGroupContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() @pytest.mark.tier1 - def test_positive_search_rpm(self, content_view): + def test_positive_search_rpm(self, content_view, target_sat): """Search for an rpm content view filter's rules. :id: 1c9058f1-35c4-46f2-9b21-155ef988564a @@ -705,8 +717,8 @@ def test_positive_search_rpm(self, content_view): :CaseImportance: Critical """ - cv_filter = entities.RPMContentViewFilter(content_view=content_view).create() - entities.ContentViewFilterRule(content_view_filter=cv_filter).search() + cv_filter = target_sat.api.RPMContentViewFilter(content_view=content_view).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter).search() class TestContentViewFilterRule: @@ -716,7 +728,9 @@ class TestContentViewFilterRule: (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_promote_module_stream_filter(self, module_org, content_view_module_stream): + def test_positive_promote_module_stream_filter( + self, module_org, content_view_module_stream, target_sat + ): """Verify Module Stream, Errata Count after Promote, Publish for Content View with Module Stream Exclude Filter @@ -729,14 +743,14 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo """ # Exclude module stream filter content_view = content_view_module_stream - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('duck')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -750,7 +764,7 @@ def test_positive_promote_module_stream_filter(self, module_org, content_view_mo assert content_view_version_info.errata_counts['total'] == 3 # Promote Content View - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) content_view = content_view.read() content_view_version_info = content_view.version[0].read() @@ -763,7 +777,9 @@ def 
test_positive_promote_module_stream_filter(self, module_org, content_view_mo (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_include_exclude_module_stream_filter(self, content_view_module_stream): + def test_positive_include_exclude_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Include and Exclude Errata filter(modular errata) automatically force the copy of the module streams associated to it. @@ -782,14 +798,14 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module :CaseLevel: Integration """ content_view = content_view_module_stream - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view = content_view.read() @@ -801,14 +817,14 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module # delete the previous content_view_filter cv_filter.delete() - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=False, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() content_view.publish() content_view_version_info = content_view.read().version[1].read() @@ -821,7 +837,7 @@ def test_positive_include_exclude_module_stream_filter(self, content_view_module (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_multi_level_filters(self, content_view_module_stream): + def test_positive_multi_level_filters(self, content_view_module_stream, target_sat): """Verify promotion of Content View and Verify count after applying multi_filters (errata and module stream) @@ -833,24 +849,24 @@ def test_positive_multi_level_filters(self, content_view_module_stream): """ content_view = content_view_module_stream # apply include errata filter - cv_filter = entities.ErratumContentViewFilter( + cv_filter = target_sat.api.ErratumContentViewFilter( content_view=content_view, inclusion=True, ).create() - errata = entities.Errata().search( + errata = target_sat.api.Errata().search( query={'search': f'errata_id="{settings.repos.module_stream_0.errata[2]}"'} )[0] - entities.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() + target_sat.api.ContentViewFilterRule(content_view_filter=cv_filter, errata=errata).create() # apply exclude module filter - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}"'.format('walrus')} ) - entities.ContentViewFilterRule( + 
target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() @@ -864,7 +880,9 @@ def test_positive_multi_level_filters(self, content_view_module_stream): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_dependency_solving_module_stream_filter(self, content_view_module_stream): + def test_positive_dependency_solving_module_stream_filter( + self, content_view_module_stream, target_sat + ): """Verify Module Stream Content View Filter's with Dependency Solve 'Yes'. If dependency solving enabled then module streams with deps will not get fetched over even if the exclude filter has been applied. @@ -882,14 +900,14 @@ def test_positive_dependency_solving_module_stream_filter(self, content_view_mod content_view = content_view_module_stream content_view.solve_dependencies = True content_view = content_view.update(['solve_dependencies']) - cv_filter = entities.ModuleStreamContentViewFilter( + cv_filter = target_sat.api.ModuleStreamContentViewFilter( content_view=content_view, inclusion=False, ).create() - module_streams = entities.ModuleStream().search( + module_streams = target_sat.api.ModuleStream().search( query={'search': 'name="{}" and version="{}'.format('duck', '20180704244205')} ) - entities.ContentViewFilterRule( + target_sat.api.ContentViewFilterRule( content_view_filter=cv_filter, module_stream=module_streams ).create() content_view.publish() diff --git a/tests/foreman/api/test_contentviewversion.py b/tests/foreman/api/test_contentviewversion.py index ca0e5e7cf36..5953edeed42 100644 --- a/tests/foreman/api/test_contentviewversion.py +++ b/tests/foreman/api/test_contentviewversion.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -31,11 +30,13 @@ @pytest.fixture(scope='module') -def module_lce_cv(module_org): +def module_lce_cv(module_org, module_target_sat): """Create some entities for all tests.""" - lce1 = entities.LifecycleEnvironment(organization=module_org).create() - lce2 = entities.LifecycleEnvironment(organization=module_org, prior=lce1).create() - default_cv = entities.ContentView(organization=module_org, name=DEFAULT_CV).search() + lce1 = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce2 = module_target_sat.api.LifecycleEnvironment(organization=module_org, prior=lce1).create() + default_cv = module_target_sat.api.ContentView( + organization=module_org, name=DEFAULT_CV + ).search() default_cvv = default_cv[0].version[0] return lce1, lce2, default_cvv @@ -68,7 +69,7 @@ def test_positive_create(module_cv): @pytest.mark.tier2 -def test_negative_create(module_org): +def test_negative_create(module_org, module_target_sat): """Attempt to create content view version using the 'Default Content View'. 
:id: 0afd49c6-f3a4-403e-9929-849f51ffa922 @@ -80,7 +81,7 @@ def test_negative_create(module_org): :CaseImportance: Critical """ # The default content view cannot be published - cv = entities.ContentView(organization=module_org.id, name=DEFAULT_CV).search() + cv = module_target_sat.api.ContentView(organization=module_org.id, name=DEFAULT_CV).search() # There should be only 1 record returned assert len(cv) == 1 with pytest.raises(HTTPError): @@ -91,8 +92,8 @@ def test_negative_create(module_org): @pytest.mark.tier2 -def test_positive_promote_valid_environment(module_lce_cv, module_org): - """Promote a content view version to 'next in sequence lifecycle environment. +def test_positive_promote_valid_environment(module_lce_cv, module_org, module_target_sat): + """Promote a content view version to next in sequence lifecycle environment. :id: f205ca06-8ab5-4546-83bd-deac4363d487 @@ -103,7 +104,7 @@ def test_positive_promote_valid_environment(module_lce_cv, module_org): :CaseImportance: Critical """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and promote it. cv.publish() # Refresh the entity @@ -123,7 +124,7 @@ def test_positive_promote_valid_environment(module_lce_cv, module_org): @pytest.mark.tier2 -def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv): +def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv, module_target_sat): """Promote a content view version to a lifecycle environment that is 'out of sequence'. @@ -134,7 +135,7 @@ def test_positive_promote_out_of_sequence_environment(module_org, module_lce_cv) :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -168,7 +169,7 @@ def test_negative_promote_valid_environment(module_lce_cv): @pytest.mark.tier2 -def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org): +def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org, module_target_sat): """Attempt to promote a content view version to a Lifecycle environment that is 'out of sequence'. @@ -179,7 +180,7 @@ def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org) :CaseLevel: Integration """ # Create a new content view... - cv = entities.ContentView(organization=module_org).create() + cv = module_target_sat.api.ContentView(organization=module_org).create() # ... and publish it. cv.publish() # Refresh the entity @@ -197,7 +198,7 @@ def test_negative_promote_out_of_sequence_environment(module_lce_cv, module_org) @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete(module_org, module_product): +def test_positive_delete(module_org, module_product, module_target_sat): """Create content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -213,15 +214,15 @@ def test_positive_delete(module_org, module_product): :CaseImportance: Critical """ key_content = DataFile.ZOO_CUSTOM_GPG_KEY.read_text() - gpgkey = entities.GPGKey(content=key_content, organization=module_org).create() + gpgkey = module_target_sat.api.GPGKey(content=key_content, organization=module_org).create() # Creates new repository with GPGKey - repo = entities.Repository( + repo = module_target_sat.api.Repository( gpg_key=gpgkey, product=module_product, url=settings.repos.yum_1.url ).create() # sync repository repo.sync() # Create content view - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Associate repository to new content view content_view.repository = [repo] content_view = content_view.update(['repository']) @@ -242,7 +243,7 @@ def test_positive_delete(module_org, module_product): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_delete_non_default(module_org): +def test_positive_delete_non_default(module_org, module_target_sat): """Create content view and publish and promote it to new environment. After that try to disassociate content view from 'Library' and one more non-default environment through 'delete_from_environment' @@ -256,13 +257,13 @@ def test_positive_delete_non_default(module_org): :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() assert len(content_view.version) == 1 assert len(content_view.version[0].read().environment) == 1 - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) cvv = content_view.version[0].read() assert len(cvv.environment) == 2 @@ -277,7 +278,7 @@ def test_positive_delete_non_default(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_composite_version(module_org): +def test_positive_delete_composite_version(module_org, module_target_sat): """Create composite content view and publish it. 
After that try to disassociate content view from 'Library' environment through 'delete_from_environment' command and delete content view version from @@ -293,14 +294,16 @@ def test_positive_delete_composite_version(module_org): :BZ: 1276479 """ # Create product with repository and publish it - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_1.url).create() + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository(product=product, url=settings.repos.yum_1.url).create() repo.sync() # Create and publish content views - content_view = entities.ContentView(organization=module_org, repository=[repo]).create() + content_view = module_target_sat.api.ContentView( + organization=module_org, repository=[repo] + ).create() content_view.publish() # Create and publish composite content view - composite_cv = entities.ContentView( + composite_cv = module_target_sat.api.ContentView( organization=module_org, composite=True, component=[content_view.read().version[0]] ).create() composite_cv.publish() @@ -318,7 +321,7 @@ def test_positive_delete_composite_version(module_org): @pytest.mark.tier2 -def test_negative_delete(module_org): +def test_negative_delete(module_org, module_target_sat): """Create content view and publish it. Try to delete content view version while content view is still associated with lifecycle environment @@ -331,7 +334,7 @@ def test_negative_delete(module_org): :CaseImportance: Critical """ - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() # Publish content view content_view.publish() content_view = content_view.read() @@ -344,7 +347,7 @@ def test_negative_delete(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_renamed_cv_version_from_default_env(module_org): +def test_positive_remove_renamed_cv_version_from_default_env(module_org, module_target_sat): """Remove version of renamed content view from Library environment :id: 7d5961d0-6a9a-4610-979e-cbc4ddbc50ca @@ -364,11 +367,13 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): """ new_name = gen_string('alpha') # create yum product and repo - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo] content_view = content_view.update(['repository']) # publish the content view @@ -377,7 +382,9 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() # ensure that the 
content view version is promoted to the Library # lifecycle environment assert lce_library.name == ENVIRONMENT @@ -393,7 +400,7 @@ def test_positive_remove_renamed_cv_version_from_default_env(module_org): @pytest.mark.tier2 -def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove QE promoted content view version from Library environment :id: c7795762-93bd-419c-ac49-d10dc26b842b @@ -412,10 +419,12 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - product = entities.Product(organization=module_org).create() - docker_repo = entities.Repository( + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -423,7 +432,7 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the docker repo - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -432,7 +441,9 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV and QE lifecycle environments for lce in [lce_dev, lce_qe]: @@ -449,7 +460,7 @@ def test_positive_remove_qe_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): +def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org, module_target_sat): """Remove PROD promoted content view version from Library environment :id: 24911876-7c2a-4a12-a3aa-98051dfda29d @@ -468,13 +479,19 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + 
organization=module_org, prior=lce_dev + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -482,7 +499,7 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): ).create() docker_repo.sync() # create a content view and add to it the yum and docker repos - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -491,7 +508,9 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE PROD lifecycle environments for lce in [lce_dev, lce_qe, lce_prod]: @@ -510,7 +529,7 @@ def test_positive_remove_prod_promoted_cv_version_from_default_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_env(module_org): +def test_positive_remove_cv_version_from_env(module_org, module_target_sat): """Remove promoted content view version from environment :id: 17cf18bf-09d5-4641-b0e0-c50e628fa6c8 @@ -532,15 +551,23 @@ def test_positive_remove_cv_version_from_env(module_org): :CaseLevel: Integration """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() # docker repo - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -548,7 +575,7 @@ def test_positive_remove_cv_version_from_env(module_org): ).create() 
docker_repo.sync() # create a content view and add the yum and docker repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -557,7 +584,9 @@ def test_positive_remove_cv_version_from_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -582,7 +611,7 @@ def test_positive_remove_cv_version_from_env(module_org): @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_remove_cv_version_from_multi_env(module_org): +def test_positive_remove_cv_version_from_multi_env(module_org, module_target_sat): """Remove promoted content view version from multiple environments :id: 18b86a68-8e6a-43ea-b95e-188fba125a26 @@ -602,14 +631,22 @@ def test_positive_remove_cv_version_from_multi_env(module_org): :CaseImportance: Low """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -617,7 +654,7 @@ def test_positive_remove_cv_version_from_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -626,7 +663,9 @@ def test_positive_remove_cv_version_from_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = 
module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments @@ -648,7 +687,7 @@ def test_positive_remove_cv_version_from_multi_env(module_org): @pytest.mark.upgrade @pytest.mark.tier2 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_delete_cv_promoted_to_multi_env(module_org): +def test_positive_delete_cv_promoted_to_multi_env(module_org, module_target_sat): """Delete published content view with version promoted to multiple environments @@ -664,20 +703,28 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): 5. Delete the content view, this should delete the content with all it's published/promoted versions from all environments - :expectedresults: The content view doesn't exists + :expectedresults: The content view doesn't exist :CaseLevel: Integration :CaseImportance: Critical """ - lce_dev = entities.LifecycleEnvironment(organization=module_org).create() - lce_qe = entities.LifecycleEnvironment(organization=module_org, prior=lce_dev).create() - lce_stage = entities.LifecycleEnvironment(organization=module_org, prior=lce_qe).create() - lce_prod = entities.LifecycleEnvironment(organization=module_org, prior=lce_stage).create() - product = entities.Product(organization=module_org).create() - yum_repo = entities.Repository(url=settings.repos.yum_1.url, product=product).create() + lce_dev = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + lce_qe = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_dev + ).create() + lce_stage = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_qe + ).create() + lce_prod = module_target_sat.api.LifecycleEnvironment( + organization=module_org, prior=lce_stage + ).create() + product = module_target_sat.api.Product(organization=module_org).create() + yum_repo = module_target_sat.api.Repository( + url=settings.repos.yum_1.url, product=product + ).create() yum_repo.sync() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type='docker', docker_upstream_name='busybox', product=product, @@ -685,7 +732,7 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): ).create() docker_repo.sync() # create a content view and add the yum repo to it - content_view = entities.ContentView(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.repository = [yum_repo, docker_repo] content_view = content_view.update(['repository']) # publish the content view @@ -694,7 +741,9 @@ def test_positive_delete_cv_promoted_to_multi_env(module_org): assert len(content_view.version) == 1 content_view_version = content_view.version[0].read() assert len(content_view_version.environment) == 1 - lce_library = entities.LifecycleEnvironment(id=content_view_version.environment[0].id).read() + lce_library = module_target_sat.api.LifecycleEnvironment( + id=content_view_version.environment[0].id + ).read() assert lce_library.name == ENVIRONMENT # promote content view version to DEV QE STAGE PROD lifecycle # environments diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index 51ac5fec162..528647a3341 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -16,16 
+16,34 @@ :Upstream: No """ -from fauxfactory import gen_choice, gen_integer +from fauxfactory import gen_choice, gen_integer, gen_string import pytest from requests.exceptions import HTTPError from robottelo.utils.datafactory import valid_data_list +@pytest.fixture(scope='module') +def module_hostgroup(module_org, module_target_sat): + module_hostgroup = module_target_sat.api.HostGroup(organization=[module_org]).create() + yield module_hostgroup + module_hostgroup.delete() + + +@pytest.fixture(scope='module') +def module_location(module_location): + yield module_location + module_location.delete() + + +@pytest.fixture(scope='module') +def module_org(module_org): + yield module_org + module_org.delete() + + @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, target_sat): +def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, module_target_sat): """Create a new discovery rule with several attributes, update them and delete the rule itself. @@ -47,7 +65,7 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, name = gen_choice(list(valid_data_list().values())) search = gen_choice(searches) hostname = 'myhost-<%= rand(99999) %>' - discovery_rule = target_sat.api.DiscoveryRule( + discovery_rule = module_target_sat.api.DiscoveryRule( name=name, search_=search, hostname=hostname, @@ -83,6 +101,21 @@ def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup, discovery_rule.read() +@pytest.mark.tier1 +def test_negative_create_with_invalid_host_limit_and_priority(module_target_sat): + """Create a discovery rule with invalid host limit and priority + + :id: e3c7acb1-ac56-496b-ac04-2a83f66ec290 + + :expectedresults: Validation error should be raised + """ + with pytest.raises(HTTPError): + module_target_sat.api.DiscoveryRule(max_count=gen_string('alpha')).create() + with pytest.raises(HTTPError): + module_target_sat.api.DiscoveryRule(priority=gen_string('alpha')).create() + + +@pytest.mark.stubbed @pytest.mark.tier3 def test_positive_update_and_provision_with_rule_priority( module_target_sat, module_discovery_hostgroup, discovery_location, discovery_org diff --git a/tests/foreman/api/test_docker.py b/tests/foreman/api/test_docker.py index e87dac66fb6..4b6dc88537c 100644 --- a/tests/foreman/api/test_docker.py +++ b/tests/foreman/api/test_docker.py @@ -15,7 +15,6 @@ from random import choice, randint, shuffle from fauxfactory import gen_string, gen_url -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -31,7 +30,7 @@ DOCKER_PROVIDER = 'Docker' -def _create_repository(product, name=None, upstream_name=None): +def _create_repository(module_target_sat, product, name=None, upstream_name=None): """Create a Docker-based repository. :param product: A ``Product`` object.
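For orientation, a minimal sketch (illustrative, not part of the patch) of the module-scoped create/yield/delete fixture pattern that the discovery-rule fixtures above follow; the module_target_sat satellite fixture and its .api entity proxy are assumed to come from the surrounding robottelo conftest.

import pytest


@pytest.fixture(scope='module')
def module_example_org(module_target_sat):
    # Create the entity once per test module, hand it to the tests,
    # then clean it up on teardown.
    org = module_target_sat.api.Organization().create()
    yield org
    org.delete()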
@@ -45,7 +44,7 @@ def _create_repository(product, name=None, upstream_name=None): name = choice(generate_strings_list(15, ['numeric', 'html'])) if upstream_name is None: upstream_name = CONTAINER_UPSTREAM_NAME - return entities.Repository( + return module_target_sat.api.Repository( content_type='docker', docker_upstream_name=upstream_name, name=name, @@ -55,21 +54,21 @@ @pytest.fixture -def repo(module_product): +def repo(module_product, module_target_sat): """Create a single repository.""" - return _create_repository(module_product) + return _create_repository(module_target_sat, module_product) @pytest.fixture -def repos(module_product): +def repos(module_product, module_target_sat): """Create and return a list of repositories.""" - return [_create_repository(module_product) for _ in range(randint(2, 5))] + return [_create_repository(module_target_sat, module_product) for _ in range(randint(2, 5))] @pytest.fixture -def content_view(module_org): +def content_view(module_org, module_target_sat): """Create a content view.""" - return entities.ContentView(composite=False, organization=module_org).create() + return module_target_sat.api.ContentView(composite=False, organization=module_org).create() @pytest.fixture @@ -107,7 +106,7 @@ class TestDockerRepository: @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_docker_repository_names())) - def test_positive_create_with_name(self, module_product, name): + def test_positive_create_with_name(self, module_product, name, module_target_sat): """Create one Docker-type repository :id: 3360aab2-74f3-4f6e-a083-46498ceacad2 @@ -119,14 +118,16 @@ def test_positive_create_with_name(self, module_product, name): :CaseImportance: Critical """ - repo = _create_repository(module_product, name) + repo = _create_repository(module_target_sat, module_product, name) assert repo.name == name assert repo.docker_upstream_name == CONTAINER_UPSTREAM_NAME assert repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(valid_docker_upstream_names())) - def test_positive_create_with_upstream_name(self, module_product, upstream_name): + def test_positive_create_with_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with a valid docker upstream name @@ -139,13 +140,15 @@ def test_positive_create_with_upstream_name(self, module_product, upstream_name) :CaseImportance: Critical """ - repo = _create_repository(module_product, upstream_name=upstream_name) + repo = _create_repository(module_target_sat, module_product, upstream_name=upstream_name) assert repo.docker_upstream_name == upstream_name assert repo.content_type == 'docker' @pytest.mark.tier1 @pytest.mark.parametrize('upstream_name', **parametrized(invalid_docker_upstream_names())) - def test_negative_create_with_invalid_upstream_name(self, module_product, upstream_name): + def test_negative_create_with_invalid_upstream_name( + self, module_product, upstream_name, module_target_sat + ): """Create a Docker-type repository with an invalid docker upstream name.
@@ -159,25 +162,25 @@ def test_negative_create_with_invalid_upstream_name(self, module_product, upstre :CaseImportance: Critical """ with pytest.raises(HTTPError): - _create_repository(module_product, upstream_name=upstream_name) + _create_repository(module_target_sat, module_product, upstream_name=upstream_name) @pytest.mark.tier2 - def test_positive_create_repos_using_same_product(self, module_product): + def test_positive_create_repos_using_same_product(self, module_product, module_target_sat): """Create multiple Docker-type repositories :id: 4a6929fc-5111-43ff-940c-07a754828630 :expectedresults: Multiple docker repositories are created with a - Docker usptream repository and they all belong to the same product. + Docker upstream repository and they all belong to the same product. :CaseLevel: Integration """ for _ in range(randint(2, 5)): - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) assert repo.id in [repo_.id for repo_ in module_product.read().repository] @pytest.mark.tier2 - def test_positive_create_repos_using_multiple_products(self, module_org): + def test_positive_create_repos_using_multiple_products(self, module_org, module_target_sat): """Create multiple Docker-type repositories on multiple products :id: 5a65d20b-d3b5-4bd7-9c8f-19c8af190558 @@ -189,14 +192,14 @@ def test_positive_create_repos_using_multiple_products(self, module_org): :CaseLevel: Integration """ for _ in range(randint(2, 5)): - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 3)): - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) product = product.read() assert repo.id in [repo_.id for repo_ in product.repository] @pytest.mark.tier1 - def test_positive_sync(self, module_product): + def test_positive_sync(self, module_product, module_target_sat): """Create and sync a Docker-type repository :id: 80fbcd84-1c6f-444f-a44e-7d2738a0cba2 @@ -206,14 +209,14 @@ def test_positive_sync(self, module_product): :CaseImportance: Critical """ - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['docker_manifest'] >= 1 @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_docker_repository_names())) - def test_positive_update_name(self, module_product, repo, new_name): + def test_positive_update_name(self, repo, new_name): """Create a Docker-type repository and update its name. :id: 7967e6b5-c206-4ad0-bcf5-64a7ce85233b @@ -249,7 +252,7 @@ def test_positive_update_upstream_name(self, repo): assert repo.docker_upstream_name == new_upstream_name @pytest.mark.tier2 - def test_positive_update_url(self, module_product, repo): + def test_positive_update_url(self, repo): """Create a Docker-type repository and update its URL. :id: 6a588e65-bf1d-4ca9-82ce-591f9070215f @@ -285,7 +288,7 @@ def test_positive_delete(self, repo): repo.read() @pytest.mark.tier2 - def test_positive_delete_random_repo(self, module_org): + def test_positive_delete_random_repo(self, module_org, module_target_sat): """Create Docker-type repositories on multiple products and delete a random repository from a random product. 
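A brief usage sketch of the reworked _create_repository helper shown above (illustrative only; the test name is hypothetical, and the module_target_sat and module_product fixtures are assumed to come from the surrounding conftest).

def test_example_create_and_sync_docker_repo(module_target_sat, module_product):
    # The helper now receives the satellite object explicitly instead of
    # relying on the global nailgun entities module.
    repo = _create_repository(module_target_sat, module_product, name='example-busybox')
    repo.sync(timeout=600)
    assert repo.read().content_counts['docker_manifest'] >= 1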
@@ -296,10 +299,11 @@ def test_positive_delete_random_repo(self, module_org): """ repos = [] products = [ - entities.Product(organization=module_org).create() for _ in range(randint(2, 5)) + module_target_sat.api.Product(organization=module_org).create() + for _ in range(randint(2, 5)) ] for product in products: - repo = _create_repository(product) + repo = _create_repository(module_target_sat, product) assert repo.content_type == 'docker' repos.append(repo) @@ -341,7 +345,7 @@ def test_positive_add_docker_repo(self, repo, content_view): assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repos(self, module_org, module_product, content_view): + def test_positive_add_docker_repos(self, module_target_sat, module_product, content_view): """Add multiple Docker-type repositories to a non-composite content view. @@ -351,7 +355,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie and the product is added to a non-composite content view. """ repos = [ - _create_repository(module_product, name=gen_string('alpha')) + _create_repository(module_target_sat, module_product, name=gen_string('alpha')) for _ in range(randint(2, 5)) ] repo_ids = {r.id for r in repos} @@ -369,7 +373,7 @@ def test_positive_add_docker_repos(self, module_org, module_product, content_vie assert r.docker_upstream_name == CONTAINER_UPSTREAM_NAME @pytest.mark.tier2 - def test_positive_add_synced_docker_repo(self, module_org, module_product): + def test_positive_add_synced_docker_repo(self, module_org, module_product, module_target_sat): """Create and sync a Docker-type repository :id: 3c7d6f17-266e-43d3-99f8-13bf0251eca6 @@ -377,19 +381,21 @@ def test_positive_add_synced_docker_repo(self, module_org, module_product): :expectedresults: A repository is created with a Docker repository and it is synchronized. """ - repo = _create_repository(module_product) + repo = _create_repository(module_target_sat, module_product) repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['docker_manifest'] > 0 # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @pytest.mark.tier2 - def test_positive_add_docker_repo_to_ccv(self, module_org): + def test_positive_add_docker_repo_to_ccv(self, module_org, module_target_sat): """Add one Docker-type repository to a composite content view :id: fe278275-2bb2-4d68-8624-f0cfd63ecb57 @@ -398,10 +404,14 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): the product is added to a content view which is then added to a composite content view. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -412,7 +422,9 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): assert len(content_view.version) == 1 # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = content_view.version comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -420,7 +432,7 @@ def test_positive_add_docker_repo_to_ccv(self, module_org): ] @pytest.mark.tier2 - def test_positive_add_docker_repos_to_ccv(self, module_org): + def test_positive_add_docker_repos_to_ccv(self, module_org, module_target_sat): """Add multiple Docker-type repositories to a composite content view. @@ -431,11 +443,13 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): views which are then added to a composite content view. """ cv_versions = [] - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(randint(2, 5)): # Create content view and associate docker repo - content_view = entities.ContentView(composite=False, organization=module_org).create() - repo = _create_repository(product) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() + repo = _create_repository(module_target_sat, product) content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -446,14 +460,16 @@ def test_positive_add_docker_repos_to_ccv(self, module_org): cv_versions.append(content_view.version[0]) # Create composite content view and associate content view to it - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() for cv_version in cv_versions: comp_content_view.component.append(cv_version) comp_content_view = comp_content_view.update(['component']) assert cv_version.id in [component.id for component in comp_content_view.component] @pytest.mark.tier2 - def test_positive_publish_with_docker_repo(self, module_org): + def test_positive_publish_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it once. :id: 86a73e96-ead6-41fb-8095-154a0b83e344 @@ -462,9 +478,13 @@ def test_positive_publish_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published only once. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -481,7 +501,7 @@ def test_positive_publish_with_docker_repo(self, module_org): assert float(content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_with_docker_repo_composite(self, module_org): + def test_positive_publish_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to composite content view and publish it once. @@ -494,8 +514,12 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): :BZ: 1217635 """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert repo.id in [repo_.id for repo_ in content_view.repository] @@ -512,7 +536,9 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(content_view.next_version) > 1.0 # Create composite content view… - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert content_view.version[0].id in [ @@ -526,7 +552,7 @@ def test_positive_publish_with_docker_repo_composite(self, module_org): assert float(comp_content_view.next_version) > 1.0 @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo(self, module_org): + def test_positive_publish_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it multiple times. @@ -536,8 +562,12 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): repository and the product is added to a content view which is then published multiple times. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -551,7 +581,9 @@ def test_positive_publish_multiple_with_docker_repo(self, module_org): assert len(content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): + def test_positive_publish_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it multiple times. @@ -562,8 +594,12 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): added to a composite content view which is then published multiple times. """ - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -573,7 +609,9 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): content_view = content_view.read() assert content_view.last_published is not None - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [content_view.version[0]] comp_content_view = comp_content_view.update(['component']) assert [content_view.version[0].id] == [comp.id for comp in comp_content_view.component] @@ -587,7 +625,7 @@ def test_positive_publish_multiple_with_docker_repo_composite(self, module_org): assert len(comp_content_view.version) == publish_amount @pytest.mark.tier2 - def test_positive_promote_with_docker_repo(self, module_org): + def test_positive_promote_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to the next available lifecycle-environment. @@ -596,10 +634,14 @@ def test_positive_promote_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -613,7 +655,7 @@ def test_positive_promote_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 2 @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo(self, module_org): + def test_positive_promote_multiple_with_docker_repo(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then promote it to multiple available lifecycle-environments. @@ -622,9 +664,13 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. """ - repo = _create_repository(entities.Product(organization=module_org).create()) + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -634,12 +680,12 @@ def test_positive_promote_multiple_with_docker_repo(self, module_org): assert len(cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(cvv.read().environment) == i + 1 @pytest.mark.tier2 - def test_positive_promote_with_docker_repo_composite(self, module_org): + def test_positive_promote_with_docker_repo_composite(self, module_org, module_target_sat): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the next available lifecycle-environment. @@ -649,9 +695,13 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environment. 
""" - lce = entities.LifecycleEnvironment(organization=module_org).create() - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -659,7 +709,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -673,7 +725,9 @@ def test_positive_promote_with_docker_repo_composite(self, module_org): @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): + def test_positive_promote_multiple_with_docker_repo_composite( + self, module_org, module_target_sat + ): """Add Docker-type repository to content view and publish it. Then add that content view to composite one. Publish and promote that composite content view to the multiple available lifecycle-environments @@ -683,8 +737,12 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): :expectedresults: Docker-type repository is promoted to content view found in the specific lifecycle-environments. 
""" - repo = _create_repository(entities.Product(organization=module_org).create()) - content_view = entities.ContentView(composite=False, organization=module_org).create() + repo = _create_repository( + module_target_sat, module_target_sat.api.Product(organization=module_org).create() + ) + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) assert [repo.id] == [repo_.id for repo_ in content_view.repository] @@ -692,7 +750,9 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): content_view.publish() cvv = content_view.read().version[0].read() - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -702,13 +762,13 @@ def test_positive_promote_multiple_with_docker_repo_composite(self, module_org): assert len(comp_cvv.read().environment) == 1 for i in range(1, randint(3, 6)): - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() comp_cvv.promote(data={'environment_ids': lce.id, 'force': False}) assert len(comp_cvv.read().environment) == i + 1 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_name_pattern_change(self, module_org): + def test_positive_name_pattern_change(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change registry name pattern for that environment. Verify that repository name on product changed according to new pattern. @@ -725,19 +785,23 @@ def test_positive_name_pattern_change(self, module_org): ) repo = _create_repository( - entities.Product(organization=module_org).create(), upstream_name=docker_upstream_name + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), + upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -746,7 +810,7 @@ def test_positive_name_pattern_change(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_positive_product_name_change_after_promotion(self, module_org): + def test_positive_product_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change product name. 
Verify that repository name on product changed according to new pattern. @@ -761,21 +825,23 @@ def test_positive_product_name_change_after_promotion(self, module_org): docker_upstream_name = 'hello-world' new_pattern = "<%= organization.label %>/<%= product.name %>" - prod = entities.Product(organization=module_org, name=old_prod_name).create() - repo = _create_repository(prod, upstream_name=docker_upstream_name) + prod = module_target_sat.api.Product(organization=module_org, name=old_prod_name).create() + repo = _create_repository(module_target_sat, prod, upstream_name=docker_upstream_name) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) prod.name = new_prod_name prod.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -785,7 +851,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -793,7 +859,7 @@ def test_positive_product_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_positive_repo_name_change_after_promotion(self, module_org): + def test_positive_repo_name_change_after_promotion(self, module_org, module_target_sat): """Promote content view with Docker repository to lifecycle environment. Change repository name. Verify that Docker repository name on product changed according to new pattern. 
@@ -809,23 +875,26 @@ def test_positive_repo_name_change_after_promotion(self, module_org): new_pattern = "<%= organization.label %>/<%= repository.name %>" repo = _create_repository( - entities.Product(organization=module_org).create(), + module_target_sat, + module_target_sat.api.Product(organization=module_org).create(), name=old_repo_name, upstream_name=docker_upstream_name, ) repo.sync(timeout=600) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = [repo] content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) cvv.promote(data={'environment_ids': lce.id, 'force': False}) repo.name = new_repo_name repo.update(['name']) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -835,7 +904,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): content_view.publish() cvv = content_view.read().version[-1] cvv.promote(data={'environment_ids': lce.id, 'force': False}) - repos = entities.Repository(organization=module_org).search( + repos = module_target_sat.api.Repository(organization=module_org).search( query={'environment_id': lce.id} ) @@ -843,7 +912,7 @@ def test_positive_repo_name_change_after_promotion(self, module_org): assert repos[0].container_repository_name == expected_pattern @pytest.mark.tier2 - def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_lce): + def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, module_target_sat): """Set registry name pattern to one that does not guarantee uniqueness. Try to promote content view with multiple Docker repositories to lifecycle environment. Verify that content has not been promoted. 
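As a side note, a rough sketch of how an ERB-style registry name pattern such as "<%= organization.label %>/<%= repository.name %>" maps to a concrete container repository name; the substitution helper, the sample label and name values, and the lowercasing step are assumptions for illustration, not taken from the test code or the server-side implementation.

def render_registry_name(pattern: str, values: dict[str, str]) -> str:
    # Substitute each "<%= key %>" placeholder, then lowercase the result
    # (assumption: container repository names are normalized to lowercase).
    for key, value in values.items():
        pattern = pattern.replace(f'<%= {key} %>', value)
    return pattern.lower()


print(render_registry_name(
    '<%= organization.label %>/<%= repository.name %>',
    {'organization.label': 'Example_Org', 'repository.name': 'New-Repo'},
))  # -> example_org/new-repo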
@@ -855,16 +924,18 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() lce.registry_name_pattern = new_pattern lce = lce.update(['registry_name_pattern']) - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() @@ -873,7 +944,7 @@ def test_negative_set_non_unique_name_pattern_and_promote(self, module_org, modu cvv.promote(data={'environment_ids': lce.id, 'force': False}) @pytest.mark.tier2 - def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): + def test_negative_promote_and_set_non_unique_name_pattern(self, module_org, module_target_sat): """Promote content view with multiple Docker repositories to lifecycle environment. Set registry name pattern to one that does not guarantee uniqueness. Verify that pattern has not been @@ -886,18 +957,20 @@ def test_negative_promote_and_set_non_unique_name_pattern(self, module_org): docker_upstream_names = ['hello-world', 'alpine'] new_pattern = "<%= organization.label %>" - prod = entities.Product(organization=module_org).create() + prod = module_target_sat.api.Product(organization=module_org).create() repos = [] for docker_name in docker_upstream_names: - repo = _create_repository(prod, upstream_name=docker_name) + repo = _create_repository(module_target_sat, prod, upstream_name=docker_name) repo.sync(timeout=600) repos.append(repo) - content_view = entities.ContentView(composite=False, organization=module_org).create() + content_view = module_target_sat.api.ContentView( + composite=False, organization=module_org + ).create() content_view.repository = repos content_view = content_view.update(['repository']) content_view.publish() cvv = content_view.read().version[0] - lce = entities.LifecycleEnvironment(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() cvv.promote(data={'environment_ids': lce.id, 'force': False}) with pytest.raises(HTTPError): lce.registry_name_pattern = new_pattern @@ -916,7 +989,7 @@ class TestDockerActivationKey: @pytest.mark.tier2 def test_positive_add_docker_repo_cv( - self, module_lce, module_org, repo, content_view_publish_promote + self, module_lce, module_org, repo, content_view_publish_promote, module_target_sat ): """Add Docker-type repository to a non-composite content view and publish it. 
Then create an activation key and associate it with the @@ -928,7 +1001,7 @@ def test_positive_add_docker_repo_cv( key """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -936,7 +1009,7 @@ def test_positive_add_docker_repo_cv( @pytest.mark.tier2 def test_positive_remove_docker_repo_cv( - self, module_org, module_lce, content_view_publish_promote + self, module_org, module_lce, content_view_publish_promote, module_target_sat ): """Create an activation key and associate it with the Docker content view. Then remove this content view from the activation key. @@ -949,7 +1022,7 @@ def test_positive_remove_docker_repo_cv( :CaseLevel: Integration """ content_view = content_view_publish_promote - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == content_view.id @@ -957,7 +1030,9 @@ def test_positive_remove_docker_repo_cv( assert ak.update(['content_view']).content_view is None @pytest.mark.tier2 - def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, module_org): + def test_positive_add_docker_repo_ccv( + self, content_view_version, module_lce, module_org, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. Create an activation key and associate it with the @@ -969,7 +1044,9 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo key """ cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -978,13 +1055,15 @@ def test_positive_add_docker_repo_ccv(self, content_view_version, module_lce, mo comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id @pytest.mark.tier2 - def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_view_version): + def test_positive_remove_docker_repo_ccv( + self, module_lce, module_org, content_view_version, module_target_sat + ): """Add Docker-type repository to a non-composite content view and publish it. Then add this content view to a composite content view and publish it. Create an activation key and associate it with the @@ -997,7 +1076,9 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v then removed from the activation key. 
""" cvv = content_view_version - comp_content_view = entities.ContentView(composite=True, organization=module_org).create() + comp_content_view = module_target_sat.api.ContentView( + composite=True, organization=module_org + ).create() comp_content_view.component = [cvv] comp_content_view = comp_content_view.update(['component']) assert cvv.id == comp_content_view.component[0].id @@ -1006,7 +1087,7 @@ def test_positive_remove_docker_repo_ccv(self, module_lce, module_org, content_v comp_cvv = comp_content_view.read().version[0].read() comp_cvv.promote(data={'environment_ids': module_lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=comp_content_view, environment=module_lce, organization=module_org ).create() assert ak.content_view.id == comp_content_view.id diff --git a/tests/foreman/api/test_errata.py b/tests/foreman/api/test_errata.py index f33931fe833..d88b27411bc 100644 --- a/tests/foreman/api/test_errata.py +++ b/tests/foreman/api/test_errata.py @@ -40,8 +40,8 @@ @pytest.fixture(scope='module') -def activation_key(module_org, module_lce): - activation_key = entities.ActivationKey( +def activation_key(module_org, module_lce, module_target_sat): + activation_key = module_target_sat.api.ActivationKey( environment=module_lce, organization=module_org ).create() return activation_key @@ -339,7 +339,7 @@ def test_positive_sorted_issue_date_and_filter_by_cve(module_org, custom_repo, t :CaseLevel: System """ # Errata is sorted by issued date. - erratum_list = entities.Errata(repository=custom_repo['repository-id']).search( + erratum_list = target_sat.api.Errata(repository=custom_repo['repository-id']).search( query={'order': 'issued ASC', 'per_page': '1000'} ) issued = [errata.issued for errata in erratum_list] @@ -374,28 +374,28 @@ def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): reposet=constants.REPOSET['rhva6'], releasever=constants.DEFAULT_RELEASE_VERSION, ) - rh_repo = entities.Repository(id=rh_repo_id_rhva).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id_rhva).read() rh_repo.sync() - host_tools_product = entities.Product(organization=org).create() - host_tools_repo = entities.Repository( + host_tools_product = module_target_sat.api.Product(organization=org).create() + host_tools_repo = module_target_sat.api.Repository( product=host_tools_product, ).create() host_tools_repo.url = settings.repos.SATCLIENT_REPO.RHEL6 host_tools_repo = host_tools_repo.update(['url']) host_tools_repo.sync() - custom_product = entities.Product(organization=org).create() - custom_repo = entities.Repository( + custom_product = module_target_sat.api.Product(organization=org).create() + custom_repo = module_target_sat.api.Repository( product=custom_product, ).create() custom_repo.url = CUSTOM_REPO_URL custom_repo = custom_repo.update(['url']) custom_repo.sync() - lce = entities.LifecycleEnvironment(organization=org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() - cv = entities.ContentView( + cv = module_target_sat.api.ContentView( organization=org, repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id], ).create() @@ -403,11 +403,13 @@ def setup_content_rhel6(module_entitlement_manifest_org, module_target_sat): cvv = cv.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - ak = entities.ActivationKey(content_view=cv, organization=org, environment=lce).create() + ak = module_target_sat.api.ActivationKey( + content_view=cv, 
organization=org, environment=lce + ).create() sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name] for sub_name in sub_list: - subscription = entities.Subscription(organization=org).search( + subscription = module_target_sat.api.Subscription(organization=org).search( query={'search': f'name="{sub_name}"'} )[0] ak.add_subscriptions(data={'subscription_id': subscription.id}) @@ -504,7 +506,7 @@ def test_positive_get_applicable_for_host(setup_content_rhel6, rhel6_contenthost @pytest.mark.tier3 -def test_positive_get_diff_for_cv_envs(): +def test_positive_get_diff_for_cv_envs(target_sat): """Generate a difference in errata between a set of environments for a content view @@ -522,10 +524,10 @@ def test_positive_get_diff_for_cv_envs(): :CaseLevel: System """ - org = entities.Organization().create() - env = entities.LifecycleEnvironment(organization=org).create() - content_view = entities.ContentView(organization=org).create() - activation_key = entities.ActivationKey(environment=env, organization=org).create() + org = target_sat.api.Organization().create() + env = target_sat.api.LifecycleEnvironment(organization=org).create() + content_view = target_sat.api.ContentView(organization=org).create() + activation_key = target_sat.api.ActivationKey(environment=env, organization=org).create() for repo_url in [settings.repos.yum_9.url, CUSTOM_REPO_URL]: setup_org_for_a_custom_repo( { @@ -536,10 +538,10 @@ def test_positive_get_diff_for_cv_envs(): 'activationkey-id': activation_key.id, } ) - new_env = entities.LifecycleEnvironment(organization=org, prior=env).create() + new_env = target_sat.api.LifecycleEnvironment(organization=org, prior=env).create() cvvs = content_view.read().version[-2:] cvvs[-1].promote(data={'environment_ids': new_env.id, 'force': False}) - result = entities.Errata().compare( + result = target_sat.api.Errata().compare( data={'content_view_version_ids': [cvv.id for cvv in cvvs], 'per_page': '9999'} ) cvv2_only_errata = next( @@ -611,7 +613,7 @@ def test_positive_incremental_update_required( rpm_package_name=constants.FAKE_1_CUSTOM_PACKAGE, ) # Call nailgun to make the API POST to see if any incremental updates are required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ 'organization_id': module_org.id, 'included': {'ids': [host.id]}, @@ -621,7 +623,7 @@ def test_positive_incremental_update_required( assert not response, 'Incremental update should not be required at this point' # Add filter of type include but do not include anything # this will hide all RPMs from selected erratum before publishing - entities.RPMContentViewFilter( + target_sat.api.RPMContentViewFilter( content_view=module_cv, inclusion=True, name='Include Nothing' ).create() module_cv.publish() @@ -631,7 +633,7 @@ def test_positive_incremental_update_required( CV1V.promote(data={'environment_ids': module_lce.id, 'force': False}) module_cv = module_cv.read() # Call nailgun to make the API POST to ensure an incremental update is required - response = entities.Host().bulk_available_incremental_updates( + response = target_sat.api.Host().bulk_available_incremental_updates( data={ 'organization_id': module_org.id, 'included': {'ids': [host.id]}, @@ -773,7 +775,7 @@ def rh_repo_module_manifest(module_entitlement_manifest_org, module_target_sat): releasever='None', ) # Sync step because repo is not synced by default - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = 
module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @@ -791,11 +793,17 @@ def rhel8_custom_repo_cv(module_entitlement_manifest_org): @pytest.fixture(scope='module') def rhel8_module_ak( - module_entitlement_manifest_org, default_lce, rh_repo_module_manifest, rhel8_custom_repo_cv + module_entitlement_manifest_org, + default_lce, + rh_repo_module_manifest, + rhel8_custom_repo_cv, + module_target_sat, ): - rhel8_module_ak = entities.ActivationKey( + rhel8_module_ak = module_target_sat.api.ActivationKey( content_view=module_entitlement_manifest_org.default_content_view, - environment=entities.LifecycleEnvironment(id=module_entitlement_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment( + id=module_entitlement_manifest_org.library.id + ), organization=module_entitlement_manifest_org, ).create() # Ensure tools repo is enabled in the activation key @@ -805,19 +813,19 @@ def rhel8_module_ak( } ) # Fetch available subscriptions - subs = entities.Subscription(organization=module_entitlement_manifest_org).search( + subs = module_target_sat.api.Subscription(organization=module_entitlement_manifest_org).search( query={'search': f'{constants.DEFAULT_SUBSCRIPTION_NAME}'} ) assert subs # Add default subscription to activation key rhel8_module_ak.add_subscriptions(data={'subscription_id': subs[0].id}) # Add custom subscription to activation key - product = entities.Product(organization=module_entitlement_manifest_org).search( - query={'search': "redhat=false"} - ) - custom_sub = entities.Subscription(organization=module_entitlement_manifest_org).search( - query={'search': f"name={product[0].name}"} + product = module_target_sat.api.Product(organization=module_entitlement_manifest_org).search( + query={'search': 'redhat=false'} ) + custom_sub = module_target_sat.api.Subscription( + organization=module_entitlement_manifest_org + ).search(query={'search': f'name={product[0].name}'}) rhel8_module_ak.add_subscriptions(data={'subscription_id': custom_sub[0].id}) return rhel8_module_ak diff --git a/tests/foreman/api/test_filter.py b/tests/foreman/api/test_filter.py index 75d42707e4a..bf04a011276 100644 --- a/tests/foreman/api/test_filter.py +++ b/tests/foreman/api/test_filter.py @@ -20,22 +20,22 @@ :Upstream: No """ -from nailgun import entities + import pytest from requests.exceptions import HTTPError @pytest.fixture(scope='module') -def module_perms(): +def module_perms(module_target_sat): """Search for provisioning template permissions. Set ``cls.ct_perms``.""" - ct_perms = entities.Permission().search( + ct_perms = module_target_sat.api.Permission().search( query={'search': 'resource_type="ProvisioningTemplate"'} ) return ct_perms @pytest.mark.tier1 -def test_positive_create_with_permission(module_perms): +def test_positive_create_with_permission(module_perms, module_target_sat): """Create a filter and assign it some permissions. 
:id: b8631d0a-a71a-41aa-9f9a-d12d62adc496 @@ -45,14 +45,14 @@ def test_positive_create_with_permission(module_perms): :CaseImportance: Critical """ # Create a filter and assign all ProvisioningTemplate permissions to it - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_perms = [perm.id for perm in filter_.permission] perms = [perm.id for perm in module_perms] assert filter_perms == perms @pytest.mark.tier1 -def test_positive_delete(module_perms): +def test_positive_delete(module_perms, module_target_sat): """Create a filter and delete it afterwards. :id: f0c56fd8-c91d-48c3-ad21-f538313b17eb @@ -61,14 +61,14 @@ def test_positive_delete(module_perms): :CaseImportance: Critical """ - filter_ = entities.Filter(permission=module_perms).create() + filter_ = module_target_sat.api.Filter(permission=module_perms).create() filter_.delete() with pytest.raises(HTTPError): filter_.read() @pytest.mark.tier1 -def test_positive_delete_role(module_perms): +def test_positive_delete_role(module_perms, module_target_sat): """Create a filter and delete the role it points at. :id: b129642d-926d-486a-84d9-5952b44ac446 @@ -77,8 +77,8 @@ def test_positive_delete_role(module_perms): :CaseImportance: Critical """ - role = entities.Role().create() - filter_ = entities.Filter(permission=module_perms, role=role).create() + role = module_target_sat.api.Role().create() + filter_ = module_target_sat.api.Filter(permission=module_perms, role=role).create() # A filter depends on a role. Deleting a role implicitly deletes the # filter pointing at it. diff --git a/tests/foreman/api/test_foremantask.py b/tests/foreman/api/test_foremantask.py index f7e8377e82f..65918863cac 100644 --- a/tests/foreman/api/test_foremantask.py +++ b/tests/foreman/api/test_foremantask.py @@ -16,13 +16,12 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @pytest.mark.tier1 -def test_negative_fetch_non_existent_task(): +def test_negative_fetch_non_existent_task(target_sat): """Fetch a non-existent task. :id: a2a81ca2-63c4-47f5-9314-5852f5e2617f @@ -32,13 +31,13 @@ def test_negative_fetch_non_existent_task(): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.ForemanTask(id='abc123').read() + target_sat.api.ForemanTask(id='abc123').read() @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.e2e -def test_positive_get_summary(): +def test_positive_get_summary(target_sat): """Get a summary of foreman tasks. 
:id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce @@ -47,7 +46,7 @@ def test_positive_get_summary(): :CaseImportance: Critical """ - summary = entities.ForemanTask().summary() - assert type(summary) is list + summary = target_sat.api.ForemanTask().summary() + assert isinstance(summary, list) for item in summary: assert type(item) is dict diff --git a/tests/foreman/api/test_host.py b/tests/foreman/api/test_host.py index 8393dd5ecf0..15411a4741b 100644 --- a/tests/foreman/api/test_host.py +++ b/tests/foreman/api/test_host.py @@ -23,7 +23,7 @@ import http from fauxfactory import gen_choice, gen_integer, gen_ipaddr, gen_mac, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -32,14 +32,14 @@ from robottelo.utils import datafactory -def update_smart_proxy(location, smart_proxy): - if location.id not in [location.id for location in smart_proxy.location]: - smart_proxy.location.append(entities.Location(id=location.id)) - smart_proxy.update(['location']) +def update_smart_proxy(smart_proxy_location, smart_proxy): + if smart_proxy_location.id not in [location.id for location in smart_proxy.location]: + smart_proxy.location.append(smart_proxy_location) + smart_proxy.update(['location']) @pytest.mark.tier1 -def test_positive_get_search(): +def test_positive_get_search(target_sat): """GET ``api/v2/hosts`` and specify the ``search`` parameter. :id: d63f87e5-66e6-4886-8b44-4129259493a6 @@ -50,7 +50,7 @@ def test_positive_get_search(): """ query = gen_string('utf8', gen_integer(1, 100)) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'search': query}, verify=False, @@ -60,7 +60,7 @@ def test_positive_get_search(): @pytest.mark.tier1 -def test_positive_get_per_page(): +def test_positive_get_per_page(target_sat): """GET ``api/v2/hosts`` and specify the ``per_page`` parameter. :id: 9086f41c-b3b9-4af2-b6c4-46b80b4d1cfd @@ -72,7 +72,7 @@ def test_positive_get_per_page(): """ per_page = gen_integer(1, 1000) response = client.get( - entities.Host().path(), + target_sat.api.Host().path(), auth=get_credentials(), data={'per_page': str(per_page)}, verify=False, @@ -82,7 +82,7 @@ def test_positive_get_per_page(): @pytest.mark.tier2 -def test_positive_search_by_org_id(): +def test_positive_search_by_org_id(target_sat): """Search for host by specifying host's organization id :id: 56353f7c-b77e-4b6c-9ec3-51b58f9a18d8 @@ -96,9 +96,9 @@ def test_positive_search_by_org_id(): :CaseLevel: Integration """ - host = entities.Host().create() + host = target_sat.api.Host().create() # adding org id as GET parameter for correspondence with BZ - query = entities.Host() + query = target_sat.api.Host() query._meta['api_path'] += f'?organization_id={host.organization.id}' results = query.search() assert len(results) == 1 @@ -107,7 +107,7 @@ def test_positive_search_by_org_id(): @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_negative_create_with_owner_type(owner_type): +def test_negative_create_with_owner_type(owner_type, target_sat): """Create a host and specify only ``owner_type``. 
:id: cdf9d16f-1c47-498a-be48-901355385dde @@ -119,13 +119,15 @@ def test_negative_create_with_owner_type(owner_type): :CaseImportance: Critical """ with pytest.raises(HTTPError) as error: - entities.Host(owner_type=owner_type).create() + target_sat.api.Host(owner_type=owner_type).create() assert str(422) in str(error) @pytest.mark.tier1 @pytest.mark.parametrize('owner_type', ['User', 'Usergroup']) -def test_positive_update_owner_type(owner_type, module_org, module_location, module_user): +def test_positive_update_owner_type( + owner_type, module_org, module_location, module_user, module_target_sat +): """Update a host's ``owner_type``. :id: b72cd8ef-3a0b-4d2d-94f9-9b64908d699a @@ -141,9 +143,9 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod """ owners = { 'User': module_user, - 'Usergroup': entities.UserGroup().create(), + 'Usergroup': module_target_sat.api.UserGroup().create(), } - host = entities.Host(organization=module_org, location=module_location).create() + host = module_target_sat.api.Host(organization=module_org, location=module_location).create() host.owner_type = owner_type host.owner = owners[owner_type] host = host.update(['owner_type', 'owner']) @@ -152,7 +154,7 @@ def test_positive_update_owner_type(owner_type, module_org, module_location, mod @pytest.mark.tier1 -def test_positive_create_and_update_with_name(): +def test_positive_create_and_update_with_name(target_sat): """Create and update a host with different names and minimal input parameters :id: a7c0e8ec-3816-4092-88b1-0324cb271752 @@ -162,7 +164,7 @@ def test_positive_create_and_update_with_name(): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name).create() + host = target_sat.api.Host(name=name).create() assert host.name == f'{name}.{host.domain.read().name}' new_name = gen_choice(datafactory.valid_hosts_list()) host.name = new_name @@ -171,7 +173,7 @@ def test_positive_create_and_update_with_name(): @pytest.mark.tier1 -def test_positive_create_and_update_with_ip(): +def test_positive_create_and_update_with_ip(target_sat): """Create and update host with IP address specified :id: 3f266906-c509-42ce-9b20-def448bf8d86 @@ -181,7 +183,7 @@ def test_positive_create_and_update_with_ip(): :CaseImportance: Critical """ ip_addr = gen_ipaddr() - host = entities.Host(ip=ip_addr).create() + host = target_sat.api.Host(ip=ip_addr).create() assert host.ip == ip_addr new_ip_addr = gen_ipaddr() host.ip = new_ip_addr @@ -190,7 +192,7 @@ def test_positive_create_and_update_with_ip(): @pytest.mark.tier1 -def test_positive_create_and_update_mac(): +def test_positive_create_and_update_mac(target_sat): """Create host with MAC address and update it :id: 72e3b020-7347-4500-8669-c6ddf6dfd0b6 @@ -201,7 +203,7 @@ def test_positive_create_and_update_mac(): """ mac = gen_mac(multicast=False) - host = entities.Host(mac=mac).create() + host = target_sat.api.Host(mac=mac).create() assert host.mac == mac new_mac = gen_mac(multicast=False) host.mac = new_mac @@ -211,7 +213,7 @@ def test_positive_create_and_update_mac(): @pytest.mark.tier2 def test_positive_create_and_update_with_hostgroup( - module_org, module_location, module_lce, module_published_cv + module_org, module_location, module_lce, module_published_cv, module_target_sat ): """Create and update host with hostgroup specified @@ -222,8 +224,10 @@ def test_positive_create_and_update_with_hostgroup( :CaseLevel: Integration """ module_published_cv.version[0].promote(data={'environment_ids': 
module_lce.id, 'force': False}) - hostgroup = entities.HostGroup(location=[module_location], organization=[module_org]).create() - host = entities.Host( + hostgroup = module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() + host = module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -233,7 +237,7 @@ def test_positive_create_and_update_with_hostgroup( }, ).create() assert host.hostgroup.read().name == hostgroup.name - new_hostgroup = entities.HostGroup( + new_hostgroup = module_target_sat.api.HostGroup( location=[host.location], organization=[host.organization] ).create() host.hostgroup = new_hostgroup @@ -246,7 +250,9 @@ def test_positive_create_and_update_with_hostgroup( @pytest.mark.tier2 -def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_library, module_org): +def test_positive_create_inherit_lce_cv( + module_default_org_view, module_lce_library, module_org, module_target_sat +): """Create a host with hostgroup specified. Make sure host inherited hostgroup's lifecycle environment and content-view @@ -259,12 +265,12 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr :BZ: 1391656 """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( content_view=module_default_org_view, lifecycle_environment=module_lce_library, organization=[module_org], ).create() - host = entities.Host(hostgroup=hostgroup, organization=module_org).create() + host = module_target_sat.api.Host(hostgroup=hostgroup, organization=module_org).create() assert ( host.content_facet_attributes['lifecycle_environment_id'] == hostgroup.lifecycle_environment.id @@ -273,7 +279,7 @@ def test_positive_create_inherit_lce_cv(module_default_org_view, module_lce_libr @pytest.mark.tier2 -def test_positive_create_with_inherited_params(module_org, module_location): +def test_positive_create_with_inherited_params(module_org, module_location, module_target_sat): """Create a new Host in organization and location with parameters :BZ: 1287223 @@ -287,18 +293,20 @@ def test_positive_create_with_inherited_params(module_org, module_location): :CaseImportance: High """ - org_param = entities.Parameter(organization=module_org).create() - loc_param = entities.Parameter(location=module_location).create() - host = entities.Host(location=module_location, organization=module_org).create() + org_param = module_target_sat.api.Parameter(organization=module_org).create() + loc_param = module_target_sat.api.Parameter(location=module_location).create() + host = module_target_sat.api.Host(location=module_location, organization=module_org).create() # get global parameters - glob_param_list = {(param.name, param.value) for param in entities.CommonParameter().search()} + glob_param_list = { + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() + } # if there are no global parameters, create one if len(glob_param_list) == 0: param_name = gen_string('alpha') param_global_value = gen_string('numeric') - entities.CommonParameter(name=param_name, value=param_global_value).create() + module_target_sat.api.CommonParameter(name=param_name, value=param_global_value).create() glob_param_list = { - (param.name, param.value) for param in entities.CommonParameter().search() + (param.name, param.value) for param in module_target_sat.api.CommonParameter().search() } assert len(host.all_parameters) == 2 + len(glob_param_list) innerited_params = {(org_param.name, 
org_param.value), (loc_param.name, loc_param.value)} @@ -397,7 +405,9 @@ def test_positive_end_to_end_with_puppet_class( @pytest.mark.tier2 -def test_positive_create_and_update_with_subnet(module_location, module_org, module_default_subnet): +def test_positive_create_and_update_with_subnet( + module_location, module_org, module_default_subnet, module_target_sat +): """Create and update a host with subnet specified :id: 9aa97aff-8439-4027-89ee-01c643fbf7d1 @@ -406,11 +416,13 @@ def test_positive_create_and_update_with_subnet(module_location, module_org, mod :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( location=module_location, organization=module_org, subnet=module_default_subnet ).create() assert host.subnet.read().name == module_default_subnet.name - new_subnet = entities.Subnet(location=[module_location], organization=[module_org]).create() + new_subnet = module_target_sat.api.Subnet( + location=[module_location], organization=[module_org] + ).create() host.subnet = new_subnet host = host.update(['subnet']) assert host.subnet.read().name == new_subnet.name @@ -418,7 +430,7 @@ def test_positive_create_and_update_with_subnet(module_location, module_org, mod @pytest.mark.tier2 def test_positive_create_and_update_with_compresource( - module_org, module_location, module_cr_libvirt + module_org, module_location, module_cr_libvirt, module_target_sat ): """Create and update a host with compute resource specified @@ -429,11 +441,11 @@ def test_positive_create_and_update_with_compresource( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( compute_resource=module_cr_libvirt, location=module_location, organization=module_org ).create() assert host.compute_resource.read().name == module_cr_libvirt.name - new_compresource = entities.LibvirtComputeResource( + new_compresource = module_target_sat.api.LibvirtComputeResource( location=[host.location], organization=[host.organization] ).create() host.compute_resource = new_compresource @@ -442,7 +454,7 @@ def test_positive_create_and_update_with_compresource( @pytest.mark.tier2 -def test_positive_create_and_update_with_model(module_model): +def test_positive_create_and_update_with_model(module_model, module_target_sat): """Create and update a host with model specified :id: 7a912a19-71e4-4843-87fd-bab98c156f4a @@ -451,16 +463,18 @@ def test_positive_create_and_update_with_model(module_model): :CaseLevel: Integration """ - host = entities.Host(model=module_model).create() + host = module_target_sat.api.Host(model=module_model).create() assert host.model.read().name == module_model.name - new_model = entities.Model().create() + new_model = module_target_sat.api.Model().create() host.model = new_model host = host.update(['model']) assert host.model.read().name == new_model.name @pytest.mark.tier2 -def test_positive_create_and_update_with_user(module_org, module_location, module_user): +def test_positive_create_and_update_with_user( + module_org, module_location, module_user, module_target_sat +): """Create and update host with user specified :id: 72e20f8f-17dc-4e38-8ac1-d08df8758f56 @@ -469,18 +483,22 @@ def test_positive_create_and_update_with_user(module_org, module_location, modul :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( owner=module_user, owner_type='User', organization=module_org, location=module_location ).create() assert host.owner.read() == module_user - new_user = entities.User(organization=[module_org], 
location=[module_location]).create() + new_user = module_target_sat.api.User( + organization=[module_org], location=[module_location] + ).create() host.owner = new_user host = host.update(['owner']) assert host.owner.read() == new_user @pytest.mark.tier2 -def test_positive_create_and_update_with_usergroup(module_org, module_location, function_role): +def test_positive_create_and_update_with_usergroup( + module_org, module_location, function_role, module_target_sat +): """Create and update host with user group specified :id: 706e860c-8c05-4ddc-be20-0ecd9f0da813 @@ -489,18 +507,18 @@ def test_positive_create_and_update_with_usergroup(module_org, module_location, :CaseLevel: Integration """ - user = entities.User( + user = module_target_sat.api.User( location=[module_location], organization=[module_org], role=[function_role] ).create() - usergroup = entities.UserGroup(role=[function_role], user=[user]).create() - host = entities.Host( + usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() + host = module_target_sat.api.Host( location=module_location, organization=module_org, owner=usergroup, owner_type='Usergroup', ).create() assert host.owner.read().name == usergroup.name - new_usergroup = entities.UserGroup(role=[function_role], user=[user]).create() + new_usergroup = module_target_sat.api.UserGroup(role=[function_role], user=[user]).create() host.owner = new_usergroup host = host.update(['owner']) assert host.owner.read().name == new_usergroup.name @@ -508,7 +526,7 @@ def test_positive_create_and_update_with_usergroup(module_org, module_location, @pytest.mark.tier1 @pytest.mark.parametrize('build', [True, False]) -def test_positive_create_and_update_with_build_parameter(build): +def test_positive_create_and_update_with_build_parameter(build, target_sat): """Create and update a host with 'build' parameter specified. Build parameter determines whether to enable the host for provisioning @@ -521,7 +539,7 @@ def test_positive_create_and_update_with_build_parameter(build): :CaseImportance: Critical """ - host = entities.Host(build=build).create() + host = target_sat.api.Host(build=build).create() assert host.build == build host.build = not build host = host.update(['build']) @@ -530,7 +548,7 @@ def test_positive_create_and_update_with_build_parameter(build): @pytest.mark.tier1 @pytest.mark.parametrize('enabled', [True, False], ids=['enabled', 'disabled']) -def test_positive_create_and_update_with_enabled_parameter(enabled): +def test_positive_create_and_update_with_enabled_parameter(enabled, target_sat): """Create and update a host with 'enabled' parameter specified. Enabled parameter determines whether to include the host within Satellite 6 reporting @@ -544,7 +562,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): :CaseImportance: Critical """ - host = entities.Host(enabled=enabled).create() + host = target_sat.api.Host(enabled=enabled).create() assert host.enabled == enabled host.enabled = not enabled host = host.update(['enabled']) @@ -553,7 +571,7 @@ def test_positive_create_and_update_with_enabled_parameter(enabled): @pytest.mark.tier1 @pytest.mark.parametrize('managed', [True, False], ids=['managed', 'unmanaged']) -def test_positive_create_and_update_with_managed_parameter(managed): +def test_positive_create_and_update_with_managed_parameter(managed, target_sat): """Create and update a host with managed parameter specified. 
Managed flag shows whether the host is managed or unmanaged and determines whether some extra parameters are required @@ -567,7 +585,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): :CaseImportance: Critical """ - host = entities.Host(managed=managed).create() + host = target_sat.api.Host(managed=managed).create() assert host.managed == managed host.managed = not managed host = host.update(['managed']) @@ -575,7 +593,7 @@ def test_positive_create_and_update_with_managed_parameter(managed): @pytest.mark.tier1 -def test_positive_create_and_update_with_comment(): +def test_positive_create_and_update_with_comment(target_sat): """Create and update a host with a comment :id: 9b78663f-139c-4d0b-9115-180624b0d41b @@ -585,7 +603,7 @@ def test_positive_create_and_update_with_comment(): :CaseImportance: Critical """ comment = gen_choice(list(datafactory.valid_data_list().values())) - host = entities.Host(comment=comment).create() + host = target_sat.api.Host(comment=comment).create() assert host.comment == comment new_comment = gen_choice(list(datafactory.valid_data_list().values())) host.comment = new_comment @@ -594,7 +612,7 @@ def test_positive_create_and_update_with_comment(): @pytest.mark.tier2 -def test_positive_create_and_update_with_compute_profile(module_compute_profile): +def test_positive_create_and_update_with_compute_profile(module_compute_profile, module_target_sat): """Create and update a host with a compute profile specified :id: 94be25e8-035d-42c5-b1f3-3aa20030410d @@ -604,9 +622,9 @@ def test_positive_create_and_update_with_compute_profile(module_compute_profile) :CaseLevel: Integration """ - host = entities.Host(compute_profile=module_compute_profile).create() + host = module_target_sat.api.Host(compute_profile=module_compute_profile).create() assert host.compute_profile.read().name == module_compute_profile.name - new_cprofile = entities.ComputeProfile().create() + new_cprofile = module_target_sat.api.ComputeProfile().create() host.compute_profile = new_cprofile host = host.update(['compute_profile']) assert host.compute_profile.read().name == new_cprofile.name @@ -614,7 +632,7 @@ def test_positive_create_and_update_with_compute_profile(module_compute_profile) @pytest.mark.tier2 def test_positive_create_and_update_with_content_view( - module_org, module_location, module_default_org_view, module_lce_library + module_org, module_location, module_default_org_view, module_lce_library, module_target_sat ): """Create and update host with a content view specified @@ -624,7 +642,7 @@ def test_positive_create_and_update_with_content_view( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -646,7 +664,7 @@ def test_positive_create_and_update_with_content_view( @pytest.mark.tier1 @pytest.mark.e2e -def test_positive_end_to_end_with_host_parameters(module_org, module_location): +def test_positive_end_to_end_with_host_parameters(module_org, module_location, module_target_sat): """Create a host with a host parameters specified then remove and update with the newly specified parameters @@ -658,7 +676,7 @@ def test_positive_end_to_end_with_host_parameters(module_org, module_location): :CaseImportance: Critical """ parameters = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, host_parameters_attributes=parameters, @@ -683,7 +701,7 @@ 
def test_positive_end_to_end_with_host_parameters(module_org, module_location): @pytest.mark.tier2 @pytest.mark.e2e def test_positive_end_to_end_with_image( - module_org, module_location, module_cr_libvirt, module_libvirt_image + module_org, module_location, module_cr_libvirt, module_libvirt_image, module_target_sat ): """Create a host with an image specified then remove it and update the host with the same image afterwards @@ -695,7 +713,7 @@ def test_positive_end_to_end_with_image( :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -715,7 +733,7 @@ def test_positive_end_to_end_with_image( @pytest.mark.tier1 @pytest.mark.parametrize('method', ['build', 'image']) def test_positive_create_with_provision_method( - method, module_org, module_location, module_cr_libvirt + method, module_org, module_location, module_cr_libvirt, module_target_sat ): """Create a host with provision method specified @@ -728,7 +746,7 @@ def test_positive_create_with_provision_method( :CaseImportance: Critical """ # Compute resource is required for 'image' method - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, compute_resource=module_cr_libvirt, @@ -738,7 +756,7 @@ def test_positive_create_with_provision_method( @pytest.mark.tier1 -def test_positive_delete(): +def test_positive_delete(target_sat): """Delete a host :id: ec725359-a75e-498c-9da8-f5abd2343dd3 @@ -747,14 +765,16 @@ def test_positive_delete(): :CaseImportance: Critical """ - host = entities.Host().create() + host = target_sat.api.Host().create() host.delete() with pytest.raises(HTTPError): host.read() @pytest.mark.tier2 -def test_positive_create_and_update_domain(module_org, module_location, module_domain): +def test_positive_create_and_update_domain( + module_org, module_location, module_domain, module_target_sat +): """Create and update a host with a domain :id: 8ca9f67c-4c11-40f9-b434-4f200bad000f @@ -763,12 +783,14 @@ def test_positive_create_and_update_domain(module_org, module_location, module_d :CaseLevel: Integration """ - host = entities.Host( + host = module_target_sat.api.Host( organization=module_org, location=module_location, domain=module_domain ).create() assert host.domain.read().name == module_domain.name - new_domain = entities.Domain(organization=[module_org], location=[module_location]).create() + new_domain = module_target_sat.api.Domain( + organization=[module_org], location=[module_location] + ).create() host.domain = new_domain host = host.update(['domain']) assert host.domain.read().name == new_domain.name @@ -802,7 +824,7 @@ def test_positive_create_and_update_env( @pytest.mark.tier2 -def test_positive_create_and_update_arch(module_architecture): +def test_positive_create_and_update_arch(module_architecture, module_target_sat): """Create and update a host with an architecture :id: 5f190b14-e6db-46e1-8cd1-e94e048e6a77 @@ -811,17 +833,17 @@ def test_positive_create_and_update_arch(module_architecture): :CaseLevel: Integration """ - host = entities.Host(architecture=module_architecture).create() + host = module_target_sat.api.Host(architecture=module_architecture).create() assert host.architecture.read().name == module_architecture.name - new_arch = entities.Architecture(operatingsystem=[host.operatingsystem]).create() + new_arch = module_target_sat.api.Architecture(operatingsystem=[host.operatingsystem]).create() host.architecture = new_arch 
host = host.update(['architecture']) assert host.architecture.read().name == new_arch.name @pytest.mark.tier2 -def test_positive_create_and_update_os(module_os): +def test_positive_create_and_update_os(module_os, module_target_sat): """Create and update a host with an operating system :id: 46edced1-8909-4066-b196-b8e22512341f @@ -830,13 +852,13 @@ def test_positive_create_and_update_os(module_os): :CaseLevel: Integration """ - host = entities.Host(operatingsystem=module_os).create() + host = module_target_sat.api.Host(operatingsystem=module_os).create() assert host.operatingsystem.read().name == module_os.name - new_os = entities.OperatingSystem( + new_os = module_target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() - medium = entities.Media(id=host.medium.id).read() + medium = module_target_sat.api.Media(id=host.medium.id).read() medium.operatingsystem.append(new_os) medium.update(['operatingsystem']) host.operatingsystem = new_os @@ -845,7 +867,7 @@ def test_positive_create_and_update_os(module_os): @pytest.mark.tier2 -def test_positive_create_and_update_medium(module_org, module_location): +def test_positive_create_and_update_medium(module_org, module_location, module_target_sat): """Create and update a host with a medium :id: d81cb65c-48b3-4ce3-971e-51b9dd123697 @@ -854,11 +876,13 @@ def test_positive_create_and_update_medium(module_org, module_location): :CaseLevel: Integration """ - medium = entities.Media(organization=[module_org], location=[module_location]).create() - host = entities.Host(medium=medium).create() + medium = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() + host = module_target_sat.api.Host(medium=medium).create() assert host.medium.read().name == medium.name - new_medium = entities.Media( + new_medium = module_target_sat.api.Media( operatingsystem=[host.operatingsystem], location=[host.location], organization=[host.organization], @@ -907,7 +931,7 @@ def test_negative_update_mac(module_host): @pytest.mark.tier2 -def test_negative_update_arch(module_architecture): +def test_negative_update_arch(module_architecture, module_target_sat): """Attempt to update a host with an architecture, which does not belong to host's operating system @@ -917,7 +941,7 @@ def test_negative_update_arch(module_architecture): :CaseLevel: Integration """ - host = entities.Host().create() + host = module_target_sat.api.Host().create() host.architecture = module_architecture with pytest.raises(HTTPError): host = host.update(['architecture']) @@ -925,7 +949,7 @@ def test_negative_update_arch(module_architecture): @pytest.mark.tier2 -def test_negative_update_os(): +def test_negative_update_os(target_sat): """Attempt to update a host with an operating system, which is not associated with host's medium @@ -935,8 +959,8 @@ def test_negative_update_os(): :CaseLevel: Integration """ - host = entities.Host().create() - new_os = entities.OperatingSystem( + host = target_sat.api.Host().create() + new_os = target_sat.api.OperatingSystem( architecture=[host.architecture], ptable=[host.ptable] ).create() host.operatingsystem = new_os @@ -963,9 +987,9 @@ def test_positive_read_content_source_id( :CaseLevel: System """ - proxy = entities.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() + proxy = target_sat.api.SmartProxy().search(query={'url': f'{target_sat.url}:9090'})[0].read() module_published_cv.version[0].promote(data={'environment_ids': module_lce.id, 'force': False}) - host = 
entities.Host( + host = target_sat.api.Host( organization=module_org, location=module_location, content_facet_attributes={ @@ -1407,7 +1431,7 @@ class TestHostInterface: @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_create_end_to_end(self, module_host): + def test_positive_create_end_to_end(self, module_host, target_sat): """Create update and delete an interface with different names and minimal input parameters @@ -1418,7 +1442,7 @@ def test_positive_create_end_to_end(self, module_host): :CaseImportance: Critical """ name = gen_choice(datafactory.valid_interfaces_list()) - interface = entities.Interface(host=module_host, name=name).create() + interface = target_sat.api.Interface(host=module_host, name=name).create() assert interface.name == name new_name = gen_choice(datafactory.valid_interfaces_list()) interface.name = new_name @@ -1429,7 +1453,7 @@ def test_positive_create_end_to_end(self, module_host): interface.read() @pytest.mark.tier1 - def test_negative_end_to_end(self, module_host): + def test_negative_end_to_end(self, module_host, target_sat): """Attempt to create and update an interface with different invalid entries as names (>255 chars, unsupported string types), at the end attempt to remove primary interface @@ -1442,9 +1466,9 @@ def test_negative_end_to_end(self, module_host): """ name = gen_choice(datafactory.invalid_interfaces_list()) with pytest.raises(HTTPError) as error: - entities.Interface(host=module_host, name=name).create() + target_sat.api.Interface(host=module_host, name=name).create() assert str(422) in str(error) - interface = entities.Interface(host=module_host).create() + interface = target_sat.api.Interface(host=module_host).create() interface.name = name with pytest.raises(HTTPError) as error: interface.update(['name']) @@ -1463,7 +1487,7 @@ def test_negative_end_to_end(self, module_host): @pytest.mark.upgrade @pytest.mark.tier1 - def test_positive_delete_and_check_host(self): + def test_positive_delete_and_check_host(self, target_sat): """Delete host's interface (not primary) and make sure the host was not accidentally removed altogether with the interface @@ -1476,8 +1500,8 @@ def test_positive_delete_and_check_host(self): :CaseImportance: Critical """ - host = entities.Host().create() - interface = entities.Interface(host=host, primary=False).create() + host = target_sat.api.Host().create() + interface = target_sat.api.Interface(host=host, primary=False).create() interface.delete() with pytest.raises(HTTPError): interface.read() @@ -1491,7 +1515,7 @@ class TestHostBulkAction: """Tests for host bulk actions.""" @pytest.mark.tier2 - def test_positive_bulk_destroy(self, module_org): + def test_positive_bulk_destroy(self, module_org, module_target_sat): """Destroy multiple hosts make sure that hosts were removed, or were not removed when host is excluded from the list. 
@@ -1506,10 +1530,10 @@ def test_positive_bulk_destroy(self, module_org): host_ids = [] for _ in range(3): name = gen_choice(datafactory.valid_hosts_list()) - host = entities.Host(name=name, organization=module_org).create() + host = module_target_sat.api.Host(name=name, organization=module_org).create() host_ids.append(host.id) - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={ 'organization_id': module_org.id, 'included': {'ids': host_ids}, @@ -1517,15 +1541,15 @@ def test_positive_bulk_destroy(self, module_org): } ) for host_id in host_ids[:-1]: - result = entities.Host(id=host_id).read() + result = module_target_sat.api.Host(id=host_id).read() assert result.id == host_id with pytest.raises(HTTPError): - entities.Host(id=host_ids[-1]).read() + module_target_sat.api.Host(id=host_ids[-1]).read() - entities.Host().bulk_destroy( + module_target_sat.api.Host().bulk_destroy( data={'organization_id': module_org.id, 'included': {'ids': host_ids[:-1]}} ) for host_id in host_ids[:-1]: with pytest.raises(HTTPError): - entities.Host(id=host_id).read() + module_target_sat.api.Host(id=host_id).read() diff --git a/tests/foreman/api/test_hostcollection.py b/tests/foreman/api/test_hostcollection.py index 0b5c0147ead..8935779101c 100644 --- a/tests/foreman/api/test_hostcollection.py +++ b/tests/foreman/api/test_hostcollection.py @@ -19,7 +19,6 @@ from random import choice, randint from broker import Broker -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -32,15 +31,15 @@ @pytest.fixture(scope='module') -def fake_hosts(module_org): +def fake_hosts(module_org, module_target_sat): """Create content hosts that can be shared by tests.""" - hosts = [entities.Host(organization=module_org).create() for _ in range(2)] + hosts = [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] return hosts @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create host collections with different names. :id: 8f2b9223-f5be-4cb1-8316-01ea747cae14 @@ -52,12 +51,14 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(name=name, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + name=name, organization=module_org + ).create() assert host_collection.name == name @pytest.mark.tier1 -def test_positive_list(module_org): +def test_positive_list(module_org, module_target_sat): """Create new host collection and then retrieve list of all existing host collections @@ -72,13 +73,13 @@ def test_positive_list(module_org): :CaseImportance: Critical """ - entities.HostCollection(organization=module_org).create() - hc_list = entities.HostCollection().search() + module_target_sat.api.HostCollection(organization=module_org).create() + hc_list = module_target_sat.api.HostCollection().search() assert len(hc_list) >= 1 @pytest.mark.tier1 -def test_positive_list_for_organization(): +def test_positive_list_for_organization(target_sat): """Create host collection for specific organization. 
Retrieve list of host collections for that organization @@ -89,16 +90,16 @@ def test_positive_list_for_organization(): :CaseImportance: Critical """ - org = entities.Organization().create() - hc = entities.HostCollection(organization=org).create() - hc_list = entities.HostCollection(organization=org).search() + org = target_sat.api.Organization().create() + hc = target_sat.api.HostCollection(organization=org).create() + hc_list = target_sat.api.HostCollection(organization=org).search() assert len(hc_list) == 1 assert hc_list[0].id == hc.id @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, desc): +def test_positive_create_with_description(module_org, desc, module_target_sat): """Create host collections with different descriptions. :id: 9d13392f-8d9d-4ff1-8909-4233e4691055 @@ -110,12 +111,14 @@ def test_positive_create_with_description(module_org, desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(description=desc, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + description=desc, organization=module_org + ).create() assert host_collection.description == desc @pytest.mark.tier1 -def test_positive_create_with_limit(module_org): +def test_positive_create_with_limit(module_org, module_target_sat): """Create host collections with different limits. :id: 86d9387b-7036-4794-96fd-5a3472dd9160 @@ -127,13 +130,15 @@ def test_positive_create_with_limit(module_org): """ for _ in range(5): limit = randint(1, 30) - host_collection = entities.HostCollection(max_hosts=limit, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + max_hosts=limit, organization=module_org + ).create() assert host_collection.max_hosts == limit @pytest.mark.parametrize("unlimited", [False, True]) @pytest.mark.tier1 -def test_positive_create_with_unlimited_hosts(module_org, unlimited): +def test_positive_create_with_unlimited_hosts(module_org, unlimited, module_target_sat): """Create host collection with different values of 'unlimited hosts' parameter. @@ -146,7 +151,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=None if unlimited else 1, organization=module_org, unlimited_hosts=unlimited, @@ -155,7 +160,7 @@ def test_positive_create_with_unlimited_hosts(module_org, unlimited): @pytest.mark.tier1 -def test_positive_create_with_host(module_org, fake_hosts): +def test_positive_create_with_host(module_org, fake_hosts, module_target_sat): """Create a host collection that contains a host. :id: 9dc0ad72-58c2-4079-b1ca-2c4373472f0f @@ -167,14 +172,14 @@ def test_positive_create_with_host(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() assert len(host_collection.host) == 1 @pytest.mark.tier1 -def test_positive_create_with_hosts(module_org, fake_hosts): +def test_positive_create_with_hosts(module_org, fake_hosts, module_target_sat): """Create a host collection that contains hosts. 
:id: bb8d2b42-9a8b-4c4f-ba0c-c56ae5a7eb1d @@ -186,12 +191,14 @@ def test_positive_create_with_hosts(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert len(host_collection.host) == len(fake_hosts) @pytest.mark.tier2 -def test_positive_add_host(module_org, fake_hosts): +def test_positive_add_host(module_org, fake_hosts, module_target_sat): """Add a host to host collection. :id: da8bc901-7ac8-4029-bb62-af21aa4d3a88 @@ -202,7 +209,7 @@ def test_positive_add_host(module_org, fake_hosts): :BZ:1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.host_ids = [fake_hosts[0].id] host_collection = host_collection.update(['host_ids']) assert len(host_collection.host) == 1 @@ -210,7 +217,7 @@ def test_positive_add_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_add_hosts(module_org, fake_hosts): +def test_positive_add_hosts(module_org, fake_hosts, module_target_sat): """Add hosts to host collection. :id: f76b4db1-ccd5-47ab-be15-8c7d91d03b22 @@ -221,7 +228,7 @@ def test_positive_add_hosts(module_org, fake_hosts): :BZ: 1325989 """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_ids = [str(host.id) for host in fake_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -229,7 +236,7 @@ def test_positive_add_hosts(module_org, fake_hosts): @pytest.mark.tier1 -def test_positive_read_host_ids(module_org, fake_hosts): +def test_positive_read_host_ids(module_org, fake_hosts, module_target_sat): """Read a host collection and look at the ``host_ids`` field. 
:id: 444a1528-64c8-41b6-ba2b-6c49799d5980 @@ -241,7 +248,9 @@ def test_positive_read_host_ids(module_org, fake_hosts): :BZ:1325989 """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() assert frozenset(host.id for host in host_collection.host) == frozenset( host.id for host in fake_hosts ) @@ -249,7 +258,7 @@ def test_positive_read_host_ids(module_org, fake_hosts): @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, new_name): +def test_positive_update_name(module_org, new_name, module_target_sat): """Check if host collection name can be updated :id: b2dedb99-6dd7-41be-8aaa-74065c820ac6 @@ -260,14 +269,14 @@ def test_positive_update_name(module_org, new_name): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.name = new_name assert host_collection.update().name == new_name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_description(module_org, new_desc): +def test_positive_update_description(module_org, new_desc, module_target_sat): """Check if host collection description can be updated :id: f8e9bd1c-1525-4b5f-a07c-eb6b6e7aa628 @@ -278,13 +287,13 @@ def test_positive_update_description(module_org, new_desc): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.description = new_desc assert host_collection.update().description == new_desc @pytest.mark.tier1 -def test_positive_update_limit(module_org): +def test_positive_update_limit(module_org, module_target_sat): """Check if host collection limit can be updated :id: 4eda7796-cd81-453b-9b72-4ef84b2c1d8c @@ -293,7 +302,7 @@ def test_positive_update_limit(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1, organization=module_org, unlimited_hosts=False ).create() for limit in (1, 3, 5, 10, 20): @@ -302,7 +311,7 @@ def test_positive_update_limit(module_org): @pytest.mark.tier1 -def test_positive_update_unlimited_hosts(module_org): +def test_positive_update_unlimited_hosts(module_org, module_target_sat): """Check if host collection 'unlimited hosts' parameter can be updated :id: 09a3973d-9832-4255-87bf-f9eaeab4aee8 @@ -313,7 +322,7 @@ def test_positive_update_unlimited_hosts(module_org): :CaseImportance: Critical """ random_unlimited = choice([True, False]) - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( max_hosts=1 if not random_unlimited else None, organization=module_org, unlimited_hosts=random_unlimited, @@ -326,7 +335,7 @@ def test_positive_update_unlimited_hosts(module_org): @pytest.mark.tier1 -def test_positive_update_host(module_org, fake_hosts): +def test_positive_update_host(module_org, fake_hosts, module_target_sat): """Update host collection's host. 
:id: 23082854-abcf-4085-be9c-a5d155446acb @@ -335,7 +344,7 @@ def test_positive_update_host(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection( + host_collection = module_target_sat.api.HostCollection( host=[fake_hosts[0]], organization=module_org ).create() host_collection.host_ids = [fake_hosts[1].id] @@ -345,7 +354,7 @@ def test_positive_update_host(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_update_hosts(module_org, fake_hosts): +def test_positive_update_hosts(module_org, fake_hosts, module_target_sat): """Update host collection's hosts. :id: 0433b37d-ae16-456f-a51d-c7b800334861 @@ -354,8 +363,10 @@ def test_positive_update_hosts(module_org, fake_hosts): :CaseImportance: Critical """ - host_collection = entities.HostCollection(host=fake_hosts, organization=module_org).create() - new_hosts = [entities.Host(organization=module_org).create() for _ in range(2)] + host_collection = module_target_sat.api.HostCollection( + host=fake_hosts, organization=module_org + ).create() + new_hosts = [module_target_sat.api.Host(organization=module_org).create() for _ in range(2)] host_ids = [str(host.id) for host in new_hosts] host_collection.host_ids = host_ids host_collection = host_collection.update(['host_ids']) @@ -364,7 +375,7 @@ def test_positive_update_hosts(module_org, fake_hosts): @pytest.mark.upgrade @pytest.mark.tier1 -def test_positive_delete(module_org): +def test_positive_delete(module_org, module_target_sat): """Check if host collection can be deleted :id: 13a16cd2-16ce-4966-8c03-5d821edf963b @@ -373,7 +384,7 @@ def test_positive_delete(module_org): :CaseImportance: Critical """ - host_collection = entities.HostCollection(organization=module_org).create() + host_collection = module_target_sat.api.HostCollection(organization=module_org).create() host_collection.delete() with pytest.raises(HTTPError): host_collection.read() @@ -381,7 +392,7 @@ def test_positive_delete(module_org): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Try to create host collections with different invalid names :id: 38f67d04-a19d-4eab-a577-21b8d62c7389 @@ -393,7 +404,7 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostCollection(name=name, organization=module_org).create() + module_target_sat.api.HostCollection(name=name, organization=module_org).create() @pytest.mark.tier1 @@ -418,14 +429,14 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s """ # this command creates a host collection and "appends", makes available, to the AK module_ak_cv_lce.host_collection.append( - entities.HostCollection(organization=module_org).create() + target_sat.api.HostCollection(organization=module_org).create() ) # Move HC from Add tab to List tab on AK view module_ak_cv_lce = module_ak_cv_lce.update(['host_collection']) # Create a product so we have a subscription to use - product = entities.Product(organization=module_org).create() + product = target_sat.api.Product(organization=module_org).create() prod_name = product.name - product_subscription = entities.Subscription(organization=module_org).search( + product_subscription = target_sat.api.Subscription(organization=module_org).search( query={'search': f'name={prod_name}'} )[0] # 
Create and register VMs as members of Host Collection @@ -438,7 +449,7 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s host_ids = [host.id for host in host_collection.host] # Add subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_add_subscriptions( + target_sat.api.Host().bulk_add_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -447,13 +458,13 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are there for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert ( prod_name in req['results'][0]['product_name'] ), 'Subscription not applied to HC members' # Remove the subscription # Call nailgun to make the API PUT to members of Host Collection - entities.Host().bulk_remove_subscriptions( + target_sat.api.Host().bulk_remove_subscriptions( data={ "organization_id": module_org.id, "included": {"ids": host_ids}, @@ -462,5 +473,5 @@ def test_positive_add_remove_subscription(module_org, module_ak_cv_lce, target_s ) # GET the subscriptions from hosts and assert they are gone for host_id in host_ids: - req = entities.HostSubscription(host=host_id).subscriptions() + req = target_sat.api.HostSubscription(host=host_id).subscriptions() assert not req['results'], 'Subscription not removed from HC members' diff --git a/tests/foreman/api/test_hostgroup.py b/tests/foreman/api/test_hostgroup.py index 9e3c3aa0c21..784b404e5dd 100644 --- a/tests/foreman/api/test_hostgroup.py +++ b/tests/foreman/api/test_hostgroup.py @@ -19,7 +19,7 @@ from random import randint from fauxfactory import gen_string -from nailgun import client, entities, entity_fields +from nailgun import client, entity_fields import pytest from requests.exceptions import HTTPError @@ -32,8 +32,10 @@ @pytest.fixture -def hostgroup(module_org, module_location): - return entities.HostGroup(location=[module_location], organization=[module_org]).create() +def hostgroup(module_org, module_location, module_target_sat): + return module_target_sat.api.HostGroup( + location=[module_location], organization=[module_org] + ).create() @pytest.fixture @@ -158,7 +160,7 @@ def test_inherit_puppetclass(self, session_puppet_enabled_sat): @pytest.mark.upgrade @pytest.mark.tier3 - def test_rebuild_config(self, module_org, module_location, hostgroup): + def test_rebuild_config(self, module_org, module_location, hostgroup, module_target_sat): """'Rebuild orchestration config' of an existing host group :id: 58bf7015-18fc-4d25-9b64-7f2dd6dde425 @@ -169,12 +171,12 @@ def test_rebuild_config(self, module_org, module_location, hostgroup): :CaseLevel: System """ - lce = entities.LifecycleEnvironment(organization=module_org).create() - content_view = entities.ContentView(organization=module_org).create() + lce = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() + content_view = module_target_sat.api.ContentView(organization=module_org).create() content_view.publish() content_view = content_view.read() content_view.version[0].promote(data={'environment_ids': lce.id, 'force': False}) - entities.Host( + module_target_sat.api.Host( hostgroup=hostgroup, location=module_location, organization=module_org, @@ -193,7 +195,7 @@ def test_rebuild_config(self, module_org, module_location, hostgroup): @pytest.mark.tier1 
@pytest.mark.parametrize('name', **parametrized(valid_hostgroups_list())) - def test_positive_create_with_name(self, name, module_org, module_location): + def test_positive_create_with_name(self, name, module_org, module_location, module_target_sat): """Create a hostgroup with different names :id: fd5d353c-fd0c-4752-8a83-8f399b4c3416 @@ -204,13 +206,13 @@ def test_positive_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() assert name == hostgroup.name @pytest.mark.tier1 - def test_positive_clone(self, hostgroup): + def test_positive_clone(self, hostgroup, target_sat): """Create a hostgroup by cloning an existing one :id: 44ac8b3b-9cb0-4a9e-ad9b-2c67b2411922 @@ -220,7 +222,7 @@ def test_positive_clone(self, hostgroup): :CaseImportance: Critical """ hostgroup_cloned_name = gen_string('alpha') - hostgroup_cloned = entities.HostGroup(id=hostgroup.id).clone( + hostgroup_cloned = target_sat.api.HostGroup(id=hostgroup.id).clone( data={'name': hostgroup_cloned_name} ) hostgroup_origin = hostgroup.read_json() @@ -402,20 +404,20 @@ def test_positive_create_with_realm(self, module_org, module_location, target_sa :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() assert hostgroup.realm.read().name == realm.name @pytest.mark.tier2 - def test_positive_create_with_locs(self, module_org): + def test_positive_create_with_locs(self, module_org, module_target_sat): """Create a hostgroup with multiple locations specified :id: 0c2ee2ff-9e7a-4931-8cea-f4eecbd8c4c0 @@ -427,12 +429,17 @@ def test_positive_create_with_locs(self, module_org): :CaseLevel: Integration """ - locs = [entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(location=locs, organization=[module_org]).create() + locs = [ + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) + ] + hostgroup = module_target_sat.api.HostGroup( + location=locs, organization=[module_org] + ).create() assert {loc.name for loc in locs} == {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_create_with_orgs(self): + def test_positive_create_with_orgs(self, target_sat): """Create a hostgroup with multiple organizations specified :id: 09642238-cf0d-469a-a0b5-c167b1b8edf5 @@ -444,8 +451,8 @@ def test_positive_create_with_orgs(self): :CaseLevel: Integration """ - orgs = [entities.Organization().create() for _ in range(randint(3, 5))] - hostgroup = entities.HostGroup(organization=orgs).create() + orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] + hostgroup = target_sat.api.HostGroup(organization=orgs).create() assert {org.name for org in orgs}, {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @@ -497,20 +504,20 @@ def test_positive_update_realm(self, module_org, module_location, target_sat): :CaseLevel: Integration """ - realm = entities.Realm( + realm = target_sat.api.Realm( 
location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[module_location], organization=[module_org], realm=realm ).create() - new_realm = entities.Realm( + new_realm = target_sat.api.Realm( location=[module_location], organization=[module_org], - realm_proxy=entities.SmartProxy().search( + realm_proxy=target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0], ).create() @@ -549,7 +556,7 @@ def test_positive_update_content_source(self, hostgroup, target_sat): :CaseLevel: Integration """ - new_content_source = entities.SmartProxy().search( + new_content_source = target_sat.api.SmartProxy().search( query={'search': f'url = {target_sat.url}:9090'} )[0] hostgroup.content_source = new_content_source @@ -557,7 +564,7 @@ def test_positive_update_content_source(self, hostgroup, target_sat): assert hostgroup.content_source.read().name == new_content_source.name @pytest.mark.tier2 - def test_positive_update_locs(self, module_org, hostgroup): + def test_positive_update_locs(self, module_org, hostgroup, module_target_sat): """Update a hostgroup with new multiple locations :id: b045f7e8-d7c0-428b-a29c-8d54e53742e2 @@ -569,14 +576,15 @@ def test_positive_update_locs(self, module_org, hostgroup): :CaseLevel: Integration """ new_locs = [ - entities.Location(organization=[module_org]).create() for _ in range(randint(3, 5)) + module_target_sat.api.Location(organization=[module_org]).create() + for _ in range(randint(3, 5)) ] hostgroup.location = new_locs hostgroup = hostgroup.update(['location']) assert {loc.name for loc in new_locs}, {loc.read().name for loc in hostgroup.location} @pytest.mark.tier2 - def test_positive_update_orgs(self, hostgroup): + def test_positive_update_orgs(self, hostgroup, target_sat): """Update a hostgroup with new multiple organizations :id: 5f6bd4f9-4bd6-4d7e-9a91-de824299020e @@ -587,14 +595,14 @@ def test_positive_update_orgs(self, hostgroup): :CaseLevel: Integration """ - new_orgs = [entities.Organization().create() for _ in range(randint(3, 5))] + new_orgs = [target_sat.api.Organization().create() for _ in range(randint(3, 5))] hostgroup.organization = new_orgs hostgroup = hostgroup.update(['organization']) assert {org.name for org in new_orgs} == {org.read().name for org in hostgroup.organization} @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name, module_org, module_location): + def test_negative_create_with_name(self, name, module_org, module_location, module_target_sat): """Attempt to create a hostgroup with invalid names :id: 3f5aa17a-8db9-4fe9-b309-b8ec5e739da1 @@ -606,7 +614,7 @@ def test_negative_create_with_name(self, name, module_org, module_location): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.HostGroup( + module_target_sat.api.HostGroup( location=[module_location], name=name, organization=[module_org] ).create() @@ -630,7 +638,7 @@ def test_negative_update_name(self, new_name, hostgroup): assert hostgroup.read().name == original_name @pytest.mark.tier2 - def test_positive_create_with_group_parameters(self, module_org): + def test_positive_create_with_group_parameters(self, module_org, module_target_sat): """Create a hostgroup with 'group parameters' specified :id: 
0959e2a2-d635-482b-9b2e-d33990d6f0dc @@ -646,7 +654,7 @@ def test_positive_create_with_group_parameters(self, module_org): :BZ: 1710853 """ group_params = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - hostgroup = entities.HostGroup( + hostgroup = module_target_sat.api.HostGroup( organization=[module_org], group_parameters_attributes=[group_params] ).create() assert group_params['name'] == hostgroup.group_parameters_attributes[0]['name'] diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index f9c3023c77e..ed713078b4f 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from robottelo import constants @@ -206,7 +205,7 @@ def test_positive_auto_attach_with_http_proxy( @pytest.mark.e2e @pytest.mark.tier2 -def test_positive_assign_http_proxy_to_products(): +def test_positive_assign_http_proxy_to_products(target_sat): """Assign http_proxy to Products and check whether http-proxy is used during sync. @@ -219,15 +218,15 @@ def test_positive_assign_http_proxy_to_products(): :CaseImportance: Critical """ - org = entities.Organization().create() + org = target_sat.api.Organization().create() # create HTTP proxies - http_proxy_a = entities.HTTPProxy( + http_proxy_a = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.un_auth_proxy_url, organization=[org], ).create() - http_proxy_b = entities.HTTPProxy( + http_proxy_b = target_sat.api.HTTPProxy( name=gen_string('alpha', 15), url=settings.http_proxy.auth_proxy_url, username=settings.http_proxy.username, @@ -236,20 +235,20 @@ def test_positive_assign_http_proxy_to_products(): ).create() # Create products and repositories - product_a = entities.Product(organization=org).create() - product_b = entities.Product(organization=org).create() - repo_a1 = entities.Repository(product=product_a, http_proxy_policy='none').create() - repo_a2 = entities.Repository( + product_a = target_sat.api.Product(organization=org).create() + product_b = target_sat.api.Product(organization=org).create() + repo_a1 = target_sat.api.Repository(product=product_a, http_proxy_policy='none').create() + repo_a2 = target_sat.api.Repository( product=product_a, http_proxy_policy='use_selected_http_proxy', http_proxy_id=http_proxy_a.id, ).create() - repo_b1 = entities.Repository(product=product_b, http_proxy_policy='none').create() - repo_b2 = entities.Repository( + repo_b1 = target_sat.api.Repository(product=product_b, http_proxy_policy='none').create() + repo_b2 = target_sat.api.Repository( product=product_b, http_proxy_policy='global_default_http_proxy' ).create() # Add http_proxy to products - entities.ProductBulkAction().http_proxy( + target_sat.api.ProductBulkAction().http_proxy( data={ "ids": [product_a.id, product_b.id], "http_proxy_policy": "use_selected_http_proxy", diff --git a/tests/foreman/api/test_ldapauthsource.py b/tests/foreman/api/test_ldapauthsource.py index 10b9851b3d6..2139e7c947f 100644 --- a/tests/foreman/api/test_ldapauthsource.py +++ b/tests/foreman/api/test_ldapauthsource.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -27,7 +26,9 @@ @pytest.mark.tier3 @pytest.mark.upgrade @pytest.mark.parametrize('auth_source_type', ['AD', 'IPA']) -def test_positive_endtoend(auth_source_type, module_org, module_location, ad_data, ipa_data): +def test_positive_endtoend( 
+ auth_source_type, module_org, module_location, ad_data, ipa_data, module_target_sat +): """Create/update/delete LDAP authentication with AD using names of different types :id: e3607c97-7c48-4cf6-b119-2bfd895d9325 @@ -46,7 +47,7 @@ def test_positive_endtoend(auth_source_type, module_org, module_location, ad_dat auth_source_data = ipa_data auth_source_data['ldap_user_name'] = auth_source_data['ldap_user_cn'] auth_type_attr = LDAP_ATTR['login'] - authsource = entities.AuthSourceLDAP( + authsource = module_target_sat.api.AuthSourceLDAP( onthefly_register=True, account=auth_source_data['ldap_user_cn'], account_password=auth_source_data['ldap_user_passwd'], diff --git a/tests/foreman/api/test_lifecycleenvironment.py b/tests/foreman/api/test_lifecycleenvironment.py index b8e7b3ef347..79cece7c76f 100644 --- a/tests/foreman/api/test_lifecycleenvironment.py +++ b/tests/foreman/api/test_lifecycleenvironment.py @@ -21,7 +21,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -34,20 +33,20 @@ @pytest.fixture(scope='module') -def module_lce(module_org): - return entities.LifecycleEnvironment( +def module_lce(module_org, module_target_sat): + return module_target_sat.api.LifecycleEnvironment( organization=module_org, description=gen_string('alpha') ).create() @pytest.fixture -def lce(module_org): - return entities.LifecycleEnvironment(organization=module_org).create() +def lce(module_org, module_target_sat): + return module_target_sat.api.LifecycleEnvironment(organization=module_org).create() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(name): +def test_positive_create_with_name(name, target_sat): """Create lifecycle environment with valid name only :id: ec1d985a-6a39-4de6-b635-c803ecedd832 @@ -58,12 +57,12 @@ def test_positive_create_with_name(name): :parametrized: yes """ - assert entities.LifecycleEnvironment(name=name).create().name == name + assert target_sat.api.LifecycleEnvironment(name=name).create().name == name @pytest.mark.parametrize('desc', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(desc): +def test_positive_create_with_description(desc, target_sat): """Create lifecycle environment with valid description :id: 0bc05510-afc7-4087-ab75-1065ab5ba1d3 @@ -75,11 +74,11 @@ def test_positive_create_with_description(desc): :parametrized: yes """ - assert entities.LifecycleEnvironment(description=desc).create().description == desc + assert target_sat.api.LifecycleEnvironment(description=desc).create().description == desc @pytest.mark.tier1 -def test_positive_create_prior(module_org): +def test_positive_create_prior(module_org, module_target_sat): """Create a lifecycle environment with valid name with Library as prior @@ -90,13 +89,13 @@ def test_positive_create_prior(module_org): :CaseImportance: Critical """ - lc_env = entities.LifecycleEnvironment(organization=module_org).create() + lc_env = module_target_sat.api.LifecycleEnvironment(organization=module_org).create() assert lc_env.prior.read().name == ENVIRONMENT @pytest.mark.parametrize('name', **parametrized(invalid_names_list())) @pytest.mark.tier3 -def test_negative_create_with_invalid_name(name): +def test_negative_create_with_invalid_name(name, target_sat): """Create lifecycle environment providing an invalid name :id: 7e8ea2e6-5927-4e86-8ea8-04c3feb524a6 @@ -108,12 +107,12 @@ def 
test_negative_create_with_invalid_name(name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.LifecycleEnvironment(name=name).create() + target_sat.api.LifecycleEnvironment(name=name).create() @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_lce, new_name): +def test_positive_update_name(module_lce, new_name, module_target_sat): """Create lifecycle environment providing the initial name, then update its name to another valid name. @@ -125,13 +124,13 @@ def test_positive_update_name(module_lce, new_name): """ module_lce.name = new_name module_lce.update(['name']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_name == updated.name @pytest.mark.parametrize('new_desc', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_lce, new_desc): +def test_positive_update_description(module_lce, new_desc, module_target_sat): """Create lifecycle environment providing the initial description, then update its description to another one. @@ -147,7 +146,7 @@ def test_positive_update_description(module_lce, new_desc): """ module_lce.description = new_desc module_lce.update(['description']) - updated = entities.LifecycleEnvironment(id=module_lce.id).read() + updated = module_target_sat.api.LifecycleEnvironment(id=module_lce.id).read() assert new_desc == updated.description @@ -175,7 +174,7 @@ def test_negative_update_name(module_lce, new_name): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete(lce, name): +def test_positive_delete(lce, name, target_sat): """Create lifecycle environment and then delete it. :id: cd5a97ca-c1e8-41c7-8d6b-f908916b24e1 @@ -188,12 +187,12 @@ def test_positive_delete(lce, name): """ lce.delete() with pytest.raises(HTTPError): - entities.LifecycleEnvironment(id=lce.id).read() + target_sat.api.LifecycleEnvironment(id=lce.id).read() @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_search_in_org(name): +def test_positive_search_in_org(name, target_sat): """Search for a lifecycle environment and specify an org ID. 
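A minimal sketch of the set-based ``search`` call this test exercises, assuming nailgun's ``EntitySearchMixin`` semantics; the helper and fixture names below are illustrative, not taken from the diff:

    def find_lces_in_org(target_sat, org):
        # Sketch only: passing a set of field names to search() builds the request
        # payload from the entity's own current values (here its organization), so the
        # result should contain the automatic 'Library' environment plus the new one.
        lce = target_sat.api.LifecycleEnvironment(organization=org).create()
        return lce.search({'organization'})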
:id: 110e4777-c374-4365-b676-b1db4552fe51 @@ -211,8 +210,8 @@ def test_positive_search_in_org(name): :parametrized: yes """ - new_org = entities.Organization().create() - lc_env = entities.LifecycleEnvironment(organization=new_org).create() + new_org = target_sat.api.Organization().create() + lc_env = target_sat.api.LifecycleEnvironment(organization=new_org).create() lc_envs = lc_env.search({'organization'}) assert len(lc_envs) == 2 assert {lc_env_.name for lc_env_ in lc_envs}, {'Library', lc_env.name} diff --git a/tests/foreman/api/test_media.py b/tests/foreman/api/test_media.py index e5524d914fd..97c6da607f4 100644 --- a/tests/foreman/api/test_media.py +++ b/tests/foreman/api/test_media.py @@ -19,7 +19,6 @@ import random from fauxfactory import gen_string, gen_url -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -35,8 +34,8 @@ class TestMedia: """Tests for ``api/v2/media``.""" @pytest.fixture(scope='class') - def class_media(self, module_org): - return entities.Media(organization=[module_org]).create() + def class_media(self, module_org, class_target_sat): + return class_target_sat.api.Media(organization=[module_org]).create() @pytest.mark.tier1 @pytest.mark.upgrade @@ -44,7 +43,7 @@ def class_media(self, module_org): 'name, new_name', **parametrized(list(zip(valid_data_list().values(), valid_data_list().values()))) ) - def test_positive_crud_with_name(self, module_org, name, new_name): + def test_positive_crud_with_name(self, module_org, name, new_name, module_target_sat): """Create, update, delete media with valid name only :id: b07a4549-7dd5-4b36-a1b4-9f8d48ddfcb5 @@ -55,9 +54,9 @@ def test_positive_crud_with_name(self, module_org, name, new_name): :CaseImportance: Critical """ - media = entities.Media(organization=[module_org], name=name).create() + media = module_target_sat.api.Media(organization=[module_org], name=name).create() assert media.name == name - media = entities.Media(id=media.id, name=new_name).update(['name']) + media = module_target_sat.api.Media(id=media.id, name=new_name).update(['name']) assert media.name == new_name media.delete() with pytest.raises(HTTPError): @@ -65,7 +64,7 @@ def test_positive_crud_with_name(self, module_org, name, new_name): @pytest.mark.tier1 @pytest.mark.parametrize('os_family', **parametrized(OPERATING_SYSTEMS)) - def test_positive_create_update_with_os_family(self, module_org, os_family): + def test_positive_create_update_with_os_family(self, module_org, os_family, module_target_sat): """Create and update media with every OS family possible :id: d02404f0-b2ad-412c-b1cd-0548254f7c88 @@ -75,14 +74,14 @@ def test_positive_create_update_with_os_family(self, module_org, os_family): :expectedresults: Media entity is created and has proper OS family assigned """ - media = entities.Media(organization=[module_org], os_family=os_family).create() + media = module_target_sat.api.Media(organization=[module_org], os_family=os_family).create() assert media.os_family == os_family new_os_family = new_os_family = random.choice(OPERATING_SYSTEMS) media.os_family = new_os_family assert media.update(['os_family']).os_family == new_os_family @pytest.mark.tier2 - def test_positive_create_with_location(self, module_org, module_location): + def test_positive_create_with_location(self, module_org, module_location, module_target_sat): """Create media entity assigned to non-default location :id: 1c4fa736-c145-46ca-9feb-c4046fc778c6 @@ -91,11 +90,13 @@ def test_positive_create_with_location(self, module_org, module_location): 
:CaseLevel: Integration """ - media = entities.Media(organization=[module_org], location=[module_location]).create() + media = module_target_sat.api.Media( + organization=[module_org], location=[module_location] + ).create() assert media.location[0].read().name == module_location.name @pytest.mark.tier2 - def test_positive_create_with_os(self, module_org): + def test_positive_create_with_os(self, module_org, module_target_sat): """Create media entity assigned to operating system entity :id: dec22198-ed07-480c-9306-fa5458baec0b @@ -104,12 +105,14 @@ :CaseLevel: Integration """ - os = entities.OperatingSystem().create() - media = entities.Media(organization=[module_org], operatingsystem=[os]).create() + os = module_target_sat.api.OperatingSystem().create() + media = module_target_sat.api.Media( + organization=[module_org], operatingsystem=[os] + ).create() assert os.read().medium[0].read().name == media.name @pytest.mark.tier2 - def test_positive_create_update_url(self, module_org): + def test_positive_create_update_url(self, module_org, module_target_sat): """Create media entity providing the initial url path, then update that url to another valid one. @@ -120,15 +123,15 @@ :CaseImportance: Medium """ url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(organization=[module_org], path_=url).create() + media = module_target_sat.api.Media(organization=[module_org], path_=url).create() assert media.path_ == url new_url = gen_url(subdomain=gen_string('alpha')) - media = entities.Media(id=media.id, path_=new_url).update(['path_']) + media = module_target_sat.api.Media(id=media.id, path_=new_url).update(['path_']) assert media.path_ == new_url @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Try to create media entity providing an invalid name :id: 0934f4dc-f674-40fe-a639-035761139c83 @@ -140,10 +143,10 @@ :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(name=name).create() + target_sat.api.Media(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_invalid_url(self): + def test_negative_create_with_invalid_url(self, target_sat): """Try to create media entity providing an invalid URL :id: ae00b6bb-37ed-459e-b9f7-acc92ed0b262 @@ -153,10 +156,10 @@ :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(path_='NON_EXISTENT_URL').create() + target_sat.api.Media(path_='NON_EXISTENT_URL').create() @pytest.mark.tier1 - def test_negative_create_with_invalid_os_family(self): + def test_negative_create_with_invalid_os_family(self, target_sat): """Try to create media entity providing an invalid OS family :id: 368b7eac-8c52-4071-89c0-1946d7101291 @@ -166,11 +169,11 @@ :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(os_family='NON_EXISTENT_OS').create() + target_sat.api.Media(os_family='NON_EXISTENT_OS').create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list())) - def test_negative_update_name(self, module_org, class_media, new_name): + def test_negative_update_name(self, class_media, new_name, target_sat): """Create media
entity providing the initial name, then try to update its name to an invalid one. @@ -183,10 +186,10 @@ def test_negative_update_name(self, module_org, class_media, new_name): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, name=new_name).update(['name']) + target_sat.api.Media(id=class_media.id, name=new_name).update(['name']) @pytest.mark.tier1 - def test_negative_update_url(self, module_org, class_media): + def test_negative_update_url(self, class_media, target_sat): """Try to update media with an invalid URL. :id: 6832f178-4adc-4bb1-957d-0d8d4fd8d9cd @@ -196,10 +199,10 @@ def test_negative_update_url(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) + target_sat.api.Media(id=class_media.id, path_='NON_EXISTENT_URL').update(['path_']) @pytest.mark.tier1 - def test_negative_update_os_family(self, module_org, class_media): + def test_negative_update_os_family(self, class_media, target_sat): """Try to update media with an invalid operating system. :id: f4c5438d-5f98-40b1-9bc7-c0741e81303a @@ -209,4 +212,6 @@ def test_negative_update_os_family(self, module_org, class_media): :CaseImportance: Medium """ with pytest.raises(HTTPError): - entities.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update(['os_family']) + target_sat.api.Media(id=class_media.id, os_family='NON_EXISTENT_OS').update( + ['os_family'] + ) diff --git a/tests/foreman/api/test_multiple_paths.py b/tests/foreman/api/test_multiple_paths.py index 66b4bc7ea9a..4cacae5051e 100644 --- a/tests/foreman/api/test_multiple_paths.py +++ b/tests/foreman/api/test_multiple_paths.py @@ -411,7 +411,7 @@ def test_positive_entity_read(self, entity_cls): assert isinstance(entity_cls(id=entity_id).read(), entity_cls) @pytest.mark.tier1 - def test_positive_architecture_read(self): + def test_positive_architecture_read(self, target_sat): """Create an arch that points to an OS, and read the arch. :id: e4c7babe-11d8-4f85-8382-5267a49046e9 @@ -421,14 +421,14 @@ def test_positive_architecture_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - arch_id = entities.Architecture(operatingsystem=[os_id]).create_json()['id'] - architecture = entities.Architecture(id=arch_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + arch_id = target_sat.api.Architecture(operatingsystem=[os_id]).create_json()['id'] + architecture = target_sat.api.Architecture(id=arch_id).read() assert len(architecture.operatingsystem) == 1 assert architecture.operatingsystem[0].id == os_id @pytest.mark.tier1 - def test_positive_syncplan_read(self): + def test_positive_syncplan_read(self, target_sat): """Create a SyncPlan and read it back using ``nailgun.entity_mixins.EntityReadMixin.read``.
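A minimal sketch of the two nailgun call styles these read tests lean on, with an assumed ``target_sat`` argument standing in for the fixture; the helper name is illustrative:

    def create_and_read_os(target_sat):
        # Sketch only: create_json() POSTs the entity and returns the server's decoded
        # JSON response as a plain dict, while read() returns a typed entity instance.
        os_attrs = target_sat.api.OperatingSystem().create_json()
        os_entity = target_sat.api.OperatingSystem(id=os_attrs['id']).read()
        return os_attrs, os_entity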
@@ -439,14 +439,14 @@ def test_positive_syncplan_read(self): :CaseImportance: Critical """ - org_id = entities.Organization().create_json()['id'] - syncplan_id = entities.SyncPlan(organization=org_id).create_json()['id'] + org_id = target_sat.api.Organization().create_json()['id'] + syncplan_id = target_sat.api.SyncPlan(organization=org_id).create_json()['id'] assert isinstance( - entities.SyncPlan(organization=org_id, id=syncplan_id).read(), entities.SyncPlan + target_sat.api.SyncPlan(organization=org_id, id=syncplan_id).read(), entities.SyncPlan ) @pytest.mark.tier1 - def test_positive_osparameter_read(self): + def test_positive_osparameter_read(self, target_sat): """Create an OperatingSystemParameter and get it using ``nailgun.entity_mixins.EntityReadMixin.read``. @@ -457,15 +457,15 @@ :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - osp_id = entities.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] + os_id = target_sat.api.OperatingSystem().create_json()['id'] + osp_id = target_sat.api.OperatingSystemParameter(operatingsystem=os_id).create_json()['id'] assert isinstance( - entities.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), - entities.OperatingSystemParameter, + target_sat.api.OperatingSystemParameter(id=osp_id, operatingsystem=os_id).read(), + target_sat.api.OperatingSystemParameter, ) @pytest.mark.tier1 - def test_positive_permission_read(self): + def test_positive_permission_read(self, target_sat): """Create a Permission entity and get it using ``nailgun.entity_mixins.EntityReadMixin.read``. @@ -476,12 +476,12 @@ class and name and resource_type fields are populated :CaseImportance: Critical """ - perm = entities.Permission().search(query={'per_page': '1'})[0] + perm = target_sat.api.Permission().search(query={'per_page': '1'})[0] assert perm.name assert perm.resource_type @pytest.mark.tier1 - def test_positive_media_read(self): + def test_positive_media_read(self, target_sat): """Create a media pointing at an OS and read the media. :id: 67b656fe-9302-457a-b544-3addb11c85e0 @@ -490,8 +490,8 @@ def test_positive_media_read(self): :CaseImportance: Critical """ - os_id = entities.OperatingSystem().create_json()['id'] - media_id = entities.Media(operatingsystem=[os_id]).create_json()['id'] - media = entities.Media(id=media_id).read() + os_id = target_sat.api.OperatingSystem().create_json()['id'] + media_id = target_sat.api.Media(operatingsystem=[os_id]).create_json()['id'] + media = target_sat.api.Media(id=media_id).read() assert len(media.operatingsystem) == 1 assert media.operatingsystem[0].id == os_id diff --git a/tests/foreman/api/test_organization.py b/tests/foreman/api/test_organization.py index 692504faf79..868ec7368b1 100644 --- a/tests/foreman/api/test_organization.py +++ b/tests/foreman/api/test_organization.py @@ -24,7 +24,7 @@ from random import randint from fauxfactory import gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -44,7 +44,7 @@ def valid_org_data_list(): Note: The maximum allowed length of org name is 242 only. This is an intended behavior (Also note that 255 is the standard across other
+ entities) """ return dict( alpha=gen_string('alpha', randint(1, 242)), @@ -61,7 +61,7 @@ class TestOrganization: """Tests for the ``organizations`` path.""" @pytest.mark.tier1 - def test_positive_create(self): + def test_positive_create(self, target_sat): """Create an organization using a 'text/plain' content-type. :id: 6f67a3f0-0c1d-498c-9a35-28207b0faec2 @@ -70,7 +70,7 @@ def test_positive_create(self): :CaseImportance: Critical """ - organization = entities.Organization() + organization = target_sat.api.Organization() organization.create_missing() response = client.post( organization.path(), @@ -87,7 +87,7 @@ def test_positive_create(self): @pytest.mark.tier1 @pytest.mark.build_sanity @pytest.mark.parametrize('name', **parametrized(valid_org_data_list())) - def test_positive_create_with_name_and_description(self, name): + def test_positive_create_with_name_and_description(self, name, target_sat): """Create an organization and provide a name and description. :id: afeea84b-61ca-40bf-bb16-476432919115 @@ -99,7 +99,7 @@ def test_positive_create_with_name_and_description(self, name): :parametrized: yes """ - org = entities.Organization(name=name, description=name).create() + org = target_sat.api.Organization(name=name, description=name).create() assert org.name == name assert org.description == name @@ -110,7 +110,7 @@ def test_positive_create_with_name_and_description(self, name): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_invalid_name(self, name): + def test_negative_create_with_invalid_name(self, name, target_sat): """Create an org with an incorrect name. :id: 9c6a4b45-a98a-4d76-9865-92d992fa1a22 @@ -120,10 +120,10 @@ def test_negative_create_with_invalid_name(self, name): :parametrized: yes """ with pytest.raises(HTTPError): - entities.Organization(name=name).create() + target_sat.api.Organization(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_same_name(self): + def test_negative_create_with_same_name(self, target_sat): """Create two organizations with identical names. :id: a0f5333c-cc83-403c-9bf7-08fb372909dc @@ -132,9 +132,9 @@ def test_negative_create_with_same_name(self): :CaseImportance: Critical """ - name = entities.Organization().create().name + name = target_sat.api.Organization().create().name with pytest.raises(HTTPError): - entities.Organization(name=name).create() + target_sat.api.Organization(name=name).create() @pytest.mark.tier1 def test_negative_check_org_endpoint(self, module_entitlement_manifest_org): @@ -155,7 +155,7 @@ def test_negative_check_org_endpoint(self, module_entitlement_manifest_org): assert 'BEGIN RSA PRIVATE KEY' not in orgstring @pytest.mark.tier1 - def test_positive_search(self): + def test_positive_search(self, target_sat): """Create an organization, then search for it by name. 
:id: f6f1d839-21f2-4676-8683-9f899cbdec4c @@ -164,14 +164,14 @@ def test_positive_search(self): :CaseImportance: High """ - org = entities.Organization().create() - orgs = entities.Organization().search(query={'search': f'name="{org.name}"'}) + org = target_sat.api.Organization().create() + orgs = target_sat.api.Organization().search(query={'search': f'name="{org.name}"'}) assert len(orgs) == 1 assert orgs[0].id == org.id assert orgs[0].name == org.name @pytest.mark.tier1 - def test_negative_create_with_wrong_path(self): + def test_negative_create_with_wrong_path(self, target_sat): """Attempt to create an organization using foreman API path (``api/v2/organizations``) @@ -184,7 +184,7 @@ def test_negative_create_with_wrong_path(self): :CaseImportance: Critical """ - org = entities.Organization() + org = target_sat.api.Organization() org._meta['api_path'] = 'api/v2/organizations' with pytest.raises(HTTPError) as err: org.create() @@ -192,7 +192,7 @@ def test_negative_create_with_wrong_path(self): assert 'Route overriden by Katello' in err.value.response.text @pytest.mark.tier2 - def test_default_org_id_check(self): + def test_default_org_id_check(self, target_sat): """test to check the default_organization id :id: df066396-a069-4e9e-b3c1-c6d34a755ec0 @@ -204,7 +204,7 @@ def test_default_org_id_check(self): :CaseImportance: Low """ default_org_id = ( - entities.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[0].id + target_sat.api.Organization().search(query={'search': f'name="{DEFAULT_ORG}"'})[0].id ) assert default_org_id == 1 @@ -213,9 +213,9 @@ class TestOrganizationUpdate: """Tests for the ``organizations`` path.""" @pytest.fixture - def module_org(self): + def module_org(self, target_sat): """Create an organization.""" - return entities.Organization().create() + return target_sat.api.Organization().create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_org_data_list())) @@ -252,7 +252,7 @@ def test_positive_update_description(self, module_org, desc): assert module_org.description == desc @pytest.mark.tier2 - def test_positive_update_user(self, module_org): + def test_positive_update_user(self, module_org, target_sat): """Update an organization, associate user with it. :id: 2c0c0061-5b4e-4007-9f54-b61d6e65ef58 @@ -261,14 +261,14 @@ def test_positive_update_user(self, module_org): :CaseLevel: Integration """ - user = entities.User().create() + user = target_sat.api.User().create() module_org.user = [user] module_org = module_org.update(['user']) assert len(module_org.user) == 1 assert module_org.user[0].id == user.id @pytest.mark.tier2 - def test_positive_update_subnet(self, module_org): + def test_positive_update_subnet(self, module_org, target_sat): """Update an organization, associate subnet with it. 
:id: 3aa0b9cb-37f7-4e7e-a6ec-c1b407225e54 @@ -277,14 +277,14 @@ def test_positive_update_subnet(self, module_org): :CaseLevel: Integration """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() module_org.subnet = [subnet] module_org = module_org.update(['subnet']) assert len(module_org.subnet) == 1 assert module_org.subnet[0].id == subnet.id @pytest.mark.tier2 - def test_positive_add_and_remove_hostgroup(self): + def test_positive_add_and_remove_hostgroup(self, target_sat): """Add a hostgroup to an organization and then remove it :id: 7eb1aca7-fd7b-404f-ab18-21be5052a11f @@ -297,8 +297,8 @@ def test_positive_add_and_remove_hostgroup(self): :CaseImportance: Medium """ - org = entities.Organization().create() - hostgroup = entities.HostGroup().create() + org = target_sat.api.Organization().create() + hostgroup = target_sat.api.HostGroup().create() org.hostgroup = [hostgroup] org = org.update(['hostgroup']) assert len(org.hostgroup) == 1 @@ -348,7 +348,7 @@ def test_positive_add_and_remove_smart_proxy(self, target_sat): @pytest.mark.tier1 @pytest.mark.parametrize('update_field', ['name', 'label']) - def test_negative_update(self, module_org, update_field): + def test_negative_update(self, module_org, update_field, target_sat): """Update an organization's attributes with invalid values. :id: b7152d0b-5ab0-4d68-bfdf-f3eabcb5fbc6 @@ -367,4 +367,4 @@ def test_negative_update(self, module_org, update_field): update_field: gen_string(str_type='utf8', length=256 if update_field == 'name' else 10) } with pytest.raises(HTTPError): - entities.Organization(id=module_org.id, **update_dict).update([update_field]) + target_sat.api.Organization(id=module_org.id, **update_dict).update([update_field]) diff --git a/tests/foreman/api/test_oscap_tailoringfiles.py b/tests/foreman/api/test_oscap_tailoringfiles.py index d29eed1ca95..fb6b78750f0 100644 --- a/tests/foreman/api/test_oscap_tailoringfiles.py +++ b/tests/foreman/api/test_oscap_tailoringfiles.py @@ -16,7 +16,6 @@ :Upstream: No """ -from nailgun import entities import pytest from robottelo.utils.datafactory import gen_string @@ -27,7 +26,9 @@ class TestTailoringFile: @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_crud_tailoringfile(self, default_org, default_location, tailoring_file_path): + def test_positive_crud_tailoringfile( + self, default_org, default_location, tailoring_file_path, target_sat + ): """Perform end to end testing for oscap tailoring files component :id: 2441988f-2054-49f7-885e-3675336f712f @@ -39,23 +40,23 @@ def test_positive_crud_tailoringfile(self, default_org, default_location, tailor name = gen_string('alpha') new_name = gen_string('alpha') original_filename = gen_string('alpha') - scap = entities.TailoringFile( + scap = target_sat.api.TailoringFile( name=name, scap_file=tailoring_file_path['local'], organization=[default_org], location=[default_location], ).create() - assert entities.TailoringFile().search(query={'search': f'name={name}'}) - result = entities.TailoringFile(id=scap.id).read() + assert target_sat.api.TailoringFile().search(query={'search': f'name={name}'}) + result = target_sat.api.TailoringFile(id=scap.id).read() assert result.name == name assert result.location[0].id == default_location.id assert result.organization[0].id == default_org.id - scap = entities.TailoringFile( + scap = target_sat.api.TailoringFile( id=scap.id, name=new_name, original_filename=f'{original_filename}' ).update() - result = entities.TailoringFile(id=scap.id).read() + result = 
target_sat.api.TailoringFile(id=scap.id).read() assert result.name == new_name assert result.original_filename == original_filename - assert entities.TailoringFile().search(query={'search': f'name={new_name}'}) - entities.TailoringFile(id=scap.id).delete() - assert not entities.TailoringFile().search(query={'search': f'name={new_name}'}) + assert target_sat.api.TailoringFile().search(query={'search': f'name={new_name}'}) + target_sat.api.TailoringFile(id=scap.id).delete() + assert not target_sat.api.TailoringFile().search(query={'search': f'name={new_name}'}) diff --git a/tests/foreman/api/test_oscappolicy.py b/tests/foreman/api/test_oscappolicy.py index 1efdd3e3779..c9500c465d1 100644 --- a/tests/foreman/api/test_oscappolicy.py +++ b/tests/foreman/api/test_oscappolicy.py @@ -17,7 +17,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest @@ -27,7 +26,7 @@ class TestOscapPolicy: @pytest.mark.tier1 @pytest.mark.e2e def test_positive_crud_scap_policy( - self, default_org, default_location, scap_content, tailoring_file + self, default_org, default_location, scap_content, tailoring_file, target_sat ): """Perform end to end testing for oscap policy component @@ -42,11 +41,11 @@ def test_positive_crud_scap_policy( name = gen_string('alpha') new_name = gen_string('alpha') description = gen_string('alpha') - hostgroup = entities.HostGroup( + hostgroup = target_sat.api.HostGroup( location=[default_location], organization=[default_org] ).create() # Create new oscap policy with assigned content and tailoring file - policy = entities.CompliancePolicies( + policy = target_sat.api.CompliancePolicies( name=name, deploy_by='ansible', description=description, @@ -60,7 +59,7 @@ def test_positive_crud_scap_policy( location=[default_location], organization=[default_org], ).create() - assert entities.CompliancePolicies().search(query={'search': f'name="{name}"'}) + assert target_sat.api.CompliancePolicies().search(query={'search': f'name="{name}"'}) # Check that created entity has expected values assert policy.deploy_by == 'ansible' assert policy.name == name @@ -74,9 +73,11 @@ def test_positive_crud_scap_policy( assert str(policy.organization[0].id) == str(default_org.id) assert str(policy.location[0].id) == str(default_location.id) # Update oscap policy with new name - policy = entities.CompliancePolicies(id=policy.id, name=new_name).update() + policy = target_sat.api.CompliancePolicies(id=policy.id, name=new_name).update() assert policy.name == new_name - assert not entities.CompliancePolicies().search(query={'search': f'name="{name}"'}) + assert not target_sat.api.CompliancePolicies().search(query={'search': f'name="{name}"'}) # Delete oscap policy entity - entities.CompliancePolicies(id=policy.id).delete() - assert not entities.CompliancePolicies().search(query={'search': f'name="{new_name}"'}) + target_sat.api.CompliancePolicies(id=policy.id).delete() + assert not target_sat.api.CompliancePolicies().search( + query={'search': f'name="{new_name}"'} + ) diff --git a/tests/foreman/api/test_permission.py b/tests/foreman/api/test_permission.py index bf511c5e6c7..c232b3ab840 100644 --- a/tests/foreman/api/test_permission.py +++ b/tests/foreman/api/test_permission.py @@ -72,7 +72,7 @@ def create_permissions(self, class_target_sat): cls.permission_names = list(chain.from_iterable(cls.permissions.values())) @pytest.mark.tier1 - def test_positive_search_by_name(self): + def test_positive_search_by_name(self, target_sat): """Search for a permission by name. 
:id: 1b6117f6-599d-4b2d-80a8-1e0764bdc04d @@ -84,7 +84,9 @@ def test_positive_search_by_name(self): """ failures = {} for permission_name in self.permission_names: - results = entities.Permission().search(query={'search': f'name="{permission_name}"'}) + results = target_sat.api.Permission().search( + query={'search': f'name="{permission_name}"'} + ) if len(results) != 1 or len(results) == 1 and results[0].name != permission_name: failures[permission_name] = { 'length': len(results), @@ -95,7 +97,7 @@ def test_positive_search_by_name(self): pytest.fail(json.dumps(failures, indent=True, sort_keys=True)) @pytest.mark.tier1 - def test_positive_search_by_resource_type(self): + def test_positive_search_by_resource_type(self, target_sat): """Search for permissions by resource type. :id: 29d9362b-1bf3-4722-b40f-a5e8b4d0d9ba @@ -109,7 +111,7 @@ def test_positive_search_by_resource_type(self): for resource_type in self.permission_resource_types: if resource_type is None: continue - perm_group = entities.Permission().search( + perm_group = target_sat.api.Permission().search( query={'search': f'resource_type="{resource_type}"'} ) permissions = {perm.name for perm in perm_group} @@ -128,7 +130,7 @@ def test_positive_search_by_resource_type(self): pytest.fail(json.dumps(failures, indent=True, sort_keys=True)) @pytest.mark.tier1 - def test_positive_search(self): + def test_positive_search(self, target_sat): """search with no parameters return all permissions :id: e58308df-19ec-415d-8fa1-63ebf3cd0ad6 @@ -137,7 +139,7 @@ def test_positive_search(self): :CaseImportance: Critical """ - permissions = entities.Permission().search(query={'per_page': '1000'}) + permissions = target_sat.api.Permission().search(query={'per_page': '1000'}) names = {perm.name for perm in permissions} resource_types = {perm.resource_type for perm in permissions} expected_names = set(self.permission_names) @@ -206,7 +208,7 @@ def create_user(self, target_sat, class_org, class_location): location=[class_location], ).create() - def give_user_permission(self, perm_name): + def give_user_permission(self, perm_name, target_sat): """Give ``self.user`` the ``perm_name`` permission. This method creates a role and filter to accomplish the above goal. @@ -222,10 +224,10 @@ def give_user_permission(self, perm_name): updating ``self.user``'s roles. :returns: Nothing. """ - role = entities.Role().create() - permissions = entities.Permission().search(query={'search': f'name="{perm_name}"'}) + role = target_sat.api.Role().create() + permissions = target_sat.api.Permission().search(query={'search': f'name="{perm_name}"'}) assert len(permissions) == 1 - entities.Filter(permission=permissions, role=role).create() + target_sat.api.Filter(permission=permissions, role=role).create() self.user.role += [role] self.user = self.user.update(['role']) @@ -347,7 +349,7 @@ def test_positive_check_delete(self, entity_cls, class_org, class_location): 'entity_cls', **parametrized([entities.Architecture, entities.Domain, entities.ActivationKey]), ) - def test_positive_check_update(self, entity_cls, class_org, class_location): + def test_positive_check_update(self, entity_cls, class_org, class_location, target_sat): """Check whether the "edit_*" role has an effect. 
:id: b5de2115-b031-413e-8e5b-eac8cb714174 diff --git a/tests/foreman/api/test_product.py b/tests/foreman/api/test_product.py index 84b6d78f866..dfa9f59f705 100644 --- a/tests/foreman/api/test_product.py +++ b/tests/foreman/api/test_product.py @@ -20,7 +20,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -40,7 +39,7 @@ @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_create_with_name(request, name, module_org): +def test_positive_create_with_name(request, name, module_org, module_target_sat): """Create a product providing different valid names :id: 3d873b73-6919-4fda-84df-0e26bdf0c1dc @@ -51,12 +50,12 @@ def test_positive_create_with_name(request, name, module_org): :CaseImportance: Critical """ - product = entities.Product(name=name, organization=module_org).create() + product = module_target_sat.api.Product(name=name, organization=module_org).create() assert product.name == name @pytest.mark.tier1 -def test_positive_create_with_label(module_org): +def test_positive_create_with_label(module_org, module_target_sat): """Create a product providing label which is different from its name :id: 95cf8e05-fd09-422e-bf6f-8b1dde762976 @@ -66,14 +65,14 @@ def test_positive_create_with_label(module_org): :CaseImportance: Critical """ label = gen_string('alphanumeric') - product = entities.Product(label=label, organization=module_org).create() + product = module_target_sat.api.Product(label=label, organization=module_org).create() assert product.label == label assert product.name != label @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_create_with_description(description, module_org): +def test_positive_create_with_description(description, module_org, module_target_sat): """Create a product providing different descriptions :id: f3e2df77-6711-440b-800a-9cebbbec36c5 @@ -84,12 +83,14 @@ def test_positive_create_with_description(description, module_org): :CaseImportance: Critical """ - product = entities.Product(description=description, organization=module_org).create() + product = module_target_sat.api.Product( + description=description, organization=module_org + ).create() assert product.description == description @pytest.mark.tier2 -def test_positive_create_with_gpg(module_org): +def test_positive_create_with_gpg(module_org, module_target_sat): """Create a product and provide a GPG key. 
:id: 57331c1f-15dd-4c9f-b8fc-3010847b2975 @@ -98,17 +99,17 @@ def test_positive_create_with_gpg(module_org): :CaseLevel: Integration """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_FILE.read_text(), organization=module_org, ).create() - product = entities.Product(gpg_key=gpg_key, organization=module_org).create() + product = module_target_sat.api.Product(gpg_key=gpg_key, organization=module_org).create() assert product.gpg_key.id == gpg_key.id @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_create_with_name(name, module_org): +def test_negative_create_with_name(name, module_org, module_target_sat): """Create a product providing invalid names only :id: 76531f53-09ff-4ee9-89b9-09a697526fb1 @@ -120,11 +121,11 @@ def test_negative_create_with_name(name, module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Product(name=name, organization=module_org).create() + module_target_sat.api.Product(name=name, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_same_name(module_org): +def test_negative_create_with_same_name(module_org, module_target_sat): """Create a product providing a name of already existent entity :id: 039269c5-607a-4b70-91dd-b8fed8e50cc6 @@ -134,13 +135,13 @@ def test_negative_create_with_same_name(module_org): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.Product(name=name, organization=module_org).create() + module_target_sat.api.Product(name=name, organization=module_org).create() with pytest.raises(HTTPError): - entities.Product(name=name, organization=module_org).create() + module_target_sat.api.Product(name=name, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_label(module_org): +def test_negative_create_with_label(module_org, module_target_sat): """Create a product providing invalid label :id: 30b1a737-07f1-4786-b68a-734e57c33a62 @@ -150,12 +151,12 @@ def test_negative_create_with_label(module_org): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Product(label=gen_string('utf8'), organization=module_org).create() + module_target_sat.api.Product(label=gen_string('utf8'), organization=module_org).create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_update_name(name, module_org): +def test_positive_update_name(name, module_org, module_target_sat): """Update product name to another valid name. :id: 1a9f6e0d-43fb-42e2-9dbd-e880f03b0297 @@ -166,7 +167,7 @@ def test_positive_update_name(name, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.name = name product = product.update(['name']) assert product.name == name @@ -174,7 +175,7 @@ def test_positive_update_name(name, module_org): @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) -def test_positive_update_description(description, module_org): +def test_positive_update_description(description, module_org, module_target_sat): """Update product description to another valid one. 
:id: c960c326-2e9f-4ee7-bdec-35a705305067 @@ -185,14 +186,14 @@ def test_positive_update_description(description, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.description = description product = product.update(['description']) assert product.description == description @pytest.mark.tier1 -def test_positive_update_name_to_original(module_org): +def test_positive_update_name_to_original(module_org, module_target_sat): """Rename Product back to original name :id: 3075f17f-4475-4b64-9fbd-1e41ced9142d @@ -201,7 +202,7 @@ def test_positive_update_name_to_original(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() old_name = product.name # Update product name @@ -218,7 +219,7 @@ def test_positive_update_name_to_original(module_org): @pytest.mark.upgrade @pytest.mark.tier2 -def test_positive_update_gpg(module_org): +def test_positive_update_gpg(module_org, module_target_sat): """Create a product and update its GPGKey :id: 3b08f155-a0d6-4987-b281-dc02e8d5a03e @@ -228,14 +229,14 @@ def test_positive_update_gpg(module_org): :CaseLevel: Integration """ # Create a product and make it point to a GPG key. - gpg_key_1 = entities.GPGKey( + gpg_key_1 = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_FILE.read_text(), organization=module_org, ).create() - product = entities.Product(gpg_key=gpg_key_1, organization=module_org).create() + product = module_target_sat.api.Product(gpg_key=gpg_key_1, organization=module_org).create() # Update the product and make it point to a new GPG key. - gpg_key_2 = entities.GPGKey( + gpg_key_2 = module_target_sat.api.GPGKey( content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), organization=module_org, ).create() @@ -246,7 +247,7 @@ def test_positive_update_gpg(module_org): @pytest.mark.skip_if_open("BZ:1310422") @pytest.mark.tier2 -def test_positive_update_organization(module_org): +def test_positive_update_organization(module_org, module_target_sat): """Create a product and update its organization :id: b298957a-2cdb-4f17-a934-098612f3b659 @@ -257,9 +258,9 @@ def test_positive_update_organization(module_org): :BZ: 1310422 """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() # Update the product and make it point to a new organization. 
- new_org = entities.Organization().create() + new_org = module_target_sat.api.Organization().create() product.organization = new_org product = product.update() assert product.organization.id == new_org.id @@ -267,7 +268,7 @@ def test_positive_update_organization(module_org): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) -def test_negative_update_name(name, module_org): +def test_negative_update_name(name, module_org, module_target_sat): """Attempt to update product name to invalid one :id: 3eb61fa8-3524-4872-8f1b-4e88004f66f5 @@ -278,13 +279,13 @@ def test_negative_update_name(name, module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() with pytest.raises(HTTPError): - entities.Product(id=product.id, name=name).update(['name']) + module_target_sat.api.Product(id=product.id, name=name).update(['name']) @pytest.mark.tier1 -def test_negative_update_label(module_org): +def test_negative_update_label(module_org, module_target_sat): """Attempt to update product label to another one. :id: 065cd673-8d10-46c7-800c-b731b06a5359 @@ -293,7 +294,7 @@ def test_negative_update_label(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() product.label = gen_string('alpha') with pytest.raises(HTTPError): product.update(['label']) @@ -301,7 +302,7 @@ def test_negative_update_label(module_org): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_delete(name, module_org): +def test_positive_delete(name, module_org, module_target_sat): """Create product and then delete it. 
:id: 30df95f5-0a4e-41ee-a99f-b418c5c5f2f3 @@ -312,7 +313,7 @@ def test_positive_delete(name, module_org): :CaseImportance: Critical """ - product = entities.Product(name=name, organization=module_org).create() + product = module_target_sat.api.Product(name=name, organization=module_org).create() product.delete() with pytest.raises(HTTPError): product.read() @@ -320,7 +321,7 @@ def test_positive_delete(name, module_org): @pytest.mark.tier1 @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync(module_org): +def test_positive_sync(module_org, module_target_sat): """Sync product (repository within a product) :id: 860e00a1-c370-4bd0-8987-449338071d56 @@ -329,8 +330,8 @@ def test_positive_sync(module_org): :CaseImportance: Critical """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository( + product = module_target_sat.api.Product(organization=module_org).create() + repo = module_target_sat.api.Repository( product=product, content_type='yum', url=settings.repos.yum_1.url ).create() assert repo.read().content_counts['rpm'] == 0 @@ -341,7 +342,7 @@ def test_positive_sync(module_org): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') -def test_positive_sync_several_repos(module_org): +def test_positive_sync_several_repos(module_org, module_target_sat): """Sync product (all repositories within a product) :id: 07918442-b72f-4db5-96b6-975564f3663a @@ -353,11 +354,11 @@ def test_positive_sync_several_repos(module_org): :BZ: 1389543 """ - product = entities.Product(organization=module_org).create() - rpm_repo = entities.Repository( + product = module_target_sat.api.Product(organization=module_org).create() + rpm_repo = module_target_sat.api.Repository( product=product, content_type='yum', url=settings.repos.yum_1.url ).create() - docker_repo = entities.Repository( + docker_repo = module_target_sat.api.Repository( content_type=REPO_TYPE['docker'], docker_upstream_name=CONTAINER_UPSTREAM_NAME, product=product, @@ -372,7 +373,7 @@ def test_positive_sync_several_repos(module_org): @pytest.mark.tier2 -def test_positive_filter_product_list(module_entitlement_manifest_org): +def test_positive_filter_product_list(module_entitlement_manifest_org, module_target_sat): """Filter products based on param 'custom/redhat_only' :id: e61fb63a-4552-4915-b13d-23ab80138249 @@ -384,9 +385,9 @@ def test_positive_filter_product_list(module_entitlement_manifest_org): :BZ: 1667129 """ org = module_entitlement_manifest_org - product = entities.Product(organization=org).create() - custom_products = entities.Product(organization=org).search(query={'custom': True}) - rh_products = entities.Product(organization=org).search( + product = module_target_sat.api.Product(organization=org).create() + custom_products = module_target_sat.api.Product(organization=org).search(query={'custom': True}) + rh_products = module_target_sat.api.Product(organization=org).search( query={'redhat_only': True, 'per_page': 1000} ) diff --git a/tests/foreman/api/test_reporttemplates.py b/tests/foreman/api/test_reporttemplates.py index 7750dc174c0..6c224bc5e04 100644 --- a/tests/foreman/api/test_reporttemplates.py +++ b/tests/foreman/api/test_reporttemplates.py @@ -18,7 +18,6 @@ """ from broker import Broker from fauxfactory import gen_string -from nailgun import entities import pytest from requests import HTTPError from wait_for import wait_for @@ -40,24 +39,24 @@ 
def setup_content(module_entitlement_manifest_org, module_target_sat): reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() - custom_repo = entities.Repository( - product=entities.Product(organization=org).create(), + custom_repo = module_target_sat.api.Repository( + product=module_target_sat.api.Product(organization=org).create(), ).create() custom_repo.sync() - lce = entities.LifecycleEnvironment(organization=org).create() - cv = entities.ContentView( + lce = module_target_sat.api.LifecycleEnvironment(organization=org).create() + cv = module_target_sat.api.ContentView( organization=org, repository=[rh_repo_id, custom_repo.id], ).create() cv.publish() cvv = cv.read().version[0].read() cvv.promote(data={'environment_ids': lce.id, 'force': False}) - ak = entities.ActivationKey( + ak = module_target_sat.api.ActivationKey( content_view=cv, max_hosts=100, organization=org, environment=lce, auto_attach=True ).create() - subscription = entities.Subscription(organization=org).search( + subscription = module_target_sat.api.Subscription(organization=org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id}) @@ -69,7 +68,7 @@ def setup_content(module_entitlement_manifest_org, module_target_sat): @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) -def test_positive_CRUDL(name): +def test_positive_CRUDL(name, target_sat): """Create, Read, Update, Delete, List :id: a2a577db-144e-4761-a42e-e83885464786 @@ -92,27 +91,27 @@ def test_positive_CRUDL(name): """ # Create template1 = gen_string('alpha') - rt = entities.ReportTemplate(name=name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=name, template=template1).create() # List - res = entities.ReportTemplate().search(query={'search': f'name="{name}"'}) + res = target_sat.api.ReportTemplate().search(query={'search': f'name="{name}"'}) assert name in list(map(lambda x: x.name, res)) # Read - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert name == rt.name assert template1 == rt.template # Update template2 = gen_string('alpha') - entities.ReportTemplate(id=rt.id, template=template2).update(['template']) - rt = entities.ReportTemplate(id=rt.id).read() + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(['template']) + rt = target_sat.api.ReportTemplate(id=rt.id).read() assert template2 == rt.template # Delete - entities.ReportTemplate(id=rt.id).delete() + target_sat.api.ReportTemplate(id=rt.id).delete() with pytest.raises(HTTPError): - rt = entities.ReportTemplate(id=rt.id).read() + rt = target_sat.api.ReportTemplate(id=rt.id).read() @pytest.mark.tier1 -def test_positive_generate_report_nofilter(): +def test_positive_generate_report_nofilter(target_sat): """Generate Host - Statuses report :id: a4b687db-144e-4761-a42e-e93887464986 @@ -128,8 +127,10 @@ def test_positive_generate_report_nofilter(): :CaseImportance: Critical """ host_name = gen_string('alpha').lower() - entities.Host(name=host_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = rt.generate() for column_name in [ 'Name', @@ -155,7 
+156,7 @@ def test_positive_generate_report_nofilter(): @pytest.mark.tier2 -def test_positive_generate_report_filter(): +def test_positive_generate_report_filter(target_sat): """Generate Host - Statuses report :id: a4b677cb-144e-4761-a42e-e93887464986 @@ -172,9 +173,11 @@ def test_positive_generate_report_filter(): """ host1_name = gen_string('alpha').lower() host2_name = gen_string('alpha').lower() - entities.Host(name=host1_name).create() - entities.Host(name=host2_name).create() - rt = entities.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + target_sat.api.Host(name=host1_name).create() + target_sat.api.Host(name=host2_name).create() + rt = ( + target_sat.api.ReportTemplate().search(query={'search': 'name="Host - Statuses"'})[0].read() + ) res = rt.generate(data={"input_values": {"hosts": host2_name}}) for column_name in [ 'Name', @@ -201,7 +204,7 @@ def test_positive_generate_report_filter(): @pytest.mark.tier2 -def test_positive_report_add_userinput(): +def test_positive_report_add_userinput(target_sat): """Add user input to template, use it in template, generate template :id: a4a577db-144e-4761-a42e-e86887464986 @@ -221,21 +224,21 @@ def test_positive_report_add_userinput(): input_value = gen_string('alpha').lower() template_name = gen_string('alpha').lower() template = f'<%= "value=\\"" %><%= input(\'{input_name}\') %><%= "\\"" %>' - entities.Host(name=host_name).create() - rt = entities.ReportTemplate(name=template_name, template=template).create() - entities.TemplateInput( + target_sat.api.Host(name=host_name).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template).create() + target_sat.api.TemplateInput( name=input_name, input_type="user", template=rt.id, ).create() - ti = entities.TemplateInput(template=rt.id).search()[0].read() + ti = target_sat.api.TemplateInput(template=rt.id).search()[0].read() assert input_name == ti.name res = rt.generate(data={"input_values": {input_name: input_value}}) assert f'value="{input_value}"' in res @pytest.mark.tier2 -def test_positive_lock_clone_nodelete_unlock_report(): +def test_positive_lock_clone_nodelete_unlock_report(target_sat): """Lock report template. Check it can be cloned and can't be deleted or edited. Unlock. Check it can be deleted and edited. @@ -265,15 +268,15 @@ def test_positive_lock_clone_nodelete_unlock_report(): template_clone_name = gen_string('alpha').lower() template1 = gen_string('alpha') template2 = gen_string('alpha') - rt = entities.ReportTemplate(name=template_name, template=template1).create() + rt = target_sat.api.ReportTemplate(name=template_name, template=template1).create() # 2. Lock template - entities.ReportTemplate(id=rt.id, locked=True).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=True).update(["locked"]) rt = rt.read() assert rt.locked is True # 3. Clone template, check cloned data rt.clone(data={'name': template_clone_name}) cloned_rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': f'name="{template_clone_name}"'})[0] .read() ) @@ -285,24 +288,28 @@ def test_positive_lock_clone_nodelete_unlock_report(): rt.delete() # In BZ1680458, exception is thrown but template is deleted anyway assert ( - len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) != 0 + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + != 0 ) # 5. 
Try to edit template with pytest.raises(HTTPError): - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template1 == rt.template # 6. Unlock template - entities.ReportTemplate(id=rt.id, locked=False).update(["locked"]) + target_sat.api.ReportTemplate(id=rt.id, locked=False).update(["locked"]) rt = rt.read() assert rt.locked is False # 7. Edit template - entities.ReportTemplate(id=rt.id, template=template2).update(["template"]) + target_sat.api.ReportTemplate(id=rt.id, template=template2).update(["template"]) rt = rt.read() assert template2 == rt.template # 8. Delete template rt.delete() - assert len(entities.ReportTemplate().search(query={'search': f'name="{template_name}"'})) == 0 + assert ( + len(target_sat.api.ReportTemplate().search(query={'search': f'name="{template_name}"'})) + == 0 + ) @pytest.mark.tier2 @@ -527,7 +534,7 @@ def test_positive_generate_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) @@ -566,7 +573,7 @@ def test_positive_schedule_entitlements_report(setup_content, target_sat): vm.register_contenthost(org.label, ak.name) assert vm.subscribed rt = ( - entities.ReportTemplate() + target_sat.api.ReportTemplate() .search(query={'search': 'name="Subscription - Entitlement Report"'})[0] .read() ) diff --git a/tests/foreman/api/test_repositories.py b/tests/foreman/api/test_repositories.py index 590ae50ae98..c250d25bbbe 100644 --- a/tests/foreman/api/test_repositories.py +++ b/tests/foreman/api/test_repositories.py @@ -17,7 +17,6 @@ :Upstream: No """ from manifester import Manifester -from nailgun import entities from nailgun.entity_mixins import call_entity_method_with_timeout import pytest from requests.exceptions import HTTPError @@ -179,7 +178,7 @@ def test_positive_sync_kickstart_repo(module_entitlement_manifest_org, target_sa repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update(['download_policy']) diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index 3f778f6382a..593d787bdfc 100644 --- a/tests/foreman/api/test_repository.py +++ b/tests/foreman/api/test_repository.py @@ -23,7 +23,7 @@ from urllib.parse import urljoin, urlparse, urlunparse from fauxfactory import gen_string -from nailgun import client, entities +from nailgun import client from nailgun.entity_mixins import TaskFailedError, call_entity_method_with_timeout import pytest from requests.exceptions import HTTPError @@ -47,24 +47,24 @@ def repo_options(request, module_org, module_product): @pytest.fixture -def repo_options_custom_product(request, module_org): +def repo_options_custom_product(request, module_org, module_target_sat): """Return the options that were passed as indirect parameters.""" options = getattr(request, 'param', {}).copy() options['organization'] = module_org - options['product'] = entities.Product(organization=module_org).create() + options['product'] = module_target_sat.api.Product(organization=module_org).create() return options @pytest.fixture -def 
env(module_org): +def env(module_org, module_target_sat): """Create a new puppet environment.""" - return entities.Environment(organization=[module_org]).create() + return module_target_sat.api.Environment(organization=[module_org]).create() @pytest.fixture -def repo(repo_options): +def repo(repo_options, module_target_sat): """Create a new repository.""" - return entities.Repository(**repo_options).create() + return module_target_sat.api.Repository(**repo_options).create() class TestRepository: @@ -197,7 +197,7 @@ def test_positive_create_with_download_policy(self, repo_options, repo): @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_positive_create_with_default_download_policy(self, repo): + def test_positive_create_with_default_download_policy(self, repo, target_sat): """Verify if the default download policy is assigned when creating a YUM repo without `download_policy` field @@ -210,7 +210,7 @@ def test_positive_create_with_default_download_policy(self, repo): :CaseImportance: Critical """ - default_dl_policy = entities.Setting().search( + default_dl_policy = target_sat.api.Setting().search( query={'search': 'name=default_download_policy'} ) assert default_dl_policy @@ -310,7 +310,7 @@ def test_positive_create_unprotected(self, repo_options, repo): assert repo.unprotected == repo_options['unprotected'] @pytest.mark.tier2 - def test_positive_create_with_gpg(self, module_org, module_product): + def test_positive_create_with_gpg(self, module_org, module_product, module_target_sat): """Create a repository and provide a GPG key ID. :id: 023cf84b-74f3-4e63-a9d7-10afee6c1990 @@ -319,16 +319,16 @@ def test_positive_create_with_gpg(self, module_org, module_product): :CaseLevel: Integration """ - gpg_key = entities.GPGKey( + gpg_key = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key).create() # Verify that the given GPG key ID is used. assert repo.gpg_key.id == gpg_key.id @pytest.mark.tier2 - def test_positive_create_same_name_different_orgs(self, repo): + def test_positive_create_same_name_different_orgs(self, repo, target_sat): """Create two repos with the same name in two different organizations. :id: bd1bd7e3-e393-44c8-a6d0-42edade40f60 @@ -338,9 +338,9 @@ def test_positive_create_same_name_different_orgs(self, repo): :CaseLevel: Integration """ - org_2 = entities.Organization().create() - product_2 = entities.Product(organization=org_2).create() - repo_2 = entities.Repository(product=product_2, name=repo.name).create() + org_2 = target_sat.api.Organization().create() + product_2 = target_sat.api.Product(organization=org_2).create() + repo_2 = target_sat.api.Repository(product=product_2, name=repo.name).create() assert repo_2.name == repo.name @pytest.mark.tier1 @@ -349,7 +349,7 @@ def test_positive_create_same_name_different_orgs(self, repo): **datafactory.parametrized([{'name': name} for name in datafactory.invalid_values_list()]), indirect=True, ) - def test_negative_create_name(self, repo_options): + def test_negative_create_name(self, repo_options, target_sat): """Attempt to create repository with invalid names only. 
:id: 24947c92-3415-43df-add6-d6eb38afd8a3 @@ -361,7 +361,7 @@ def test_negative_create_name(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -371,7 +371,7 @@ def test_negative_create_name(self, repo_options): ), indirect=True, ) - def test_negative_create_with_same_name(self, repo_options, repo): + def test_negative_create_with_same_name(self, repo_options, repo, target_sat): """Attempt to create a repository providing a name of already existent entity @@ -384,10 +384,10 @@ def test_negative_create_with_same_name(self, repo_options, repo): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 - def test_negative_create_label(self, module_product): + def test_negative_create_label(self, module_product, module_target_sat): """Attempt to create repository with invalid label. :id: f646ae84-2660-41bd-9883-331285fa1c9a @@ -397,7 +397,9 @@ def test_negative_create_label(self, module_product): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(product=module_product, label=gen_string('utf8')).create() + module_target_sat.api.Repository( + product=module_product, label=gen_string('utf8') + ).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -405,7 +407,7 @@ def test_negative_create_label(self, module_product): **datafactory.parametrized([{'url': url} for url in datafactory.invalid_names_list()]), indirect=True, ) - def test_negative_create_url(self, repo_options): + def test_negative_create_url(self, repo_options, target_sat): """Attempt to create repository with invalid url. 
:id: 0bb9fc3f-d442-4437-b5d8-83024bc7ceab @@ -417,7 +419,7 @@ def test_negative_create_url(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.skipif( @@ -428,7 +430,7 @@ def test_negative_create_url(self, repo_options): **datafactory.parametrized([{'url': f'http://{gen_string("alpha")}{punctuation}.com'}]), indirect=True, ) - def test_negative_create_with_url_with_special_characters(self, repo_options): + def test_negative_create_with_url_with_special_characters(self, repo_options, target_sat): """Verify that repository URL cannot contain unquoted special characters :id: 2ffaa412-e5e5-4bec-afaa-9ea54315df49 @@ -440,7 +442,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -450,7 +452,7 @@ def test_negative_create_with_url_with_special_characters(self, repo_options): ), indirect=True, ) - def test_negative_create_with_invalid_download_policy(self, repo_options): + def test_negative_create_with_invalid_download_policy(self, repo_options, target_sat): """Verify that YUM repository cannot be created with invalid download policy @@ -464,13 +466,13 @@ def test_negative_create_with_invalid_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( 'repo_options', **datafactory.parametrized([{'content_type': 'yum'}]), indirect=True ) - def test_negative_update_to_invalid_download_policy(self, repo): + def test_negative_update_to_invalid_download_policy(self, repo, target_sat): """Verify that YUM repository cannot be updated to invalid download policy @@ -499,7 +501,7 @@ def test_negative_update_to_invalid_download_policy(self, repo): ), indirect=True, ) - def test_negative_create_non_yum_with_download_policy(self, repo_options): + def test_negative_create_non_yum_with_download_policy(self, repo_options, target_sat): """Verify that non-YUM repositories cannot be created with download policy @@ -513,7 +515,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -523,7 +525,7 @@ def test_negative_create_non_yum_with_download_policy(self, repo_options): ), indirect=True, ) - def test_negative_create_checksum(self, repo_options): + def test_negative_create_checksum(self, repo_options, target_sat): """Attempt to create repository with invalid checksum type. 
:id: c49a3c49-110d-4b74-ae14-5c9494a4541c @@ -535,7 +537,7 @@ def test_negative_create_checksum(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -547,7 +549,7 @@ def test_negative_create_checksum(self, repo_options): ids=['sha1', 'sha256'], indirect=True, ) - def test_negative_create_checksum_with_on_demand_policy(self, repo_options): + def test_negative_create_checksum_with_on_demand_policy(self, repo_options, target_sat): """Attempt to create repository with checksum and on_demand policy. :id: de8b157c-ed62-454b-94eb-22659ce1158e @@ -559,7 +561,7 @@ def test_negative_create_checksum_with_on_demand_policy(self, repo_options): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier1 @pytest.mark.parametrize( @@ -669,7 +671,7 @@ def test_positive_update_unprotected(self, repo): assert repo.unprotected is True @pytest.mark.tier2 - def test_positive_update_gpg(self, module_org, module_product): + def test_positive_update_gpg(self, module_org, module_product, module_target_sat): """Create a repository and update its GPGKey :id: 0e9319dc-c922-4ecf-9f83-d221cfdf54c2 @@ -679,14 +681,14 @@ def test_positive_update_gpg(self, module_org, module_product): :CaseLevel: Integration """ # Create a repo and make it point to a GPG key. - gpg_key_1 = entities.GPGKey( + gpg_key_1 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_FILE.read_text(), ).create() - repo = entities.Repository(product=module_product, gpg_key=gpg_key_1).create() + repo = module_target_sat.api.Repository(product=module_product, gpg_key=gpg_key_1).create() # Update the repo and make it point to a new GPG key. - gpg_key_2 = entities.GPGKey( + gpg_key_2 = module_target_sat.api.GPGKey( organization=module_org, content=DataFile.VALID_GPG_KEY_BETA_FILE.read_text(), ).create() @@ -712,7 +714,7 @@ def test_positive_update_contents(self, repo): @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_upload_delete_srpm(self, repo): + def test_positive_upload_delete_srpm(self, repo, target_sat): """Create a repository and upload, delete SRPM contents. 
:id: e091a725-048f-44ca-90cc-c016c450ced9 @@ -726,12 +728,12 @@ def test_positive_upload_delete_srpm(self, repo): :BZ: 1378442 """ # upload srpm - entities.ContentUpload(repository=repo).upload( + target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) assert repo.read().content_counts['srpm'] == 1 - srpm_detail = entities.Srpms().search(query={'repository_id': repo.id}) + srpm_detail = target_sat.api.Srpms().search(query={'repository_id': repo.id}) assert len(srpm_detail) == 1 # Delete srpm @@ -750,7 +752,7 @@ def test_positive_upload_delete_srpm(self, repo): ids=['yum_fake'], indirect=True, ) - def test_positive_create_delete_srpm_repo(self, repo): + def test_positive_create_delete_srpm_repo(self, repo, target_sat): """Create a repository, sync SRPM contents and remove repo :id: e091a725-042f-43ca-99cc-c017c450ced9 @@ -763,7 +765,7 @@ def test_positive_create_delete_srpm_repo(self, repo): """ repo.sync() assert repo.read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'repository_id': repo.id})) == 3 + assert len(target_sat.api.Srpms().search(query={'repository_id': repo.id})) == 3 repo.delete() with pytest.raises(HTTPError): repo.read() @@ -778,7 +780,7 @@ def test_positive_create_delete_srpm_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_remove_contents(self, repo): + def test_positive_remove_contents(self, repo, target_sat): """Synchronize a repository and remove rpm content. :id: f686b74b-7ee9-4806-b999-bc05ffe61a9d @@ -795,7 +797,7 @@ def test_positive_remove_contents(self, repo): repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 @@ -953,7 +955,7 @@ def test_negative_synchronize_auth_yum_repo(self, repo): ids=['yum_fake_2'], indirect=True, ) - def test_positive_resynchronize_rpm_repo(self, repo): + def test_positive_resynchronize_rpm_repo(self, repo, target_sat): """Check that repository content is resynced after packages were removed from repository @@ -971,7 +973,7 @@ def test_positive_resynchronize_rpm_repo(self, repo): repo.sync() assert repo.read().content_counts['rpm'] >= 1 # Find repo packages and remove them - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) assert repo.read().content_counts['rpm'] == 0 # Re-synchronize repository @@ -1173,7 +1175,7 @@ def test_positive_recreate_pulp_repositories(self, module_entitlement_manifest_o reposet=constants.REPOSET['rhst7'], releasever=None, ) - call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) + call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) with target_sat.session.shell() as sh: sh.send('foreman-rake console') time.sleep(30) # sleep to allow time for console to open @@ -1210,9 +1212,9 @@ def test_positive_mirroring_policy(self, target_sat): repo_url = settings.repos.yum_0.url packages_count = constants.FAKE_0_YUM_REPO_PACKAGES_COUNT - org = entities.Organization().create() - prod = entities.Product(organization=org).create() - repo = 
entities.Repository( + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository( download_policy='immediate', mirroring_policy='mirror_complete', product=prod, @@ -1223,7 +1225,7 @@ def test_positive_mirroring_policy(self, target_sat): assert repo.content_counts['rpm'] == packages_count # remove all packages from the repo and upload another one - packages = entities.Package(repository=repo).search(query={'per_page': '1000'}) + packages = target_sat.api.Package(repository=repo).search(query={'per_page': '1000'}) repo.remove_content(data={'ids': [package.id for package in packages]}) with open(DataFile.RPM_TO_UPLOAD, 'rb') as handle: @@ -1250,7 +1252,7 @@ class TestRepositorySync: """Tests for ``/katello/api/repositories/:id/sync``.""" @pytest.mark.tier2 - def test_positive_sync_repos_with_lots_files(self): + def test_positive_sync_repos_with_lots_files(self, target_sat): """Attempt to synchronize repository containing a lot of files inside rpms. @@ -1264,9 +1266,9 @@ def test_positive_sync_repos_with_lots_files(self): :expectedresults: repository was successfully synchronized """ - org = entities.Organization().create() - product = entities.Product(organization=org).create() - repo = entities.Repository(product=product, url=settings.repos.yum_8.url).create() + org = target_sat.api.Organization().create() + product = target_sat.api.Product(organization=org).create() + repo = target_sat.api.Repository(product=product, url=settings.repos.yum_8.url).create() response = repo.sync() assert response, f"Repository {repo} failed to sync." @@ -1289,7 +1291,7 @@ def test_positive_sync_rh(self, module_entitlement_manifest_org, target_sat): reposet=constants.REPOSET['rhst7'], releasever=None, ) - entities.Repository(id=repo_id).sync() + target_sat.api.Repository(id=repo_id).sync() @pytest.mark.tier2 @pytest.mark.skipif( @@ -1365,19 +1367,19 @@ def test_positive_bulk_cancel_sync(self, target_sat, module_entitlement_manifest releasever=repo['releasever'], ) repo_ids.append(repo_id) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.download_policy = 'immediate' rh_repo = rh_repo.update() sync_ids = [] for repo_id in repo_ids: - sync_task = entities.Repository(id=repo_id).sync(synchronous=False) + sync_task = target_sat.api.Repository(id=repo_id).sync(synchronous=False) sync_ids.append(sync_task['id']) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[0:5]}) # Give some time for sync cancels to calm down time.sleep(30) - entities.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": sync_ids[5:]}) for sync_id in sync_ids: - sync_result = entities.ForemanTask(id=sync_id).poll(canceled=True) + sync_result = target_sat.api.ForemanTask(id=sync_id).poll(canceled=True) assert ( 'Task canceled' in sync_result['humanized']['errors'] or 'No content added' in sync_result['humanized']['output'] @@ -1489,11 +1491,11 @@ def test_positive_sync_kickstart_check_os( repo=constants.REPOS['kickstart'][distro]['name'], releasever=constants.REPOS['kickstart'][distro]['version'], ) - rh_repo = entities.Repository(id=repo_id).read() + rh_repo = target_sat.api.Repository(id=repo_id).read() rh_repo.sync() major, minor = constants.REPOS['kickstart'][distro]['version'].split('.') - os = entities.OperatingSystem().search( + os = 
target_sat.api.OperatingSystem().search( query={'search': f'name="RedHat" AND major="{major}" AND minor="{minor}"'} ) assert len(os) @@ -1614,7 +1616,7 @@ def test_positive_synchronize(self, repo): ), indirect=True, ) - def test_positive_cancel_docker_repo_sync(self, repo): + def test_positive_cancel_docker_repo_sync(self, repo, target_sat): """Cancel a large, syncing Docker-type repository :id: 86534979-be49-40ad-8290-05ac71c801b2 @@ -1637,8 +1639,8 @@ def test_positive_cancel_docker_repo_sync(self, repo): sync_task = repo.sync(synchronous=False) # Need to wait for sync to actually start up time.sleep(2) - entities.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) - sync_task = entities.ForemanTask(id=sync_task['id']).poll(canceled=True) + target_sat.api.ForemanTask().bulk_cancel(data={"task_ids": [sync_task['id']]}) + sync_task = target_sat.api.ForemanTask(id=sync_task['id']).poll(canceled=True) assert 'Task canceled' in sync_task['humanized']['errors'] assert 'No content added' in sync_task['humanized']['output'] @@ -1726,7 +1728,7 @@ def test_positive_synchronize_private_registry(self, repo): :parametrized: yes - :expectedresults: A repository is created with a private Docker \ + :expectedresults: A repository is created with a private Docker repository and it is synchronized. :customerscenario: true @@ -1857,7 +1859,7 @@ def test_negative_synchronize_private_registry_no_passwd( match='422 Client Error: Unprocessable Entity for url: ' f'{target_sat.url}:443/katello/api/v2/repositories', ): - entities.Repository(**repo_options).create() + target_sat.api.Repository(**repo_options).create() @pytest.mark.tier2 @pytest.mark.upgrade @@ -2134,7 +2136,7 @@ def test_negative_synchronize_docker_repo_with_invalid_tags(self, repo_options, # releasever=None, # basearch=None, # ) -# call_entity_method_with_timeout(entities.Repository(id=repo_id).sync, timeout=1500) +# call_entity_method_with_timeout(target_sat.api.Repository(id=repo_id).sync, timeout=1500) class TestSRPMRepository: @@ -2143,7 +2145,9 @@ class TestSRPMRepository: @pytest.mark.skip_if_open("BZ:2016047") @pytest.mark.upgrade @pytest.mark.tier2 - def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): + def test_positive_srpm_upload_publish_promote_cv( + self, module_org, env, repo, module_target_sat + ): """Upload SRPM to repository, add repository to content view and publish, promote content view @@ -2151,20 +2155,27 @@ def test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): :expectedresults: srpms can be listed in organization, content view, Lifecycle env """ - entities.ContentUpload(repository=repo).upload( + module_target_sat.api.ContentUpload(repository=repo).upload( filepath=DataFile.SRPM_TO_UPLOAD, content_type='srpm', ) - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = module_target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 1 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 1 + assert ( + len(module_target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 1 + ) assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) == 1 + len( + module_target_sat.api.Srpms().search( + query={'content_view_version_id': cv.version[0].id} + ) + ) + == 1 ) @pytest.mark.upgrade @@ -2178,7 +2189,7 @@ def 
test_positive_srpm_upload_publish_promote_cv(self, module_org, env, repo): **datafactory.parametrized({'fake_srpm': {'url': repo_constants.FAKE_YUM_SRPM_REPO}}), indirect=True, ) - def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): + def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo, target_sat): """Synchronize repository with SRPMs, add repository to content view and publish, promote content view @@ -2190,19 +2201,20 @@ def test_positive_repo_sync_publish_promote_cv(self, module_org, env, repo): """ repo.sync() - cv = entities.ContentView(organization=module_org, repository=[repo]).create() + cv = target_sat.api.ContentView(organization=module_org, repository=[repo]).create() cv.publish() cv = cv.read() assert cv.repository[0].read().content_counts['srpm'] == 3 - assert len(entities.Srpms().search(query={'organization_id': module_org.id})) >= 3 + assert len(target_sat.api.Srpms().search(query={'organization_id': module_org.id})) >= 3 assert ( - len(entities.Srpms().search(query={'content_view_version_id': cv.version[0].id})) >= 3 + len(target_sat.api.Srpms().search(query={'content_view_version_id': cv.version[0].id})) + >= 3 ) cv.version[0].promote(data={'environment_ids': env.id, 'force': False}) - assert len(entities.Srpms().search(query={'environment_id': env.id})) == 3 + assert len(target_sat.api.Srpms().search(query={'environment_id': env.id})) == 3 class TestSRPMRepositoryIgnoreContent: @@ -2317,7 +2329,7 @@ class TestFileRepository: **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_upload_file_to_file_repo(self, repo): + def test_positive_upload_file_to_file_repo(self, repo, target_sat): """Check arbitrary file can be uploaded to File Repository :id: fdb46481-f0f4-45aa-b075-2a8f6725e51b @@ -2335,7 +2347,9 @@ def test_positive_upload_file_to_file_repo(self, repo): repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) assert repo.read().content_counts['file'] == 1 - filesearch = entities.File().search(query={"search": f"name={constants.RPM_TO_UPLOAD}"}) + filesearch = target_sat.api.File().search( + query={"search": f"name={constants.RPM_TO_UPLOAD}"} + ) assert constants.RPM_TO_UPLOAD == filesearch[0].name @pytest.mark.stubbed @@ -2366,7 +2380,7 @@ def test_positive_file_permissions(self): **parametrized([{'content_type': 'file', 'url': repo_constants.CUSTOM_FILE_REPO}]), indirect=True, ) - def test_positive_remove_file(self, repo): + def test_positive_remove_file(self, repo, target_sat): """Check arbitrary file can be removed from File Repository :id: 65068b4c-9018-4baa-b87b-b6e9d7384a5d @@ -2387,7 +2401,7 @@ def test_positive_remove_file(self, repo): repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) assert repo.read().content_counts['file'] == 1 - file_detail = entities.File().search(query={'repository_id': repo.id}) + file_detail = target_sat.api.File().search(query={'repository_id': repo.id}) repo.remove_content(data={'ids': [file_detail[0].id], 'content_type': 'file'}) assert repo.read().content_counts['file'] == 0 @@ -2481,7 +2495,9 @@ class TestTokenAuthContainerRepository: """ @pytest.mark.tier2 - def test_positive_create_with_long_token(self, module_org, module_product, request): + def test_positive_create_with_long_token( + self, module_org, module_product, request, module_target_sat + ): """Create and sync Docker-type repo from the Red Hat Container registry Using token based auth, with very long tokens 
(>255 characters). @@ -2518,7 +2534,7 @@ def test_positive_create_with_long_token(self, module_org, module_product, reque if not len(repo_options['upstream_password']) > 255: pytest.skip('The "long_pass" registry does not meet length requirement') - repo = entities.Repository(**repo_options).create() + repo = module_target_sat.api.Repository(**repo_options).create() @request.addfinalizer def clean_repo(): @@ -2540,7 +2556,9 @@ def clean_repo(): @pytest.mark.tier2 @pytest.mark.parametrize('repo_key', container_repo_keys) - def test_positive_tag_whitelist(self, request, repo_key, module_org, module_product): + def test_positive_tag_whitelist( + self, request, repo_key, module_org, module_product, module_target_sat + ): """Create and sync Docker-type repos from multiple supported registries with a tag whitelist :id: 4f8ea85b-4c69-4da6-a8ef-bd467ee35147 @@ -2563,7 +2581,7 @@ def test_positive_tag_whitelist(self, request, repo_key, module_org, module_prod repo_options['organization'] = module_org repo_options['product'] = module_product - repo = entities.Repository(**repo_options).create() + repo = module_target_sat.api.Repository(**repo_options).create() @request.addfinalizer def clean_repo(): diff --git a/tests/foreman/api/test_repository_set.py b/tests/foreman/api/test_repository_set.py index 449bb1be33c..e4766585824 100644 --- a/tests/foreman/api/test_repository_set.py +++ b/tests/foreman/api/test_repository_set.py @@ -19,7 +19,6 @@ :Upstream: No """ -from nailgun import entities import pytest from robottelo.constants import PRDS, REPOSET @@ -33,17 +32,17 @@ @pytest.fixture -def product(function_entitlement_manifest_org): +def product(function_entitlement_manifest_org, module_target_sat): """Find and return the product matching PRODUCT_NAME.""" - return entities.Product( + return module_target_sat.api.Product( name=PRODUCT_NAME, organization=function_entitlement_manifest_org ).search()[0] @pytest.fixture -def reposet(product): +def reposet(product, module_target_sat): """Find and return the repository set matching REPOSET_NAME and product.""" - return entities.RepositorySet(name=REPOSET_NAME, product=product).search()[0] + return module_target_sat.api.RepositorySet(name=REPOSET_NAME, product=product).search()[0] @pytest.fixture diff --git a/tests/foreman/api/test_role.py b/tests/foreman/api/test_role.py index 4b42408114d..fabd0d3008c 100644 --- a/tests/foreman/api/test_role.py +++ b/tests/foreman/api/test_role.py @@ -20,7 +20,6 @@ :Upstream: No """ -from nailgun import entities from nailgun.config import ServerConfig import pytest from requests.exceptions import HTTPError @@ -41,7 +40,7 @@ class TestRole: 'name, new_name', **parametrized(list(zip(generate_strings_list(), generate_strings_list()))), ) - def test_positive_crud(self, name, new_name): + def test_positive_crud(self, name, new_name, target_sat): """Create, update and delete role with name ``name_generator()``. 
:id: 02c7d04d-a52c-4bc2-9e17-9938ab2ee5b2 @@ -55,7 +54,7 @@ def test_positive_crud(self, name, new_name): :CaseImportance: Critical """ - role = entities.Role(name=name).create() + role = target_sat.api.Role(name=name).create() assert role.name == name role.name = new_name assert role.update(['name']).name == new_name @@ -67,7 +66,7 @@ def test_positive_crud(self, name, new_name): class TestCannedRole: """Implements Canned Roles tests from API""" - def create_org_admin_role(self, name=None, orgs=None, locs=None): + def create_org_admin_role(self, target_sat, name=None, orgs=None, locs=None): """Helper function to create org admin role for particular organizations and locations by cloning 'Organization admin' role. @@ -80,17 +79,19 @@ def create_org_admin_role(self, name=None, orgs=None, locs=None): data returned from 'clone' function """ name = gen_string('alpha') if not name else name - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': {'name': name, 'organization_ids': orgs or [], 'location_ids': locs or []} } ) if 'role' in org_admin: - return entities.Role(id=org_admin['role']['id']).read() - return entities.Role(id=org_admin['id']).read() + return target_sat.api.Role(id=org_admin['role']['id']).read() + return target_sat.api.Role(id=org_admin['id']).read() - def create_org_admin_user(self, role_taxos, user_taxos): + def create_org_admin_user(self, role_taxos, user_taxos, target_sat): """Helper function to create an Org Admin user by assigning org admin role and assign taxonomies to Role and User @@ -105,12 +106,12 @@ def create_org_admin_user(self, role_taxos, user_taxos): """ # Create Org Admin Role org_admin = self.create_org_admin_role( - orgs=[role_taxos['org'].id], locs=[role_taxos['loc'].id] + target_sat, orgs=[role_taxos['org'].id], locs=[role_taxos['loc'].id] ) # Create Org Admin User user_login = gen_string('alpha') user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_passwd, organization=[user_taxos['org'].id], @@ -120,7 +121,7 @@ def create_org_admin_user(self, role_taxos, user_taxos): user.passwd = user_passwd return user - def create_simple_user(self, filter_taxos, role=None): + def create_simple_user(self, target_sat, filter_taxos, role=None): """Creates simple user and assigns taxonomies :param dict filter_taxos: Filter taxonomiest specified as dictionary containing @@ -130,7 +131,7 @@ def create_simple_user(self, filter_taxos, role=None): passwd attr """ user_passwd = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=gen_string('alpha'), password=user_passwd, organization=[filter_taxos['org'].id], @@ -140,14 +141,16 @@ def create_simple_user(self, filter_taxos, role=None): user.passwd = user_passwd return user - def create_domain(self, orgs, locs): + def create_domain(self, target_sat, orgs, locs): """Creates domain in given orgs and locs :param list orgs: List of Organization ids :param list locs: List of Location ids :return Domain: Returns the ```nailgun.entities.Domain``` object """ - return entities.Domain(name=gen_string('alpha'), organization=orgs, location=locs).create() + return target_sat.api.Domain( + name=gen_string('alpha'), organization=orgs, location=locs + 
).create() def user_config(self, user, satellite): """Returns The ```nailgun.confin.ServerConfig``` for given user @@ -160,19 +163,19 @@ def user_config(self, user, satellite): ) @pytest.fixture - def role_taxonomies(self): + def role_taxonomies(self, target_sat): """Create role taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': target_sat.api.Location().create(), } @pytest.fixture - def filter_taxonomies(self): + def filter_taxonomies(self, target_sat): """Create filter taxonomies""" return { - 'org': entities.Organization().create(), - 'loc': entities.Location().create(), + 'org': target_sat.api.Organization().create(), + 'loc': target_sat.api.Location().create(), } @pytest.fixture @@ -184,7 +187,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): sat_url=target_sat.url, ldap_user_name=ad_data['ldap_user_name'], ldap_user_passwd=ad_data['ldap_user_passwd'], - authsource=entities.AuthSourceLDAP( + authsource=target_sat.api.AuthSourceLDAP( onthefly_register=True, account=fr"{ad_data['workgroup']}\{ad_data['ldap_user_name']}", account_password=ad_data['ldap_user_passwd'], @@ -210,7 +213,7 @@ def create_ldap(self, ad_data, target_sat, module_location, module_org): LDAPAuthSource.delete({'name': authsource_name}) @pytest.mark.tier1 - def test_positive_create_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_role_with_taxonomies(self, role_taxonomies, target_sat): """create role with taxonomies :id: fa449217-889c-429b-89b5-0b6c018ffd9e @@ -222,7 +225,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], @@ -232,7 +235,7 @@ def test_positive_create_role_with_taxonomies(self, role_taxonomies): assert role_taxonomies['loc'].id == role.location[0].id @pytest.mark.tier1 - def test_positive_create_role_without_taxonomies(self): + def test_positive_create_role_without_taxonomies(self, target_sat): """Create role without taxonomies :id: fe65a691-1b04-4bfe-a24b-adb48feb31d1 @@ -244,13 +247,13 @@ def test_positive_create_role_without_taxonomies(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name, organization=[], location=[]).create() + role = target_sat.api.Role(name=role_name, organization=[], location=[]).create() assert role.name == role_name assert not role.organization assert not role.location @pytest.mark.tier1 - def test_positive_create_filter_without_override(self, role_taxonomies): + def test_positive_create_filter_without_override(self, role_taxonomies, target_sat): """Create filter in role w/o overriding it :id: 1aadb7ea-ff76-4171-850f-188ba6f87021 @@ -269,27 +272,27 @@ def test_positive_create_filter_without_override(self, role_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter(permission=dom_perm, role=role.id).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = 
target_sat.api.Filter(permission=dom_perm, role=role.id).create() assert role.id == filtr.role.id assert role_taxonomies['org'].id == filtr.organization[0].id assert role_taxonomies['loc'].id == filtr.location[0].id assert not filtr.override @pytest.mark.tier1 - def test_positive_create_non_overridable_filter(self): + def test_positive_create_non_overridable_filter(self, target_sat): """Create non overridable filter in role :id: f891e2e1-76f8-4edf-8c96-b41d05483298 :steps: Create a filter to which taxonomies cannot be associated. - e.g Architecture filter + e.g. Architecture filter :expectedresults: @@ -299,21 +302,23 @@ def test_positive_create_non_overridable_filter(self): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) - filtr = entities.Filter(permission=arch_perm, role=role.id).create() + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) + filtr = target_sat.api.Filter(permission=arch_perm, role=role.id).create() assert role.id == filtr.role.id assert not filtr.override @pytest.mark.tier1 - def test_negative_override_non_overridable_filter(self, filter_taxonomies): + def test_negative_override_non_overridable_filter(self, filter_taxonomies, target_sat): """Override non overridable filter :id: 7793be96-e8eb-451b-a986-51a46a1ab4f9 :steps: Attempt to override a filter to which taxonomies cannot be - associated. e.g Architecture filter + associated. e.g. Architecture filter :expectedresults: Filter is not overrided as taxonomies cannot be applied to that filter @@ -321,11 +326,13 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() + role = target_sat.api.Role(name=role_name).create() assert role.name == role_name - arch_perm = entities.Permission().search(query={'search': 'resource_type="Architecture"'}) + arch_perm = target_sat.api.Permission().search( + query={'search': 'resource_type="Architecture"'} + ) with pytest.raises(HTTPError): - entities.Filter( + target_sat.api.Filter( permission=arch_perm, role=[role.id], override=True, @@ -335,7 +342,9 @@ def test_negative_override_non_overridable_filter(self, filter_taxonomies): @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxonomies): + def test_positive_create_overridable_filter( + self, role_taxonomies, filter_taxonomies, target_sat + ): """Create overridable filter in role :id: c7ea9377-9b9e-495e-accd-3576166d504e @@ -355,14 +364,14 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -377,7 +386,7 @@ def test_positive_create_overridable_filter(self, role_taxonomies, filter_taxono assert 
role_taxonomies['loc'].id != filtr.location[0].id

@pytest.mark.tier1
- def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies):
+ def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat):
"""Update role taxonomies, which applies to its non-overridden filters
:id: 902dcb32-2126-4ff4-b733-3e86749ccd1e
@@ -390,29 +399,29 @@ def test_positive_update_role_taxonomies(self, role_taxonomies, filter_taxonomies):
:CaseImportance: Critical
"""
role_name = gen_string('alpha')
- role = entities.Role(
+ role = target_sat.api.Role(
name=role_name,
organization=[role_taxonomies['org']],
location=[role_taxonomies['loc']],
).create()
assert role.name == role_name
- dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'})
- filtr = entities.Filter(permission=dom_perm, role=role.id).create()
+ dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'})
+ filtr = target_sat.api.Filter(permission=dom_perm, role=role.id).create()
assert role.id == filtr.role.id
role.organization = [filter_taxonomies['org']]
role.location = [filter_taxonomies['loc']]
role = role.update(['organization', 'location'])
# Updated Role
- role = entities.Role(id=role.id).read()
+ role = target_sat.api.Role(id=role.id).read()
assert filter_taxonomies['org'].id == role.organization[0].id
assert filter_taxonomies['loc'].id == role.location[0].id
# Updated Filter
- filtr = entities.Filter(id=filtr.id).read()
+ filtr = target_sat.api.Filter(id=filtr.id).read()
assert filter_taxonomies['org'].id == filtr.organization[0].id
assert filter_taxonomies['loc'].id == filtr.location[0].id

@pytest.mark.tier1
- def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies):
+ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies, target_sat):
"""Update role taxonomies, which doesn't apply to its overridden filters
:id: 9f3bf95a-f71a-4063-b51c-12610bc655f2
@@ -426,14 +435,14 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies):
:CaseImportance: Critical
"""
role_name = gen_string('alpha')
- role = entities.Role(
+ role = target_sat.api.Role(
name=role_name,
organization=[role_taxonomies['org']],
location=[role_taxonomies['loc']],
).create()
assert role.name == role_name
- dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'})
- filtr = entities.Filter(
+ dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'})
+ filtr = target_sat.api.Filter(
permission=dom_perm,
role=role.id,
override=True,
@@ -442,23 +451,23 @@ def test_negative_update_role_taxonomies(self, role_taxonomies, filter_taxonomies):
).create()
assert role.id == filtr.role.id
# Creating new Taxonomies
- org_new = entities.Organization().create()
- loc_new = entities.Location().create()
+ org_new = target_sat.api.Organization().create()
+ loc_new = target_sat.api.Location().create()
# Updating Taxonomies
role.organization = [org_new]
role.location = [loc_new]
role = role.update(['organization', 'location'])
# Updated Role
- role = entities.Role(id=role.id).read()
+ role = target_sat.api.Role(id=role.id).read()
assert org_new.id == role.organization[0].id
assert loc_new.id == role.location[0].id
# Updated Filter
- filtr = entities.Filter(id=filtr.id).read()
+ filtr = target_sat.api.Filter(id=filtr.id).read()
assert org_new.id != filtr.organization[0].id
assert loc_new.id != filtr.location[0].id

@pytest.mark.tier1
- def
test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies): + def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomies, target_sat): """Unsetting override flag resets filter taxonomies :id: eaa7b921-7c12-45c5-989b-d82aa2b6e3a6 @@ -477,14 +486,14 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi :CaseImportance: Critical """ role_name = gen_string('alpha') - role = entities.Role( + role = target_sat.api.Role( name=role_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert role.name == role_name - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - filtr = entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + filtr = target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, @@ -500,7 +509,7 @@ def test_positive_disable_filter_override(self, role_taxonomies, filter_taxonomi assert filter_taxonomies['loc'].id != filtr.location[0].id @pytest.mark.tier1 - def test_positive_create_org_admin_from_clone(self): + def test_positive_create_org_admin_from_clone(self, target_sat): """Create Org Admin role which has access to most of the resources within organization @@ -515,14 +524,16 @@ def test_positive_create_org_admin_from_clone(self): :BZ: 1637436 """ - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) org_admin = self.create_org_admin_role() - default_filters = entities.Role(id=default_org_admin[0].id).read().filters - orgadmin_filters = entities.Role(id=org_admin.id).read().filters + default_filters = target_sat.api.Role(id=default_org_admin[0].id).read().filters + orgadmin_filters = target_sat.api.Role(id=org_admin.id).read().filters assert len(default_filters) == len(orgadmin_filters) @pytest.mark.tier1 - def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): + def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies, target_sat): """Taxonomies can be assigned to cloned role :id: 31079015-5ede-439a-a062-e20d1ffd66df @@ -542,7 +553,7 @@ def test_positive_create_cloned_role_with_taxonomies(self, role_taxonomies): org_admin = self.create_org_admin_role( orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - org_admin = entities.Role(id=org_admin.id).read() + org_admin = target_sat.api.Role(id=org_admin.id).read() assert role_taxonomies['org'].id == org_admin.organization[0].id assert role_taxonomies['loc'].id == org_admin.location[0].id @@ -575,7 +586,7 @@ def test_negative_access_entities_from_org_admin( sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_user( @@ -606,10 +617,10 @@ def test_negative_access_entities_from_user( sc = self.user_config(user, target_sat) # Getting the domain from user with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier2 - def test_positive_override_cloned_role_filter(self, role_taxonomies): + def test_positive_override_cloned_role_filter(self, role_taxonomies, target_sat): """Cloned role filter overrides :id: 8a32ed5f-b93f-4f31-aff4-16602fbe7fab @@ -625,27 
+636,27 @@ def test_positive_override_cloned_role_filter(self, role_taxonomies): :CaseLevel: Integration """ role_name = gen_string('alpha') - role = entities.Role(name=role_name).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() + role = target_sat.api.Role(name=role_name).create() + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() cloned_role_name = gen_string('alpha') - cloned_role = entities.Role(id=role.id).clone(data={'name': cloned_role_name}) + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': cloned_role_name}) assert cloned_role_name == cloned_role['name'] - filter_cloned_id = entities.Role(id=cloned_role['id']).read().filters[0].id - filter_cloned = entities.Filter(id=filter_cloned_id).read() + filter_cloned_id = target_sat.api.Role(id=cloned_role['id']).read().filters[0].id + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() filter_cloned.override = True filter_cloned.organization = [role_taxonomies['org']] filter_cloned.location = [role_taxonomies['loc']] filter_cloned.update(['override', 'organization', 'location']) # Updated Filter - filter_cloned = entities.Filter(id=filter_cloned_id).read() + filter_cloned = target_sat.api.Filter(id=filter_cloned_id).read() assert filter_cloned.override assert role_taxonomies['org'].id == filter_cloned.organization[0].id assert role_taxonomies['loc'].id == filter_cloned.location[0].id @pytest.mark.tier2 def test_positive_emptiness_of_filter_taxonomies_on_role_clone( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """Taxonomies of filters in cloned role are set to None for filters that are overridden in parent role @@ -667,29 +678,29 @@ def test_positive_emptiness_of_filter_taxonomies_on_role_clone( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - filter_cloned = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + filter_cloned = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not filter_cloned.organization assert not filter_cloned.location assert filter_cloned.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_with_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): """When taxonomies assigned to cloned role, Unlimited and Override flag sets on filter for filter that is overridden in parent role @@ -710,34 +721,34 @@ def test_positive_clone_role_having_overridden_filter_with_taxonomies( :CaseLevel: Integration """ - 
role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone( + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( - self, role_taxonomies + self, role_taxonomies, target_sat ): """When taxonomies assigned to cloned role, Neither unlimited nor override sets on filter for filter that is not overridden in parent @@ -758,27 +769,29 @@ def test_positive_clone_role_having_non_overridden_filter_with_taxonomies( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_taxonomies): + def test_positive_clone_role_having_unlimited_filter_with_taxonomies( + self, role_taxonomies, target_sat + ): """When taxonomies assigned to cloned role, Neither unlimited nor override sets on filter for filter that is unlimited in parent role @@ -797,28 +810,28 @@ def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self, role_ :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 
'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={ 'name': gen_string('alpha'), 'organization_ids': [role_taxonomies['org'].id], 'location_ids': [role_taxonomies['loc'].id], } ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert not cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 def test_positive_clone_role_having_overridden_filter_without_taxonomies( - self, role_taxonomies, filter_taxonomies + self, role_taxonomies, filter_taxonomies, target_sat ): # noqa """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is overridden in parent role @@ -838,27 +851,29 @@ def test_positive_clone_role_having_overridden_filter_without_taxonomies( :CaseLevel: Integration """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter( permission=dom_perm, role=role.id, override=True, organization=[filter_taxonomies['org']], location=[filter_taxonomies['loc']], ).create() - cloned_role = entities.Role(id=role.id).clone(data={'name': gen_string('alpha')}) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role = target_sat.api.Role(id=role.id).clone(data={'name': gen_string('alpha')}) + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_non_overided_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, only unlimited but not override flag sets on filter for filter that is overridden in parent role @@ -881,23 +896,25 @@ def test_positive_clone_role_without_taxonomies_non_overided_filter(self, role_t :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = 
target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override @pytest.mark.tier2 - def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxonomies): + def test_positive_clone_role_without_taxonomies_unlimited_filter( + self, role_taxonomies, target_sat + ): """When taxonomies not assigned to cloned role, Unlimited and override flags sets on filter for filter that is unlimited in parent role @@ -919,18 +936,18 @@ def test_positive_clone_role_without_taxonomies_unlimited_filter(self, role_taxo :BZ: 1488908 """ - role = entities.Role( + role = target_sat.api.Role( name=gen_string('alpha'), organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() - dom_perm = entities.Permission().search(query={'search': 'resource_type="Domain"'}) - entities.Filter(permission=dom_perm, role=role.id, unlimited=True).create() - cloned_role = entities.Role(id=role.id).clone( + dom_perm = target_sat.api.Permission().search(query={'search': 'resource_type="Domain"'}) + target_sat.api.Filter(permission=dom_perm, role=role.id, unlimited=True).create() + cloned_role = target_sat.api.Role(id=role.id).clone( data={'role': {'name': gen_string('alpha'), 'location_ids': [], 'organization_ids': []}} ) - cloned_role_filter = entities.Role(id=cloned_role['id']).read().filters[0] - cloned_filter = entities.Filter(id=cloned_role_filter.id).read() + cloned_role_filter = target_sat.api.Role(id=cloned_role['id']).read().filters[0] + cloned_filter = target_sat.api.Filter(id=cloned_role_filter.id).read() assert cloned_filter.unlimited assert not cloned_filter.override @@ -948,7 +965,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta 2. Assign an organization A and Location A to the Org Admin role 3. Create two users without assigning roles while creating them 4. Assign Organization A and Location A to both users - 5. Create an user group with above two users + 5. Create a user group with above two users 6. 
Assign Org Admin role to User Group :expectedresults: Both the user should have access to the resources of @@ -961,7 +978,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ) userone_login = gen_string('alpha') userone_pass = gen_string('alphanumeric') - user_one = entities.User( + user_one = target_sat.api.User( login=userone_login, password=userone_pass, organization=[role_taxonomies['org'].id], @@ -970,7 +987,7 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta assert userone_login == user_one.login usertwo_login = gen_string('alpha') usertwo_pass = gen_string('alphanumeric') - user_two = entities.User( + user_two = target_sat.api.User( login=usertwo_login, password=usertwo_pass, organization=[role_taxonomies['org'].id], @@ -978,17 +995,17 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ).create() assert usertwo_login == user_two.login ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name # Creating Subnets and Domains to verify if user really can access them - subnet = entities.Subnet( + subnet = target_sat.api.Subnet( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], ).create() - domain = entities.Domain( + domain = target_sat.api.Domain( name=gen_string('alpha'), organization=[role_taxonomies['org'].id], location=[role_taxonomies['loc'].id], @@ -998,13 +1015,13 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta auth=(login, password), url=target_sat.url, verify=settings.server.verify_ca ) try: - entities.Domain(sc).search( + target_sat.api.Domain(sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, } ) - entities.Subnet(sc).search( + target_sat.api.Subnet(sc).search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1012,8 +1029,8 @@ def test_positive_user_group_users_access_as_org_admin(self, role_taxonomies, ta ) except HTTPError as err: pytest.fail(str(err)) - assert domain.id in [dom.id for dom in entities.Domain(sc).search()] - assert subnet.id in [sub.id for sub in entities.Subnet(sc).search()] + assert domain.id in [dom.id for dom in target_sat.api.Domain(sc).search()] + assert subnet.id in [sub.id for sub in target_sat.api.Subnet(sc).search()] @pytest.mark.tier3 def test_positive_user_group_users_access_contradict_as_org_admins(self): @@ -1067,10 +1084,10 @@ def test_negative_assign_org_admin_to_user_group( org_admin = self.create_org_admin_role( orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) - user_one = self.create_simple_user(filter_taxos=filter_taxonomies) - user_two = self.create_simple_user(filter_taxos=filter_taxonomies) + user_one = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) + user_two = self.create_simple_user(target_sat, filter_taxos=filter_taxonomies) ug_name = gen_string('alpha') - user_group = entities.UserGroup( + user_group = target_sat.api.UserGroup( name=ug_name, role=[org_admin.id], user=[user_one.id, user_two.id] ).create() assert user_group.name == ug_name @@ -1078,7 +1095,7 @@ def test_negative_assign_org_admin_to_user_group( for user in [user_one, user_two]: sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.Domain(sc, 
id=dom.id).read() + target_sat.api.Domain(sc, id=dom.id).read() @pytest.mark.tier2 def test_negative_assign_taxonomies_by_org_admin( @@ -1111,13 +1128,13 @@ def test_negative_assign_taxonomies_by_org_admin( ) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']] ).create() assert dom_name == dom.name user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1129,13 +1146,13 @@ def test_negative_assign_taxonomies_by_org_admin( auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) # Getting the domain from user1 - dom = entities.Domain(sc, id=dom.id).read() + dom = target_sat.api.Domain(sc, id=dom.id).read() dom.organization = [filter_taxonomies['org']] with pytest.raises(HTTPError): dom.update(['organization']) @pytest.mark.tier1 - def test_positive_remove_org_admin_role(self, role_taxonomies): + def test_positive_remove_org_admin_role(self, role_taxonomies, target_sat): """Super Admin user can remove Org Admin role :id: 03fac76c-22ac-43cf-9068-b96e255b3c3c @@ -1156,21 +1173,23 @@ def test_positive_remove_org_admin_role(self, role_taxonomies): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User(login=user_login, password=user_pass, role=[org_admin.id]).create() + user = target_sat.api.User( + login=user_login, password=user_pass, role=[org_admin.id] + ).create() assert user_login == user.login try: - entities.Role(id=org_admin.id).delete() + target_sat.api.Role(id=org_admin.id).delete() except HTTPError as err: pytest.fail(str(err)) # Getting updated user - user = entities.User(id=user.id).read() + user = target_sat.api.User(id=user.id).read() assert org_admin.id not in [role.id for role in user.role] @pytest.mark.tier2 def test_positive_taxonomies_control_to_superadmin_with_org_admin( self, role_taxonomies, target_sat ): """Super Admin can access entities in taxonomies assigned to Org Admin :id: 37db0b40-ed35-4e70-83e8-83cff27caae2 @@ -1179,9 +1198,9 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( 1. Create Org Admin role and assign organization A and Location A 2. Create User and assign above Org Admin role 3.
Login with SuperAdmin who created the above Org Admin role and access entities in Organization A and Location A :expectedresults: Super admin should be able to access the entities in taxonomies assigned to Org Admin :CaseLevel: Integration @@ -1190,7 +1209,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( sc, name=dom_name, organization=[role_taxonomies['org']], @@ -1198,7 +1217,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( ).create() assert dom_name == dom.name try: - entities.Subnet().search( + target_sat.api.Subnet().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1211,7 +1230,7 @@ def test_positive_taxonomies_control_to_superadmin_with_org_admin( def test_positive_taxonomies_control_to_superadmin_without_org_admin( self, role_taxonomies, target_sat ): """Super Admin can access entities in taxonomies assigned to Org Admin after deleting Org Admin role/user :id: 446f66a5-16e0-4298-b326-262913502955 @@ -1224,7 +1243,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( 4. Login with SuperAdmin who created the above Org Admin role and access entities in Organization A and Location A :expectedresults: Super admin should be able to access the entities in taxonomies assigned to Org Admin after deleting Org Admin :CaseLevel: Integration @@ -1233,21 +1252,21 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( sc = self.user_config(user, target_sat) # Creating resource dom_name = gen_string('alpha') - dom = entities.Domain( + dom = target_sat.api.Domain( sc, name=dom_name, organization=[role_taxonomies['org']], location=[role_taxonomies['loc']], ).create() assert dom_name == dom.name - user_role = entities.Role(id=user.role[0].id).read() - entities.Role(id=user_role.id).delete() - entities.User(id=user.id).delete() + user_role = target_sat.api.Role(id=user.role[0].id).read() + target_sat.api.Role(id=user_role.id).delete() + target_sat.api.User(id=user.id).delete() with pytest.raises(HTTPError): user_role.read() user.read() try: - entities.Domain().search( + target_sat.api.Domain().search( query={ 'organization-id': role_taxonomies['org'].id, 'location-id': role_taxonomies['loc'].id, @@ -1259,7 +1278,7 @@ def test_positive_taxonomies_control_to_superadmin_without_org_admin( @pytest.mark.tier1 @pytest.mark.upgrade def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): - """Org Admin doesnt have permissions to create new roles + """Org Admin doesn't have permissions to create new roles :id: 806ecc16-0dc7-405b-90d3-0584eced27a3 @@ -1278,7 +1297,7 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1291,7 +1310,7 @@ def test_negative_create_roles_by_org_admin(self, role_taxonomies, target_sat): ) role_name = gen_string('alpha') with pytest.raises(HTTPError): -
entities.Role( + target_sat.api.Role( sc, name=role_name, organization=[role_taxonomies['org']], @@ -1315,9 +1334,11 @@ def test_negative_modify_roles_by_org_admin(self, role_taxonomies, target_sat): existing roles """ user = self.create_org_admin_user(role_taxos=role_taxonomies, user_taxos=role_taxonomies) - test_role = entities.Role().create() + test_role = target_sat.api.Role().create() sc = self.user_config(user, target_sat) - test_role = entities.Role(sc, id=test_role.id).read() + test_role = target_sat.api.Role(sc, id=test_role.id).read() + test_role.organization = [role_taxonomies['org']] + test_role.location = [role_taxonomies['loc']] with pytest.raises(HTTPError): test_role.organization = [role_taxonomies['org']] test_role.location = [role_taxonomies['loc']] @@ -1345,7 +1366,7 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1357,7 +1378,7 @@ def test_negative_admin_permissions_to_org_admin(self, role_taxonomies, target_s auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) with pytest.raises(HTTPError): - entities.User(sc, id=1).read() + target_sat.api.User(sc, id=1).read() @pytest.mark.tier2 @pytest.mark.upgrade @@ -1392,7 +1413,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1405,7 +1426,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): ) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( sc_user, login=user_login, password=user_pass, @@ -1417,7 +1438,7 @@ def test_positive_create_user_by_org_admin(self, role_taxonomies, target_sat): assert org_admin.id == user.role[0].id if not is_open('BZ:1825698'): name = gen_string('alphanumeric') - location = entities.Location(sc_user, name=name).create() + location = target_sat.api.Location(sc_user, name=name).create() assert location.name == name @pytest.mark.tier2 @@ -1445,7 +1466,7 @@ def test_positive_access_users_inside_org_admin_taxonomies(self, role_taxonomies test_user = self.create_simple_user(filter_taxos=role_taxonomies) sc = self.user_config(user, target_sat) try: - entities.User(sc, id=test_user.id).read() + target_sat.api.User(sc, id=test_user.id).read() except HTTPError as err: pytest.fail(str(err)) @@ -1472,7 +1493,7 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): """ user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, organization=[role_taxonomies['org']], @@ -1487,7 +1508,7 @@ def test_positive_create_nested_location(self, role_taxonomies, target_sat): auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) name = gen_string('alphanumeric') - location = entities.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() + location = target_sat.api.Location(sc, name=name, parent=role_taxonomies['loc'].id).create() assert location.name == name @pytest.mark.tier2 @@ -1517,7 +1538,7 @@ def test_negative_access_users_outside_org_admin_taxonomies( test_user = 
self.create_simple_user(filter_taxos=filter_taxonomies) sc = self.user_config(user, target_sat) with pytest.raises(HTTPError): - entities.User(sc, id=test_user.id).read() + target_sat.api.User(sc, id=test_user.id).read() @pytest.mark.tier1 def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_sat): @@ -1541,7 +1562,7 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s org_admin = self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1553,11 +1574,11 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s auth=(user_login, user_pass), url=target_sat.url, verify=settings.server.verify_ca ) with pytest.raises(HTTPError): - entities.Organization(sc, name=gen_string('alpha')).create() + target_sat.api.Organization(sc, name=gen_string('alpha')).create() if not is_open("BZ:1825698"): try: loc_name = gen_string('alpha') - loc = entities.Location(sc, name=loc_name).create() + loc = target_sat.api.Location(sc, name=loc_name).create() except HTTPError as err: pytest.fail(str(err)) assert loc_name == loc.name @@ -1567,7 +1588,7 @@ def test_negative_create_taxonomies_by_org_admin(self, role_taxonomies, target_s def test_positive_access_all_global_entities_by_org_admin( self, role_taxonomies, filter_taxonomies, target_sat ): """Org Admin can access all global entities in any taxonomies regardless of its own assigned taxonomies :id: add5feb3-7a3f-45a1-a633-49f1141b029b @@ -1578,16 +1599,16 @@ def test_positive_access_all_global_entities_by_org_admin( 2. Create new user and assign Org A,B and Location A,B 3. Assign Org Admin role to User 4. Login with Org Admin user 5. Attempt to create all the global entities in org B and Loc B - e.g Architectures, Operating System + e.g.
Architectures, Operating System :expectedresults: Org Admin should have access to all the global entities in any taxonomies """ org_admin = self.create_org_admin_role(orgs=[role_taxonomies['org'].id]) user_login = gen_string('alpha') user_pass = gen_string('alphanumeric') - user = entities.User( + user = target_sat.api.User( login=user_login, password=user_pass, role=[org_admin.id], @@ -1600,22 +1621,24 @@ def test_positive_access_all_global_entities_by_org_admin( ) try: for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: entity(sc).search() except HTTPError as err: pytest.fail(str(err)) @pytest.mark.tier3 - def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, create_ldap): + def test_negative_access_entities_from_ldap_org_admin( + self, role_taxonomies, create_ldap, target_sat + ): """LDAP User can not access resources in taxonomies assigned to role if its own taxonomies are not same as its role @@ -1649,17 -1672,19 @@ def test_negative_access_entities_from_ldap_org_admin(self, role_taxonomies, cre verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = [target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the domain resource created in org admin role with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 def test_negative_access_entities_from_ldap_user( - self, role_taxonomies, create_ldap, module_location, module_org + self, role_taxonomies, create_ldap, module_location, module_org, target_sat ): """LDAP User can not access resources within its own taxonomies if assigned role does not have permissions for same taxonomies @@ -1692,16 +1717,20 @@ def test_negative_access_entities_from_ldap_user( verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': f"login={create_ldap['ldap_user_name']}"})[0] - user.role = [entities.Role(id=org_admin.id).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': f"login={create_ldap['ldap_user_name']}"} + )[0] + user.role = [target_sat.api.Role(id=org_admin.id).read()] user.update(['role']) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 - def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, create_ldap): + def test_positive_assign_org_admin_to_ldap_user_group( + self, role_taxonomies, create_ldap, target_sat + ): """Users in
LDAP usergroup can access to the resources in taxonomies if the taxonomies of Org Admin role are same @@ -1735,7 +1764,7 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre locs=[create_ldap['authsource'].location[0].id], ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1743,9 +1772,11 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to the usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1756,10 +1787,12 @@ def test_positive_assign_org_admin_to_ldap_user_group(self, role_taxonomies, cre verify=settings.server.verify_ca, ) # Accessing the Domain resource - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() @pytest.mark.tier3 - def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_taxonomies): + def test_negative_assign_org_admin_to_ldap_user_group( + self, create_ldap, role_taxonomies, target_sat + ): """Users in LDAP usergroup can not have access to the resources in taxonomies if the taxonomies of Org Admin role is not same @@ -1791,7 +1824,7 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta orgs=[role_taxonomies['org'].id], locs=[role_taxonomies['loc'].id] ) users = [ - entities.User( + target_sat.api.User( login=gen_string("alpha"), password=password, organization=create_ldap['authsource'].organization, @@ -1799,9 +1832,11 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta ).create() for _ in range(2) ] - user_group = entities.UserGroup(name=group_name, user=users, role=[org_admin]).create() + user_group = target_sat.api.UserGroup( + name=group_name, user=users, role=[org_admin] + ).create() # Adding LDAP authsource to usergroup - entities.ExternalUserGroup( + target_sat.api.ExternalUserGroup( name='foobargroup', usergroup=user_group, auth_source=create_ldap['authsource'] ).create() @@ -1813,7 +1848,7 @@ def test_negative_assign_org_admin_to_ldap_user_group(self, create_ldap, role_ta ) # Trying to access the Domain resource with pytest.raises(HTTPError): - entities.Domain(sc, id=domain.id).read() + target_sat.api.Domain(sc, id=domain.id).read() class TestRoleSearchFilter: diff --git a/tests/foreman/api/test_settings.py b/tests/foreman/api/test_settings.py index b5d0542d35d..b4fe49c23c1 100644 --- a/tests/foreman/api/test_settings.py +++ b/tests/foreman/api/test_settings.py @@ -18,7 +18,6 @@ """ import random -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -188,7 +187,7 @@ def test_negative_discover_host_with_invalid_prefix(): @pytest.mark.tier2 @pytest.mark.parametrize('download_policy', ["immediate", "on_demand"]) @pytest.mark.parametrize('setting_update', ['default_download_policy'], indirect=True) -def test_positive_custom_repo_download_policy(setting_update, download_policy): +def test_positive_custom_repo_download_policy(setting_update, download_policy, target_sat): """Check the set custom repository download policy for newly created custom repository. 
:id: d5150cce-ba85-4ea0-a8d1-6a54d0d29571 @@ -209,11 +208,11 @@ def test_positive_custom_repo_download_policy(setting_update, download_policy): :CaseLevel: Acceptance """ - org = entities.Organization().create() - prod = entities.Product(organization=org).create() + org = target_sat.api.Organization().create() + prod = target_sat.api.Product(organization=org).create() setting_update.value = download_policy setting_update.update({'value'}) - repo = entities.Repository(product=prod, content_type='yum', organization=org).create() + repo = target_sat.api.Repository(product=prod, content_type='yum', organization=org).create() assert repo.download_policy == download_policy repo.delete() prod.delete() diff --git a/tests/foreman/api/test_subnet.py b/tests/foreman/api/test_subnet.py index c5188d12808..75d7bff0b9e 100644 --- a/tests/foreman/api/test_subnet.py +++ b/tests/foreman/api/test_subnet.py @@ -23,7 +23,6 @@ """ import re -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -36,7 +35,7 @@ @pytest.mark.tier1 -def test_positive_create_with_parameter(): +def test_positive_create_with_parameter(target_sat): """Subnet can be created along with parameters :id: ec581cb5-8c48-4b9c-b536-302c0b7ec30f @@ -47,14 +46,14 @@ def test_positive_create_with_parameter(): :expectedresults: The Subnet is created with parameter """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() assert subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(generate_strings_list())) -def test_positive_add_parameter(name): +def test_positive_add_parameter(name, target_sat): """Parameters can be created in subnet :id: c1dae6f4-45b1-45db-8529-d7918e41a99b @@ -70,15 +69,15 @@ def test_positive_add_parameter(name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - subnet_param = entities.Parameter(subnet=subnet.id, name=name, value=value).create() + subnet_param = target_sat.api.Parameter(subnet=subnet.id, name=name, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 -def test_positive_add_parameter_with_values_and_separator(): +def test_positive_add_parameter_with_values_and_separator(target_sat): """Subnet parameters can be created with values separated by comma :id: b3de6f96-7c39-4c44-b91c-a6d141f5dd6a @@ -94,10 +93,10 @@ def test_positive_add_parameter_with_values_and_separator(): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() name = gen_string('alpha') values = ', '.join(generate_strings_list()) - subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=values).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=values).create() assert subnet_param.name == name assert subnet_param.value == values @@ -106,7 +105,7 @@ def test_positive_add_parameter_with_values_and_separator(): @pytest.mark.parametrize( 'separator', **parametrized({'comma': ',', 'slash': '/', 'dash': '-', 'pipe': '|'}) ) -def test_positive_create_with_parameter_and_valid_separator(separator): +def 
test_positive_create_with_parameter_and_valid_separator(separator, target_sat): """Subnet parameters can be created with name with valid separators :id: d1e2d75a-a1e8-4767-93f1-0bb1b75e10a0 @@ -124,16 +123,16 @@ def test_positive_create_with_parameter_and_valid_separator(separator): :CaseImportance: Low """ name = f'{separator}'.join(generate_strings_list()) - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() value = gen_string('utf8') - subnet_param = entities.Parameter(name=name, subnet=subnet.id, value=value).create() + subnet_param = target_sat.api.Parameter(name=name, subnet=subnet.id, value=value).create() assert subnet_param.name == name assert subnet_param.value == value @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_create_with_parameter_and_invalid_separator(name): +def test_negative_create_with_parameter_and_invalid_separator(name, target_sat): """Subnet parameters can not be created with name with invalid separators @@ -155,13 +154,13 @@ def test_negative_create_with_parameter_and_invalid_separator(name): :CaseImportance: Low """ - subnet = entities.Subnet().create() + subnet = target_sat.api.Subnet().create() with pytest.raises(HTTPError): - entities.Parameter(name=name, subnet=subnet.id).create() + target_sat.api.Parameter(name=name, subnet=subnet.id).create() @pytest.mark.tier1 -def test_negative_create_with_duplicated_parameters(): +def test_negative_create_with_duplicated_parameters(target_sat): """Attempt to create multiple parameters with same key name for the same subnet @@ -180,10 +179,10 @@ def test_negative_create_with_duplicated_parameters(): :CaseImportance: Low """ - subnet = entities.Subnet().create() - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() with pytest.raises(HTTPError) as context: - entities.Parameter(name='duplicateParameter', subnet=subnet.id).create() + target_sat.api.Parameter(name='duplicateParameter', subnet=subnet.id).create() assert re.search("Name has already been taken", context.value.response.text) @@ -244,7 +243,7 @@ def test_positive_subnet_parameters_override_from_host(): @pytest.mark.tier3 -def test_positive_subnet_parameters_override_impact_on_subnet(): +def test_positive_subnet_parameters_override_impact_on_subnet(target_sat): """Override subnet parameter from host impact on subnet parameter :id: 6fe963ed-93a3-496e-bfd9-599bf91a61f3 @@ -266,15 +265,15 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): # Create subnet with valid parameters parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], subnet_parameters_attributes=parameter ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter[0]['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter[0]['value'] # Create host with above subnet - host = entities.Host(location=loc, organization=org, subnet=org_subnet).create() + host = target_sat.api.Host(location=loc, organization=org, subnet=org_subnet).create() assert host.subnet.read().name 
== org_subnet.name parameter_new_value = [ { @@ -293,7 +292,7 @@ def test_positive_subnet_parameters_override_impact_on_subnet(): @pytest.mark.tier1 -def test_positive_update_parameter(): +def test_positive_update_parameter(target_sat): """Subnet parameter can be updated :id: 8c389c3f-60ef-4856-b8fc-c5b066c67a2f @@ -309,7 +308,7 @@ def test_positive_update_parameter(): :CaseImportance: Medium """ parameter = [{'name': gen_string('alpha'), 'value': gen_string('alpha')}] - subnet = entities.Subnet(subnet_parameters_attributes=parameter).create() + subnet = target_sat.api.Subnet(subnet_parameters_attributes=parameter).create() update_parameter = [{'name': gen_string('utf8'), 'value': gen_string('utf8')}] subnet.subnet_parameters_attributes = update_parameter up_subnet = subnet.update(['subnet_parameters_attributes']) @@ -319,7 +318,7 @@ def test_positive_update_parameter(): @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(invalid_values_list() + ['name with space'])) -def test_negative_update_parameter(new_name): +def test_negative_update_parameter(new_name, target_sat): """Subnet parameter can not be updated with invalid names :id: fcdbad13-ad96-4152-8e20-e023d61a2853 @@ -339,8 +338,8 @@ def test_negative_update_parameter(new_name): :CaseImportance: Medium """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter( + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter( name=gen_string('utf8'), subnet=subnet.id, value=gen_string('utf8') ).create() with pytest.raises(HTTPError): @@ -377,7 +376,7 @@ def test_positive_update_subnet_parameter_host_impact(): @pytest.mark.tier1 @pytest.mark.upgrade -def test_positive_delete_subnet_parameter(): +def test_positive_delete_subnet_parameter(target_sat): """Subnet parameter can be deleted :id: 972b66ec-d506-4fcb-9786-c62f2f79ac1a @@ -389,8 +388,8 @@ def test_positive_delete_subnet_parameter(): :expectedresults: The parameter should be deleted from subnet """ - subnet = entities.Subnet().create() - sub_param = entities.Parameter(subnet=subnet.id).create() + subnet = target_sat.api.Subnet().create() + sub_param = target_sat.api.Parameter(subnet=subnet.id).create() sub_param.delete() with pytest.raises(HTTPError): sub_param.read() @@ -452,7 +451,7 @@ def test_positive_delete_subnet_overridden_parameter_host_impact(): @pytest.mark.tier1 -def test_positive_list_parameters(): +def test_positive_list_parameters(target_sat): """Satellite lists all the subnet parameters :id: ce86d531-bf6b-45a9-81e3-67e1b3398f76 @@ -467,9 +466,9 @@ def test_positive_list_parameters(): parameters """ parameter = {'name': gen_string('alpha'), 'value': gen_string('alpha')} - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - org_subnet = entities.Subnet( + org = target_sat.api.Organization().create() + loc = target_sat.api.Location(organization=[org]).create() + org_subnet = target_sat.api.Subnet( location=[loc], organization=[org], ipam='DHCP', @@ -478,10 +477,10 @@ def test_positive_list_parameters(): ).create() assert org_subnet.subnet_parameters_attributes[0]['name'] == parameter['name'] assert org_subnet.subnet_parameters_attributes[0]['value'] == parameter['value'] - sub_param = entities.Parameter( + sub_param = target_sat.api.Parameter( name=gen_string('alpha'), subnet=org_subnet.id, value=gen_string('alpha') ).create() - org_subnet = entities.Subnet(id=org_subnet.id).read() + org_subnet = target_sat.api.Subnet(id=org_subnet.id).read() params_list = { param['name']: 
param['value'] for param in org_subnet.subnet_parameters_attributes diff --git a/tests/foreman/api/test_subscription.py b/tests/foreman/api/test_subscription.py index 377d43555dd..b27488cd5f7 100644 --- a/tests/foreman/api/test_subscription.py +++ b/tests/foreman/api/test_subscription.py @@ -21,7 +21,6 @@ :Upstream: No """ from fauxfactory import gen_string -from nailgun import entities from nailgun.config import ServerConfig from nailgun.entity_mixins import TaskFailedError import pytest @@ -44,28 +43,30 @@ def rh_repo(module_sca_manifest_org, module_target_sat): reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo @pytest.fixture(scope='module') -def custom_repo(rh_repo, module_sca_manifest_org): - custom_repo = entities.Repository( - product=entities.Product(organization=module_sca_manifest_org).create(), +def custom_repo(rh_repo, module_sca_manifest_org, module_target_sat): + custom_repo = module_target_sat.api.Repository( + product=module_target_sat.api.Product(organization=module_sca_manifest_org).create(), ).create() custom_repo.sync() return custom_repo @pytest.fixture(scope='module') -def module_ak(module_sca_manifest_org, rh_repo, custom_repo): +def module_ak(module_sca_manifest_org, rh_repo, custom_repo, module_target_sat): """rh_repo and custom_repo are included here to ensure their execution before the AK""" - module_ak = entities.ActivationKey( + module_ak = module_target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=module_target_sat.api.LifecycleEnvironment( + id=module_sca_manifest_org.library.id + ), auto_attach=True, ).create() return module_ak @@ -82,12 +83,12 @@ def test_positive_create(module_entitlement_manifest, module_target_sat): :CaseImportance: Critical """ - org = entities.Organization().create() + org = module_target_sat.api.Organization().create() module_target_sat.upload_manifest(org.id, module_entitlement_manifest.content) @pytest.mark.tier1 -def test_positive_refresh(function_entitlement_manifest_org, request): +def test_positive_refresh(function_entitlement_manifest_org, request, target_sat): """Upload a manifest and refresh it afterwards. 
:id: cd195db6-e81b-42cb-a28d-ec0eb8a53341 @@ -97,7 +98,7 @@ def test_positive_refresh(function_entitlement_manifest_org, request): :CaseImportance: Critical """ org = function_entitlement_manifest_org - sub = entities.Subscription(organization=org) + sub = target_sat.api.Subscription(organization=org) request.addfinalizer(lambda: sub.delete_manifest(data={'organization_id': org.id})) sub.refresh_manifest(data={'organization_id': org.id}) assert sub.search() @@ -120,9 +121,9 @@ def test_positive_create_after_refresh( :CaseImportance: Critical """ - org_sub = entities.Subscription(organization=function_entitlement_manifest_org) - new_org = entities.Organization().create() - new_org_sub = entities.Subscription(organization=new_org) + org_sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) + new_org = target_sat.api.Organization().create() + new_org_sub = target_sat.api.Subscription(organization=new_org) try: org_sub.refresh_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert org_sub.search() @@ -133,7 +134,7 @@ def test_positive_create_after_refresh( @pytest.mark.tier1 -def test_positive_delete(function_entitlement_manifest_org): +def test_positive_delete(function_entitlement_manifest_org, target_sat): """Delete an Uploaded manifest. :id: 4c21c7c9-2b26-4a65-a304-b978d5ba34fc @@ -142,7 +143,7 @@ def test_positive_delete(function_entitlement_manifest_org): :CaseImportance: Critical """ - sub = entities.Subscription(organization=function_entitlement_manifest_org) + sub = target_sat.api.Subscription(organization=function_entitlement_manifest_org) assert sub.search() sub.delete_manifest(data={'organization_id': function_entitlement_manifest_org.id}) assert len(sub.search()) == 0 @@ -157,12 +158,12 @@ def test_negative_upload(function_entitlement_manifest, target_sat): :expectedresults: The manifest is not uploaded to the second organization. 
""" - orgs = [entities.Organization().create() for _ in range(2)] + orgs = [target_sat.api.Organization().create() for _ in range(2)] with function_entitlement_manifest as manifest: target_sat.upload_manifest(orgs[0].id, manifest.content) with pytest.raises(TaskFailedError): target_sat.upload_manifest(orgs[1].id, manifest.content) - assert len(entities.Subscription(organization=orgs[1]).search()) == 0 + assert len(target_sat.api.Subscription(organization=orgs[1]).search()) == 0 @pytest.mark.tier2 @@ -208,11 +209,11 @@ def test_positive_delete_manifest_as_another_user( ) # use the first admin to upload a manifest with function_entitlement_manifest as manifest: - entities.Subscription(sc1, organization=function_org).upload( + target_sat.api.Subscription(sc1, organization=function_org).upload( data={'organization_id': function_org.id}, files={'content': manifest.content} ) # try to search and delete the manifest with another admin - entities.Subscription(sc2, organization=function_org).delete_manifest( + target_sat.api.Subscription(sc2, organization=function_org).delete_manifest( data={'organization_id': function_org.id} ) assert len(Subscription.list({'organization-id': function_org.id})) == 0 @@ -238,7 +239,7 @@ def test_positive_subscription_status_disabled( rhel_contenthost.install_katello_ca(target_sat) rhel_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel_contenthost.subscribed - host_content = entities.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content + host_content = target_sat.api.Host(id=rhel_contenthost.nailgun_host.id).read_raw().content assert 'Simple Content Access' in str(host_content) @@ -266,9 +267,12 @@ def test_sca_end_to_end( rhel7_contenthost.register_contenthost(module_sca_manifest_org.label, module_ak.name) assert rhel7_contenthost.subscribed # Check to see if Organization is in SCA Mode - assert entities.Organization(id=module_sca_manifest_org.id).read().simple_content_access is True + assert ( + target_sat.api.Organization(id=module_sca_manifest_org.id).read().simple_content_access + is True + ) # Verify that you cannot attach a subscription to an activation key in SCA Mode - subscription = entities.Subscription(organization=module_sca_manifest_org).search( + subscription = target_sat.api.Subscription(organization=module_sca_manifest_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] with pytest.raises(HTTPError) as ak_context: @@ -276,12 +280,12 @@ def test_sca_end_to_end( assert 'Simple Content Access' in ak_context.value.response.text # Verify that you cannot attach a subscription to an Host in SCA Mode with pytest.raises(HTTPError) as host_context: - entities.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( + target_sat.api.HostSubscription(host=rhel7_contenthost.nailgun_host.id).add_subscriptions( data={'subscriptions': [{'id': subscription.id, 'quantity': 1}]} ) assert 'Simple Content Access' in host_context.value.response.text # Create a content view with repos and check to see that the client has access - content_view = entities.ContentView(organization=module_sca_manifest_org).create() + content_view = target_sat.api.ContentView(organization=module_sca_manifest_org).create() content_view.repository = [rh_repo, custom_repo] content_view.update(['repository']) content_view.publish() @@ -327,34 +331,34 @@ def test_positive_candlepin_events_processed_by_stomp( :CaseImportance: High """ - repo = entities.Repository( - 
product=entities.Product(organization=function_org).create() + repo = target_sat.api.Repository( + product=target_sat.api.Product(organization=function_org).create() ).create() repo.sync() - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=function_org.default_content_view, max_hosts=100, organization=function_org, - environment=entities.LifecycleEnvironment(id=function_org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=function_org.library.id), auto_attach=True, ).create() rhel7_contenthost.install_katello_ca(target_sat) rhel7_contenthost.register_contenthost(function_org.name, ak.name) - host = entities.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) + host = target_sat.api.Host().search(query={'search': f'name={rhel7_contenthost.hostname}'}) host_id = host[0].id - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 2 with function_entitlement_manifest as manifest: target_sat.upload_manifest(function_org.id, manifest.content) - subscription = entities.Subscription(organization=function_org).search( + subscription = target_sat.api.Subscription(organization=function_org).search( query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'} )[0] - entities.HostSubscription(host=host_id).add_subscriptions( + target_sat.api.HostSubscription(host=host_id).add_subscriptions( data={'subscriptions': [{'id': subscription.cp_id, 'quantity': 1}]} ) - host_content = entities.Host(id=host_id).read_json() + host_content = target_sat.api.Host(id=host_id).read_json() assert host_content['subscription_status'] == 0 - response = entities.Ping().search_json()['services']['candlepin_events'] + response = target_sat.api.Ping().search_json()['services']['candlepin_events'] assert response['status'] == 'ok' assert '0 Failed' in response['message'] @@ -386,11 +390,11 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte :CaseImportance: High """ - ak = entities.ActivationKey( + ak = target_sat.api.ActivationKey( content_view=module_sca_manifest_org.default_content_view, max_hosts=100, organization=module_sca_manifest_org, - environment=entities.LifecycleEnvironment(id=module_sca_manifest_org.library.id), + environment=target_sat.api.LifecycleEnvironment(id=module_sca_manifest_org.library.id), auto_attach=True, ).create() # registering the content host with no content enabled/synced in the org @@ -411,7 +415,7 @@ def test_positive_expired_SCA_cert_handling(module_sca_manifest_org, rhel7_conte reposet=REPOSET['rhst7'], releasever=None, ) - rh_repo = entities.Repository(id=rh_repo_id).read() + rh_repo = target_sat.api.Repository(id=rh_repo_id).read() rh_repo.sync() # re-registering the host should test whether Candlepin gracefully handles # registration of a host with an expired SCA cert diff --git a/tests/foreman/api/test_syncplan.py b/tests/foreman/api/test_syncplan.py index bdf367b507b..06ff3df0716 100644 --- a/tests/foreman/api/test_syncplan.py +++ b/tests/foreman/api/test_syncplan.py @@ -24,7 +24,7 @@ from time import sleep from fauxfactory import gen_choice, gen_string -from nailgun import client, entities +from nailgun import client import pytest from requests.exceptions import HTTPError @@ -101,7 +101,7 @@ def validate_repo_content(repo, content_types, after_sync=True): @pytest.mark.tier1 -def test_positive_get_routes(): +def test_positive_get_routes(target_sat): """Issue an HTTP GET response to both 
available routes. :id: 9e40ea7f-71ea-4ced-94ba-cde03620c654 @@ -112,8 +112,8 @@ def test_positive_get_routes(): :CaseImportance: Critical """ - org = entities.Organization().create() - entities.SyncPlan(organization=org).create() + org = target_sat.api.Organization().create() + target_sat.api.SyncPlan(organization=org).create() response1 = client.get( f'{get_url()}/katello/api/v2/sync_plans', auth=get_credentials(), @@ -144,7 +144,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan = sync_plan.read() assert sync_plan.enabled == enabled @@ -152,7 +152,7 @@ def test_positive_create_enabled_disabled(module_org, enabled, request, target_s @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_name(module_org, name): +def test_positive_create_with_name(module_org, name, module_target_sat): """Create a sync plan with a random name. :id: c1263134-0d7c-425a-82fd-df5274e1f9ba @@ -163,14 +163,16 @@ def test_positive_create_with_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, name=name, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan( + enabled=False, name=name, organization=module_org + ).create() sync_plan = sync_plan.read() assert sync_plan.name == name @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_create_with_description(module_org, description): +def test_positive_create_with_description(module_org, description, module_target_sat): """Create a sync plan with a random description. :id: 3e5745e8-838d-44a5-ad61-7e56829ad47c @@ -182,7 +184,7 @@ def test_positive_create_with_description(module_org, description): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=description, organization=module_org ).create() sync_plan = sync_plan.read() @@ -191,7 +193,7 @@ def test_positive_create_with_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_create_with_interval(module_org, interval): +def test_positive_create_with_interval(module_org, interval, module_target_sat): """Create a sync plan with a random interval. :id: d160ed1c-b698-42dc-be0b-67ac693c7840 @@ -202,7 +204,7 @@ def test_positive_create_with_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -214,7 +216,7 @@ def test_positive_create_with_interval(module_org, interval): @pytest.mark.parametrize('sync_delta', **parametrized(sync_date_deltas)) @pytest.mark.tier1 -def test_positive_create_with_sync_date(module_org, sync_delta): +def test_positive_create_with_sync_date(module_org, sync_delta, module_target_sat): """Create a sync plan and update its sync date. 
:id: bdb6e0a9-0d3b-4811-83e2-2140b7bb62e3 @@ -226,7 +228,7 @@ def test_positive_create_with_sync_date(module_org, sync_delta): :CaseImportance: Critical """ sync_date = datetime.now() + timedelta(seconds=sync_delta) - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, organization=module_org, sync_date=sync_date ).create() sync_plan = sync_plan.read() @@ -235,7 +237,7 @@ def test_positive_create_with_sync_date(module_org, sync_delta): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_name(module_org, name): +def test_negative_create_with_invalid_name(module_org, name, module_target_sat): """Create a sync plan with an invalid name. :id: a3a0f844-2f81-4f87-9f68-c25506c29ce2 @@ -248,12 +250,12 @@ def test_negative_create_with_invalid_name(module_org, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(name=name, organization=module_org).create() + module_target_sat.api.SyncPlan(name=name, organization=module_org).create() @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_create_with_invalid_interval(module_org, interval): +def test_negative_create_with_invalid_interval(module_org, interval, module_target_sat): """Create a sync plan with invalid interval specified. :id: f5844526-9f58-4be3-8a96-3849a465fc02 @@ -266,11 +268,11 @@ def test_negative_create_with_invalid_interval(module_org, interval): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.SyncPlan(interval=interval, organization=module_org).create() + module_target_sat.api.SyncPlan(interval=interval, organization=module_org).create() @pytest.mark.tier1 -def test_negative_create_with_empty_interval(module_org): +def test_negative_create_with_empty_interval(module_org, module_target_sat): """Create a sync plan with no interval specified. :id: b4686463-69c8-4538-b040-6fb5246a7b00 @@ -280,7 +282,7 @@ def test_negative_create_with_empty_interval(module_org): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(organization=module_org) + sync_plan = module_target_sat.api.SyncPlan(organization=module_org) sync_plan.create_missing() del sync_plan.interval with pytest.raises(HTTPError): @@ -300,7 +302,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=not enabled, organization=module_org).create() + sync_plan = target_sat.api.SyncPlan(enabled=not enabled, organization=module_org).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) sync_plan.enabled = enabled sync_plan.update(['enabled']) @@ -310,7 +312,7 @@ def test_positive_update_enabled(module_org, enabled, request, target_sat): @pytest.mark.parametrize('name', **parametrized(valid_data_list())) @pytest.mark.tier1 -def test_positive_update_name(module_org, name): +def test_positive_update_name(module_org, name, module_target_sat): """Create a sync plan and update its name. 
:id: dbfadf4f-50af-4aa8-8d7d-43988dc4528f @@ -322,7 +324,7 @@ def test_positive_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name sync_plan.update(['name']) sync_plan = sync_plan.read() @@ -331,7 +333,7 @@ def test_positive_update_name(module_org, name): @pytest.mark.parametrize('description', **parametrized(valid_data_list())) @pytest.mark.tier2 -def test_positive_update_description(module_org, description): +def test_positive_update_description(module_org, description, module_target_sat): """Create a sync plan and update its description. :id: 4769fe9c-9eec-40c8-b015-1e3d7e570bec @@ -341,7 +343,7 @@ def test_positive_update_description(module_org, description): :expectedresults: A sync plan is created and its description can be updated with the specified description. """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org ).create() sync_plan.description = description @@ -352,7 +354,7 @@ def test_positive_update_description(module_org, description): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval(module_org, interval): +def test_positive_update_interval(module_org, interval, module_target_sat): """Create a sync plan and update its interval. :id: cf2eddf8-b4db-430e-a9b0-83c626b45068 @@ -364,7 +366,7 @@ def test_positive_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, interval=interval ) if interval == SYNC_INTERVAL['custom']: @@ -384,7 +386,7 @@ def test_positive_update_interval(module_org, interval): @pytest.mark.parametrize('interval', **parametrized(valid_sync_interval())) @pytest.mark.tier1 -def test_positive_update_interval_custom_cron(module_org, interval): +def test_positive_update_interval_custom_cron(module_org, interval, module_target_sat): """Create a sync plan and update its interval to custom cron. :id: 26c58319-cae0-4b0c-b388-2a1fe3f22344 @@ -397,7 +399,7 @@ def test_positive_update_interval_custom_cron(module_org, interval): :CaseImportance: Critical """ if interval != SYNC_INTERVAL['custom']: - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, description=gen_string('alpha'), organization=module_org, @@ -413,7 +415,7 @@ def test_positive_update_interval_custom_cron(module_org, interval): @pytest.mark.parametrize('sync_delta', **parametrized(sync_date_deltas)) @pytest.mark.tier1 -def test_positive_update_sync_date(module_org, sync_delta): +def test_positive_update_sync_date(module_org, sync_delta, module_target_sat): """Updated sync plan's sync date. 
:id: fad472c7-01b4-453b-ae33-0845c9e0dfd4 @@ -425,7 +427,7 @@ def test_positive_update_sync_date(module_org, sync_delta): :CaseImportance: Critical """ sync_date = datetime.now() + timedelta(seconds=sync_delta) - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( enabled=False, organization=module_org, sync_date=datetime.now() + timedelta(days=10) ).create() sync_plan.sync_date = sync_date @@ -436,7 +438,7 @@ def test_positive_update_sync_date(module_org, sync_delta): @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_name(module_org, name): +def test_negative_update_name(module_org, name, module_target_sat): """Try to update a sync plan with an invalid name. :id: ae502053-9d3c-4cad-aee4-821f846ceae5 @@ -448,7 +450,7 @@ def test_negative_update_name(module_org, name): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.name = name with pytest.raises(HTTPError): sync_plan.update(['name']) @@ -456,7 +458,7 @@ def test_negative_update_name(module_org, name): @pytest.mark.parametrize('interval', **parametrized(invalid_values_list())) @pytest.mark.tier1 -def test_negative_update_interval(module_org, interval): +def test_negative_update_interval(module_org, interval, module_target_sat): """Try to update a sync plan with invalid interval. :id: 8c981174-6f55-49c0-8baa-40e5c3fc598c @@ -468,14 +470,14 @@ def test_negative_update_interval(module_org, interval): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() sync_plan.interval = interval with pytest.raises(HTTPError): sync_plan.update(['interval']) @pytest.mark.tier2 -def test_positive_add_product(module_org): +def test_positive_add_product(module_org, module_target_sat): """Create a sync plan and add one product to it. :id: 036dea02-f73d-4fc1-9c41-5515b6659c79 @@ -487,8 +489,8 @@ def test_positive_add_product(module_org): :CaseImportance: Critical """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - product = entities.Product(organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() sync_plan.add_products(data={'product_ids': [product.id]}) sync_plan = sync_plan.read() assert len(sync_plan.product) == 1 @@ -496,7 +498,7 @@ def test_positive_add_product(module_org): @pytest.mark.tier2 -def test_positive_add_products(module_org): +def test_positive_add_products(module_org, module_target_sat): """Create a sync plan and add two products to it. 
:id: 2a80ecad-2245-46d8-bbc6-0b802e68d50c @@ -506,8 +508,8 @@ def test_positive_add_products(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [module_target_sat.api.Product(organization=module_org).create() for _ in range(2)] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) sync_plan = sync_plan.read() assert len(sync_plan.product) == 2 @@ -515,7 +517,7 @@ def test_positive_add_products(module_org): @pytest.mark.tier2 -def test_positive_remove_product(module_org): +def test_positive_remove_product(module_org, module_target_sat): """Create a sync plan with two products and then remove one product from it. @@ -528,8 +530,8 @@ def test_positive_remove_product(module_org): :BZ: 1199150 """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [module_target_sat.api.Product(organization=module_org).create() for _ in range(2)] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [products[0].id]}) @@ -540,7 +542,7 @@ def test_positive_remove_product(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_remove_products(module_org): +def test_positive_remove_products(module_org, module_target_sat): """Create a sync plan with two products and then remove both products from it. @@ -551,8 +553,8 @@ def test_positive_remove_products(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(enabled=False, organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = module_target_sat.api.SyncPlan(enabled=False, organization=module_org).create() + products = [module_target_sat.api.Product(organization=module_org).create() for _ in range(2)] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [product.id for product in products]}) @@ -560,7 +562,7 @@ def test_positive_remove_products(module_org): @pytest.mark.tier2 -def test_positive_repeatedly_add_remove(module_org, request, target_sat): +def test_positive_repeatedly_add_remove(module_org, request, module_target_sat): """Repeatedly add and remove a product from a sync plan. 
:id: b67536ba-3a36-4bb7-a405-0e12081d5a7e @@ -572,9 +574,9 @@ def test_positive_repeatedly_add_remove(module_org, request, target_sat): :BZ: 1199150 """ - sync_plan = entities.SyncPlan(organization=module_org).create() - request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - product = entities.Product(organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + request.addfinalizer(lambda: module_target_sat.api_factory.disable_syncplan(sync_plan)) + product = module_target_sat.api.Product(organization=module_org).create() for _ in range(5): sync_plan.add_products(data={'product_ids': [product.id]}) assert len(sync_plan.read().product) == 1 @@ -583,7 +585,7 @@ def test_positive_repeatedly_add_remove(module_org, request, target_sat): @pytest.mark.tier2 -def test_positive_add_remove_products_custom_cron(module_org, request, target_sat): +def test_positive_add_remove_products_custom_cron(module_org, request, module_target_sat): """Create a sync plan with two products having custom cron interval and then remove both products from it. @@ -596,11 +598,11 @@ def test_positive_add_remove_products_custom_cron(module_org, request, target_sa """ cron_expression = gen_choice(valid_cron_expressions()) - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( organization=module_org, interval='custom cron', cron_expression=cron_expression ).create() - request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) - products = [entities.Product(organization=module_org).create() for _ in range(2)] + request.addfinalizer(lambda: module_target_sat.api_factory.disable_syncplan(sync_plan)) + products = [module_target_sat.api.Product(organization=module_org).create() for _ in range(2)] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) assert len(sync_plan.read().product) == 2 sync_plan.remove_products(data={'product_ids': [product.id for product in products]}) @@ -620,14 +622,14 @@ def test_negative_synchronize_custom_product_past_sync_date(module_org, request, :CaseLevel: System """ - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=2) validate_repo_content(repo, ['erratum', 'rpm', 'package_group'], after_sync=False) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=datetime.utcnow().replace(second=0) ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -654,10 +656,10 @@ def test_positive_synchronize_custom_product_past_sync_date(module_org, request, """ interval = 60 * 60 # 'hourly' sync interval in seconds delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='hourly', @@ -700,8 
+702,8 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques :BZ: 1655595, 1695733 """ delay = 2 * 60 # delay for sync date in seconds - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() # Verify product is not synced and doesn't have any content with pytest.raises(AssertionError): validate_task_status(target_sat, repo.id, module_org.id, max_tries=1) @@ -710,7 +712,7 @@ def test_positive_synchronize_custom_product_future_sync_date(module_org, reques # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -752,9 +754,11 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque """ # Test with multiple products and multiple repos needs more delay. delay = 8 * 60 # delay for sync date in seconds - products = [entities.Product(organization=module_org).create() for _ in range(2)] + products = [target_sat.api.Product(organization=module_org).create() for _ in range(2)] repos = [ - entities.Repository(product=product).create() for product in products for _ in range(2) + target_sat.api.Repository(product=product).create() + for product in products + for _ in range(2) ] # Verify products have not been synced yet logger.info( @@ -768,7 +772,7 @@ def test_positive_synchronize_custom_products_future_sync_date(module_org, reque # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -825,9 +829,9 @@ def test_positive_synchronize_rh_product_past_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'], organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() - sync_plan = entities.SyncPlan( + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', @@ -858,7 +862,7 @@ def test_positive_synchronize_rh_product_past_sync_date( # Add disassociate RH product from sync plan check for BZ#1879537 assert len(sync_plan.read().product) == 1 # Disable the reposet - reposet = entities.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] + reposet = target_sat.api.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0] reposet.disable(data={'basearch': 'x86_64', 'releasever': None, 'product_id': product.id}) # Assert that the Sync Plan now has no product associated with it assert len(sync_plan.read().product) == 0 @@ -889,12 +893,12 @@ def test_positive_synchronize_rh_product_future_sync_date( reposet=REPOSET['rhst7'], releasever=None, ) - product = entities.Product(name=PRDS['rhel'],
organization=org).search()[0] - repo = entities.Repository(id=repo_id).read() + product = target_sat.api.Product(name=PRDS['rhel'], organization=org).search()[0] + repo = target_sat.api.Repository(id=repo_id).read() # BZ:1695733 is closed WONTFIX so apply this workaround logger.info('Need to set seconds to zero because BZ#1695733') sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay) - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=org, enabled=True, interval='hourly', sync_date=sync_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -938,11 +942,11 @@ def test_positive_synchronize_custom_product_daily_recurrence(module_org, reques :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(days=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='daily', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -983,11 +987,11 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque :CaseLevel: System """ delay = 2 * 60 - product = entities.Product(organization=module_org).create() - repo = entities.Repository(product=product).create() + product = target_sat.api.Product(organization=module_org).create() + repo = target_sat.api.Repository(product=product).create() start_date = datetime.utcnow().replace(second=0) - timedelta(weeks=1) + timedelta(seconds=delay) # Create and Associate sync plan with product - sync_plan = entities.SyncPlan( + sync_plan = target_sat.api.SyncPlan( organization=module_org, enabled=True, interval='weekly', sync_date=start_date ).create() request.addfinalizer(lambda: target_sat.api_factory.disable_syncplan(sync_plan)) @@ -1014,7 +1018,7 @@ def test_positive_synchronize_custom_product_weekly_recurrence(module_org, reque @pytest.mark.tier2 -def test_positive_delete_one_product(module_org): +def test_positive_delete_one_product(module_org, module_target_sat): """Create a sync plan with one product and delete it. :id: e565c464-33e2-4bca-8eca-15d5a7d4b155 @@ -1024,8 +1028,8 @@ def test_positive_delete_one_product(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - product = entities.Product(organization=module_org).create() + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() sync_plan.add_products(data={'product_ids': [product.id]}) sync_plan.delete() with pytest.raises(HTTPError): @@ -1033,7 +1037,7 @@ def test_positive_delete_one_product(module_org): @pytest.mark.tier2 -def test_positive_delete_products(module_org): +def test_positive_delete_products(module_org, module_target_sat): """Create a sync plan with two products and delete them. 
:id: f21bd57f-369e-4acd-a492-5532349a3804 @@ -1043,8 +1047,8 @@ def test_positive_delete_products(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - products = [entities.Product(organization=module_org).create() for _ in range(2)] + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + products = [module_target_sat.api.Product(organization=module_org).create() for _ in range(2)] sync_plan.add_products(data={'product_ids': [product.id for product in products]}) sync_plan.delete() with pytest.raises(HTTPError): @@ -1053,7 +1057,7 @@ def test_positive_delete_products(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product(module_org): +def test_positive_delete_synced_product(module_org, module_target_sat): """Create a sync plan with one synced product and delete it. :id: 195d8fec-1fa0-42ab-84a5-32dd81a285ca @@ -1063,9 +1067,9 @@ def test_positive_delete_synced_product(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan(organization=module_org).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + sync_plan = module_target_sat.api.SyncPlan(organization=module_org).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() sync_plan.delete() @@ -1075,7 +1079,7 @@ def test_positive_delete_synced_product(module_org): @pytest.mark.tier2 @pytest.mark.upgrade -def test_positive_delete_synced_product_custom_cron(module_org): +def test_positive_delete_synced_product_custom_cron(module_org, module_target_sat): """Create a sync plan with custom cron with one synced product and delete it. @@ -1086,13 +1090,13 @@ def test_positive_delete_synced_product_custom_cron(module_org): :CaseLevel: Integration """ - sync_plan = entities.SyncPlan( + sync_plan = module_target_sat.api.SyncPlan( organization=module_org, interval='custom cron', cron_expression=gen_choice(valid_cron_expressions()), ).create() - product = entities.Product(organization=module_org).create() - entities.Repository(product=product).create() + product = module_target_sat.api.Product(organization=module_org).create() + module_target_sat.api.Repository(product=product).create() sync_plan.add_products(data={'product_ids': [product.id]}) product.sync() product = product.read() diff --git a/tests/foreman/api/test_templatesync.py b/tests/foreman/api/test_templatesync.py index 061b8db1dd1..8e38540d395 100644 --- a/tests/foreman/api/test_templatesync.py +++ b/tests/foreman/api/test_templatesync.py @@ -19,7 +19,6 @@ import time from fauxfactory import gen_string -from nailgun import entities import pytest import requests @@ -67,7 +66,9 @@ def setUpClass(self, module_target_sat): ) @pytest.mark.tier2 - def test_positive_import_filtered_templates_from_git(self, module_org, module_location): + def test_positive_import_filtered_templates_from_git( + self, module_org, module_location, module_target_sat + ): """Assure only templates with a given filter regex are pulled from git repo. 
@@ -91,7 +92,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo :CaseImportance: High """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -105,7 +106,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(True) assert imported_count == 8 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -114,7 +115,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptemplates) == 5 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -123,7 +124,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(ptables) == 1 - jtemplates = entities.JobTemplate().search( + jtemplates = module_target_sat.api.JobTemplate().search( query={ 'per_page': '100', 'search': f'name~{prefix}', @@ -132,7 +133,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo } ) assert len(jtemplates) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -143,7 +144,7 @@ def test_positive_import_filtered_templates_from_git(self, module_org, module_lo assert len(rtemplates) == 1 @pytest.mark.tier2 - def test_import_filtered_templates_from_git_with_negate(self, module_org): + def test_import_filtered_templates_from_git_with_negate(self, module_org, module_target_sat): """Assure templates with a given filter regex are NOT pulled from git repo. 
@@ -162,7 +163,7 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -176,15 +177,15 @@ def test_import_filtered_templates_from_git_with_negate(self, module_org): template['imported'] for template in filtered_imported_templates['message']['templates'] ].count(False) assert not_imported_count == 9 - ptemplates = entities.ProvisioningTemplate().search( + ptemplates = module_target_sat.api.ProvisioningTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptemplates) == 6 - ptables = entities.PartitionTable().search( + ptables = module_target_sat.api.PartitionTable().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(ptables) == 1 - rtemplates = entities.ReportTemplate().search( + rtemplates = module_target_sat.api.ReportTemplate().search( query={'per_page': '100', 'search': 'name~jenkins', 'organization_id': module_org.id} ) assert len(rtemplates) == 1 @@ -267,7 +268,7 @@ def test_positive_import_and_associate( prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir # Associate Never - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -277,7 +278,7 @@ def test_positive_import_and_associate( } ) # - Template 1 imported in X and Y taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -288,7 +289,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 1 not imported in metadata taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}', @@ -302,7 +303,7 @@ def test_positive_import_and_associate( f'cp {dir_path}/example_template.erb {dir_path}/another_template.erb && ' f'sed -ie "s/name: .*/name: another_template/" {dir_path}/another_template.erb' ) - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -312,7 +313,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -323,7 +324,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 should be imported in importing taxonomies - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -334,7 +335,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # Associate Always - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'prefix': prefix, @@ -344,7 +345,7 @@ def test_positive_import_and_associate( } ) # - Template 1 taxonomies are not changed - ptemplate = 
entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}example_template', @@ -355,7 +356,7 @@ def test_positive_import_and_associate( assert ptemplate assert len(ptemplate[0].read().organization) == 1 # - Template 2 taxonomies are not changed - ptemplate = entities.ProvisioningTemplate().search( + ptemplate = target_sat.api.ProvisioningTemplate().search( query={ 'per_page': '10', 'search': f'name~{prefix}another_template', @@ -367,7 +368,7 @@ def test_positive_import_and_associate( assert len(ptemplate[0].read().organization) == 1 @pytest.mark.tier2 - def test_positive_import_from_subdirectory(self, module_org): + def test_positive_import_from_subdirectory(self, module_org, module_target_sat): """Assure templates are imported from specific repositories subdirectory :id: 8ea11a1a-165e-4834-9387-7accb4c94e77 @@ -384,7 +385,7 @@ def test_positive_import_from_subdirectory(self, module_org): :CaseImportance: Medium """ prefix = gen_string('alpha') - filtered_imported_templates = entities.Template().imports( + filtered_imported_templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -423,7 +424,7 @@ def test_positive_export_filtered_templates_to_localdir( :CaseImportance: Low """ dir_name, dir_path = create_import_export_local_dir - exported_temps = entities.Template().exports( + exported_temps = target_sat.api.Template().exports( data={ 'repo': FOREMAN_TEMPLATE_ROOT_DIR, 'dirname': dir_name, @@ -459,7 +460,7 @@ def test_positive_export_filtered_templates_negate( """ # Export some filtered templates to local dir _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -498,7 +499,7 @@ def test_positive_export_and_import_with_metadata( ex_template = 'example_template.erb' prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': dir_path, 'location_ids': [module_location.id], @@ -508,7 +509,7 @@ def test_positive_export_and_import_with_metadata( ) export_file = f'{prefix.lower()}{ex_template}' # Export same template to local dir with refreshed metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'refresh', 'repo': dir_path, @@ -522,7 +523,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 0 # Export same template to local dir with keeping metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'keep', 'repo': dir_path, @@ -536,7 +537,7 @@ def test_positive_export_and_import_with_metadata( ) assert result.status == 1 # Export same template to local dir with removed metadata - entities.Template().exports( + target_sat.api.Template().exports( data={ 'metadata_export_mode': 'remove', 'repo': dir_path, @@ -553,7 +554,7 @@ def test_positive_export_and_import_with_metadata( # Take Templates out of Tech Preview Feature Tests @pytest.mark.tier3 @pytest.mark.parametrize('verbose', [True, False]) - def test_positive_import_json_output_verbose(self, module_org, verbose): + def test_positive_import_json_output_verbose(self, module_org, verbose, module_target_sat): """Assert all the required fields displayed in import output when verbose is True and False @@ -575,7 +576,7 @@ def 
test_positive_import_json_output_verbose(self, module_org, verbose): :CaseImportance: Low """ prefix = gen_string('alpha') - templates = entities.Template().imports( + templates = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -628,19 +629,19 @@ def test_positive_import_json_output_changed_key_true( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) target_sat.execute(f'echo " Updating Template data." >> {dir_path}/example_template.erb') - post_template = entities.Template().imports( + post_template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(post_template['message']['templates'][0]['changed']) @pytest.mark.tier2 def test_positive_import_json_output_changed_key_false( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `changed` key returns `False` when template data gets updated @@ -663,11 +664,11 @@ def test_positive_import_json_output_changed_key_false( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - pre_template = entities.Template().imports( + pre_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(pre_template['message']['templates'][0]['imported']) - post_template = entities.Template().imports( + post_template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert not bool(post_template['message']['templates'][0]['changed']) @@ -697,7 +698,7 @@ def test_positive_import_json_output_name_key( target_sat.execute( f'sed -ie "s/name: .*/name: {template_name}/" {dir_path}/example_template.erb' ) - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert 'name' in template['message']['templates'][0].keys() @@ -705,7 +706,7 @@ def test_positive_import_json_output_name_key( @pytest.mark.tier2 def test_positive_import_json_output_imported_key( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output `imported` key returns `True` on successful import @@ -725,13 +726,15 @@ def test_positive_import_json_output_imported_key( """ prefix = gen_string('alpha') _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'prefix': prefix} ) assert bool(template['message']['templates'][0]['imported']) @pytest.mark.tier2 - def test_positive_import_json_output_file_key(self, create_import_export_local_dir, module_org): + def test_positive_import_json_output_file_key( + self, create_import_export_local_dir, module_org, module_target_sat + ): """Assert template imports output `file` key returns correct file name from where the template is imported @@ -750,7 +753,7 @@ def test_positive_import_json_output_file_key(self, create_import_export_local_d 
:CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert 'example_template.erb' == template['message']['templates'][0]['file'] @@ -780,7 +783,7 @@ def test_positive_import_json_output_corrupted_metadata( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/<%#/$#$#@%^$^@@RT$$/" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -791,7 +794,7 @@ def test_positive_import_json_output_corrupted_metadata( @pytest.mark.skip_if_open('BZ:1787355') @pytest.mark.tier2 def test_positive_import_json_output_filtered_skip_message( - self, create_import_export_local_dir, module_org + self, create_import_export_local_dir, module_org, module_target_sat ): """Assert template imports output returns template import skipped info for templates whose name doesnt match the filter @@ -812,7 +815,7 @@ def test_positive_import_json_output_filtered_skip_message( :CaseImportance: Low """ _, dir_path = create_import_export_local_dir - template = entities.Template().imports( + template = module_target_sat.api.Template().imports( data={ 'repo': dir_path, 'organization_ids': [module_org.id], @@ -850,7 +853,7 @@ def test_positive_import_json_output_no_name_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/name: .*/name: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -884,7 +887,7 @@ def test_positive_import_json_output_no_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "/model: .*/d" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -918,7 +921,7 @@ def test_positive_import_json_output_blank_model_error( """ _, dir_path = create_import_export_local_dir target_sat.execute(f'sed -ie "s/model: .*/model: /" {dir_path}/example_template.erb') - template = entities.Template().imports( + template = target_sat.api.Template().imports( data={'repo': dir_path, 'organization_ids': [module_org.id]} ) assert not bool(template['message']['templates'][0]['imported']) @@ -948,7 +951,7 @@ def test_positive_export_json_output( :CaseImportance: Low """ prefix = gen_string('alpha') - imported_templates = entities.Template().imports( + imported_templates = target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'automation', @@ -963,7 +966,7 @@ def test_positive_export_json_output( assert imported_count == 17 # Total Count # Export some filtered templates to local dir _, dir_path = create_import_export_local_dir - exported_templates = entities.Template().exports( + exported_templates = target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': prefix} ) exported_count = [ @@ -1000,7 +1003,7 @@ def test_positive_import_log_to_production(self, module_org, target_sat): 
:CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1038,7 +1041,7 @@ def test_positive_export_log_to_production( :CaseImportance: Low """ - entities.Template().imports( + target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1047,7 +1050,7 @@ def test_positive_export_log_to_production( } ) _, dir_path = create_import_export_local_dir - entities.Template().exports( + target_sat.api.Template().exports( data={'repo': dir_path, 'organization_ids': [module_org.id], 'filter': 'empty'} ) time.sleep(5) @@ -1076,7 +1079,7 @@ def test_positive_export_log_to_production( ids=['non_empty_repo', 'empty_repo'], ) def test_positive_export_all_templates_to_repo( - self, module_org, git_repository, git_branch, url + self, module_org, git_repository, git_branch, url, module_target_sat ): """Assure all templates are exported if no filter is specified. @@ -1094,7 +1097,7 @@ def test_positive_export_all_templates_to_repo( :CaseImportance: Low """ - output = entities.Template().exports( + output = module_target_sat.api.Template().exports( data={ 'repo': f'{url}/{git.username}/{git_repository["name"]}', 'branch': git_branch, @@ -1118,7 +1121,7 @@ def test_positive_export_all_templates_to_repo( assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_positive_import_all_templates_from_repo(self, module_org): + def test_positive_import_all_templates_from_repo(self, module_org, module_target_sat): """Assure all templates are imported if no filter is specified. :id: 95ac9543-d989-44f4-b4d9-18f20a0b58b9 @@ -1131,7 +1134,7 @@ def test_positive_import_all_templates_from_repo(self, module_org): :CaseImportance: Low """ - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'master', @@ -1150,7 +1153,7 @@ def test_positive_import_all_templates_from_repo(self, module_org): assert len(output['message']['templates']) == git_count @pytest.mark.tier2 - def test_negative_import_locked_template(self, module_org): + def test_negative_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are not pulled from repository. 
:id: 88e21cad-448e-45e0-add2-94493a1319c5 @@ -1164,7 +1167,7 @@ def test_negative_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1176,7 +1179,7 @@ def test_negative_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # try to import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1193,13 +1196,13 @@ def test_negative_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template @pytest.mark.tier2 - def test_positive_import_locked_template(self, module_org): + def test_positive_import_locked_template(self, module_org, module_target_sat): """Assure locked templates are pulled from repository while using force parameter. :id: 936c91cc-1947-45b0-8bf0-79ba4be87b97 @@ -1213,7 +1216,7 @@ def test_positive_import_locked_template(self, module_org): :CaseImportance: Medium """ # import template with lock - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1225,7 +1228,7 @@ def test_positive_import_locked_template(self, module_org): ) assert output['message']['templates'][0]['imported'] # force import same template with changed content - output = entities.Template().imports( + output = module_target_sat.api.Template().imports( data={ 'repo': FOREMAN_TEMPLATE_IMPORT_URL, 'branch': 'locked', @@ -1244,7 +1247,7 @@ def test_positive_import_locked_template(self, module_org): ) res.raise_for_status() git_content = base64.b64decode(json.loads(res.text)['content']) - sat_content = entities.ProvisioningTemplate( + sat_content = module_target_sat.api.ProvisioningTemplate( id=output['message']['templates'][0]['id'] ).read() assert git_content.decode('utf-8') == sat_content.template diff --git a/tests/foreman/api/test_user.py b/tests/foreman/api/test_user.py index eff47fbba0f..cd2c704e249 100644 --- a/tests/foreman/api/test_user.py +++ b/tests/foreman/api/test_user.py @@ -23,7 +23,6 @@ import json import re -from nailgun import entities from nailgun.config import ServerConfig import pytest from requests.exceptions import HTTPError @@ -45,9 +44,9 @@ @pytest.fixture(scope='module') -def create_user(): +def create_user(module_target_sat): """Create a user""" - return entities.User().create() + return module_target_sat.api.User().create() class TestUser: @@ -55,7 +54,7 @@ class TestUser: @pytest.mark.tier1 @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_with_username(self, username): + def test_positive_create_with_username(self, username, target_sat): """Create User for all variations of Username :id: a9827cda-7f6d-4785-86ff-3b6969c9c00a @@ -66,14 +65,14 @@ def test_positive_create_with_username(self, username): :CaseImportance: Critical """ - user = entities.User(login=username).create() + user = target_sat.api.User(login=username).create() assert user.login == username 
@pytest.mark.tier1 @pytest.mark.parametrize( 'firstname', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_firstname(self, firstname): + def test_positive_create_with_firstname(self, firstname, target_sat): """Create User for all variations of First Name :id: 036bb958-227c-420c-8f2b-c607136f12e0 @@ -86,14 +85,14 @@ def test_positive_create_with_firstname(self, firstname): """ if len(str.encode(firstname)) > 50: firstname = firstname[:20] - user = entities.User(firstname=firstname).create() + user = target_sat.api.User(firstname=firstname).create() assert user.firstname == firstname @pytest.mark.tier1 @pytest.mark.parametrize( 'lastname', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_lastname(self, lastname): + def test_positive_create_with_lastname(self, lastname, target_sat): """Create User for all variations of Last Name :id: 95d3b571-77e7-42a1-9c48-21f242e8cdc2 @@ -106,12 +105,12 @@ def test_positive_create_with_lastname(self, lastname): """ if len(str.encode(lastname)) > 50: lastname = lastname[:20] - user = entities.User(lastname=lastname).create() + user = target_sat.api.User(lastname=lastname).create() assert user.lastname == lastname @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_create_with_email(self, mail): + def test_positive_create_with_email(self, mail, target_sat): """Create User for all variations of Email :id: e68caf51-44ba-4d32-b79b-9ab9b67b9590 @@ -122,12 +121,12 @@ def test_positive_create_with_email(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = target_sat.api.User(mail=mail).create() assert user.mail == mail @pytest.mark.tier1 @pytest.mark.parametrize('description', **parametrized(valid_data_list())) - def test_positive_create_with_description(self, description): + def test_positive_create_with_description(self, description, target_sat): """Create User for all variations of Description :id: 1463d71c-b77d-4223-84fa-8370f77b3edf @@ -138,14 +137,14 @@ def test_positive_create_with_description(self, description): :CaseImportance: Critical """ - user = entities.User(description=description).create() + user = target_sat.api.User(description=description).create() assert user.description == description @pytest.mark.tier1 @pytest.mark.parametrize( 'password', **parametrized(generate_strings_list(exclude_types=['html'], max_length=50)) ) - def test_positive_create_with_password(self, password): + def test_positive_create_with_password(self, password, target_sat): """Create User for all variations of Password :id: 53d0a419-0730-4f7d-9170-d855adfc5070 @@ -156,13 +155,13 @@ def test_positive_create_with_password(self, password): :CaseImportance: Critical """ - user = entities.User(password=password).create() + user = target_sat.api.User(password=password).create() assert user is not None @pytest.mark.tier1 @pytest.mark.upgrade @pytest.mark.parametrize('mail', **parametrized(valid_emails_list())) - def test_positive_delete(self, mail): + def test_positive_delete(self, mail, target_sat): """Create random users and then delete it. 
:id: df6059e7-85c5-42fa-99b5-b7f1ef809f52 @@ -173,7 +172,7 @@ def test_positive_delete(self, mail): :CaseImportance: Critical """ - user = entities.User(mail=mail).create() + user = target_sat.api.User(mail=mail).create() user.delete() with pytest.raises(HTTPError): user.read() @@ -307,7 +306,7 @@ def test_positive_update_description(self, create_user, description): @pytest.mark.tier1 @pytest.mark.parametrize('admin_enable', [True, False]) - def test_positive_update_admin(self, admin_enable): + def test_positive_update_admin(self, admin_enable, target_sat): """Update a user and provide the ``admin`` attribute. :id: b5fedf65-37f5-43ca-806a-ac9a7979b19d @@ -318,13 +317,13 @@ def test_positive_update_admin(self, admin_enable): :CaseImportance: Critical """ - user = entities.User(admin=admin_enable).create() + user = target_sat.api.User(admin=admin_enable).create() user.admin = not admin_enable assert user.update().admin == (not admin_enable) @pytest.mark.tier1 @pytest.mark.parametrize('mail', **parametrized(invalid_emails_list())) - def test_negative_create_with_invalid_email(self, mail): + def test_negative_create_with_invalid_email(self, mail, target_sat): """Create User with invalid Email Address :id: ebbd1f5f-e71f-41f4-a956-ce0071b0a21c @@ -336,11 +335,11 @@ def test_negative_create_with_invalid_email(self, mail): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(mail=mail).create() + target_sat.api.User(mail=mail).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_usernames_list())) - def test_negative_create_with_invalid_username(self, invalid_name): + def test_negative_create_with_invalid_username(self, invalid_name, target_sat): """Create User with invalid Username :id: aaf157a9-0375-4405-ad87-b13970e0609b @@ -352,11 +351,11 @@ def test_negative_create_with_invalid_username(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(login=invalid_name).create() + target_sat.api.User(login=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_firstname(self, invalid_name): + def test_negative_create_with_invalid_firstname(self, invalid_name, target_sat): """Create User with invalid Firstname :id: cb1ca8a9-38b1-4d58-ae32-915b47b91657 @@ -368,11 +367,11 @@ def test_negative_create_with_invalid_firstname(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(firstname=invalid_name).create() + target_sat.api.User(firstname=invalid_name).create() @pytest.mark.tier1 @pytest.mark.parametrize('invalid_name', **parametrized(invalid_names_list())) - def test_negative_create_with_invalid_lastname(self, invalid_name): + def test_negative_create_with_invalid_lastname(self, invalid_name, target_sat): """Create User with invalid Lastname :id: 59546d26-2b6b-400b-990f-0b5d1c35004e @@ -384,10 +383,10 @@ def test_negative_create_with_invalid_lastname(self, invalid_name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.User(lastname=invalid_name).create() + target_sat.api.User(lastname=invalid_name).create() @pytest.mark.tier1 - def test_negative_create_with_blank_authorized_by(self): + def test_negative_create_with_blank_authorized_by(self, target_sat): """Create User with blank authorized by :id: 1fe2d1e3-728c-4d89-97ae-3890e904f413 @@ -397,7 +396,7 @@ def test_negative_create_with_blank_authorized_by(self): :CaseImportance: 
Critical """ with pytest.raises(HTTPError): - entities.User(auth_source='').create() + target_sat.api.User(auth_source='').create() @pytest.mark.tier1 def test_positive_table_preferences(self, module_target_sat): @@ -413,16 +412,16 @@ def test_positive_table_preferences(self, module_target_sat): :BZ: 1757394 """ - existing_roles = entities.Role().search() + existing_roles = module_target_sat.api.Role().search() password = gen_string('alpha') - user = entities.User(role=existing_roles, password=password).create() + user = module_target_sat.api.User(role=existing_roles, password=password).create() name = "hosts" columns = ["power_status", "name", "comment"] sc = ServerConfig( auth=(user.login, password), url=module_target_sat.url, verify=settings.server.verify_ca ) - entities.TablePreferences(sc, user=user, name=name, columns=columns).create() - table_preferences = entities.TablePreferences(sc, user=user).search() + module_target_sat.api.TablePreferences(sc, user=user, name=name, columns=columns).create() + table_preferences = module_target_sat.api.TablePreferences(sc, user=user).search() assert len(table_preferences) == 1 tp = table_preferences[0] assert hasattr(tp, 'name') @@ -437,14 +436,14 @@ class TestUserRole: """Test associations between users and roles.""" @pytest.fixture(scope='class') - def make_roles(self): + def make_roles(self, class_target_sat): """Create two roles.""" - return [entities.Role().create() for _ in range(2)] + return [class_target_sat.api.Role().create() for _ in range(2)] @pytest.mark.tier1 @pytest.mark.build_sanity @pytest.mark.parametrize('number_of_roles', range(1, 3)) - def test_positive_create_with_role(self, make_roles, number_of_roles): + def test_positive_create_with_role(self, make_roles, number_of_roles, class_target_sat): """Create a user with the ``role`` attribute. 
:id: 32daacf1-eed4-49b1-81e1-ab0a5b0113f2 @@ -458,7 +457,7 @@ def test_positive_create_with_role(self, make_roles, number_of_roles): :CaseImportance: Critical """ chosen_roles = make_roles[:number_of_roles] - user = entities.User(role=chosen_roles).create() + user = class_target_sat.api.User(role=chosen_roles).create() assert len(user.role) == number_of_roles assert {role.id for role in user.role} == {role.id for role in chosen_roles} @@ -488,14 +487,14 @@ class TestSshKeyInUser: """Implements the SSH Key in User Tests""" @pytest.fixture(scope='class') - def create_user(self): + def create_user(self, class_target_sat): """Create an user and import different keys from data json file""" - user = entities.User().create() + user = class_target_sat.api.User().create() data_keys = json.loads(DataFile.SSH_KEYS_JSON.read_bytes()) return dict(user=user, data_keys=data_keys) @pytest.mark.tier1 - def test_positive_CRD_ssh_key(self): + def test_positive_CRD_ssh_key(self, class_target_sat): """SSH Key can be added to User :id: d00905f6-3a70-4e2f-a5ae-fcac18274bb7 @@ -511,18 +510,18 @@ def test_positive_CRD_ssh_key(self): :CaseImportance: Critical """ - user = entities.User().create() + user = class_target_sat.api.User().create() ssh_name = gen_string('alpha') ssh_key = gen_ssh_keypairs()[1] - user_sshkey = entities.SSHKey(user=user, name=ssh_name, key=ssh_key).create() + user_sshkey = class_target_sat.api.SSHKey(user=user, name=ssh_name, key=ssh_key).create() assert ssh_name == user_sshkey.name assert ssh_key == user_sshkey.key user_sshkey.delete() - result = entities.SSHKey(user=user).search() + result = class_target_sat.api.SSHKey(user=user).search() assert len(result) == 0 @pytest.mark.tier1 - def test_negative_create_ssh_key(self, create_user): + def test_negative_create_ssh_key(self, create_user, target_sat): """Invalid ssh key can not be added in User Template :id: e924ff03-8b2c-4ab9-a054-ea491413e143 @@ -542,7 +541,7 @@ def test_negative_create_ssh_key(self, create_user): """ invalid_sshkey = gen_string('alpha', length=256) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_sshkey ).create() assert re.search('Key is not a valid public ssh key', context.value.response.text) @@ -551,7 +550,7 @@ def test_negative_create_ssh_key(self, create_user): assert re.search('Length could not be calculated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_invalid_length_ssh_key(self, create_user): + def test_negative_create_invalid_length_ssh_key(self, create_user, target_sat): """Attempt to add SSH key that has invalid length :id: 899f0c46-c7fe-4610-80f1-1add4a9cbc26 @@ -568,14 +567,14 @@ def test_negative_create_invalid_length_ssh_key(self, create_user): """ invalid_length_key = create_user['data_keys']['ssh_keys']['invalid_ssh_key'] with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=gen_string('alpha'), key=invalid_length_key ).create() assert re.search('Length could not be calculated', context.value.response.text) assert not re.search('Fingerprint could not be generated', context.value.response.text) @pytest.mark.tier1 - def test_negative_create_ssh_key_with_invalid_name(self, create_user): + def test_negative_create_ssh_key_with_invalid_name(self, create_user, target_sat): """Attempt to add SSH key that has invalid name length :id: e1e17839-a392-45bb-bb1e-28d3cd9dba1c @@ -591,14 +590,14 @@ def 
test_negative_create_ssh_key_with_invalid_name(self, create_user): """ invalid_ssh_key_name = gen_string('alpha', length=300) with pytest.raises(HTTPError) as context: - entities.SSHKey( + target_sat.api.SSHKey( user=create_user['user'], name=invalid_ssh_key_name, key=gen_ssh_keypairs()[1] ).create() assert re.search("Name is too long", context.value.response.text) @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_create_multiple_ssh_key_types(self, create_user): + def test_positive_create_multiple_ssh_key_types(self, create_user, class_target_sat): """Multiple types of ssh keys can be added to user :id: d1ffa908-dc86-40c8-b6f0-20650cc67046 @@ -615,15 +614,15 @@ def test_positive_create_multiple_ssh_key_types(self, create_user): dsa = create_user['data_keys']['ssh_keys']['dsa'] ecdsa = create_user['data_keys']['ssh_keys']['ecdsa'] ed = create_user['data_keys']['ssh_keys']['ed'] - user = entities.User().create() + user = class_target_sat.api.User().create() for key in [rsa, dsa, ecdsa, ed]: - entities.SSHKey(user=user, name=gen_string('alpha'), key=key).create() - user_sshkeys = entities.SSHKey(user=user).search() + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=key).create() + user_sshkeys = class_target_sat.api.SSHKey(user=user).search() assert len(user_sshkeys) == 4 @pytest.mark.tier2 @pytest.mark.upgrade - def test_positive_ssh_key_in_host_enc(self, target_sat): + def test_positive_ssh_key_in_host_enc(self, class_target_sat): """SSH key appears in host ENC output :id: 4b70a950-e777-4b2d-a83d-29279715fe6d @@ -639,13 +638,15 @@ def test_positive_ssh_key_in_host_enc(self, target_sat): :CaseLevel: Integration """ - org = entities.Organization().create() - loc = entities.Location(organization=[org]).create() - user = entities.User(organization=[org], location=[loc]).create() + org = class_target_sat.api.Organization().create() + loc = class_target_sat.api.Location(organization=[org]).create() + user = class_target_sat.api.User(organization=[org], location=[loc]).create() ssh_key = gen_ssh_keypairs()[1] - entities.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() - host = entities.Host(owner=user, owner_type='User', organization=org, location=loc).create() - sshkey_updated_for_host = f'{ssh_key} {user.login}@{target_sat.hostname}' + class_target_sat.api.SSHKey(user=user, name=gen_string('alpha'), key=ssh_key).create() + host = class_target_sat.api.Host( + owner=user, owner_type='User', organization=org, location=loc + ).create() + sshkey_updated_for_host = f'{ssh_key} {user.login}@{class_target_sat.hostname}' host_enc_key = host.enc()['data']['parameters']['ssh_authorized_keys'] assert sshkey_updated_for_host == host_enc_key[0] @@ -695,7 +696,7 @@ def create_ldap(self, ad_data, module_target_sat): @pytest.mark.tier2 @pytest.mark.upgrade @pytest.mark.parametrize('username', **parametrized(valid_usernames_list())) - def test_positive_create_in_ldap_mode(self, username, create_ldap): + def test_positive_create_in_ldap_mode(self, username, create_ldap, target_sat): """Create User in ldap mode :id: 6f8616b1-5380-40d2-8678-7c4434050cfb @@ -706,14 +707,14 @@ def test_positive_create_in_ldap_mode(self, username, create_ldap): :CaseLevel: Integration """ - user = entities.User( + user = target_sat.api.User( login=username, auth_source=create_ldap['authsource'], password='' ).create() assert user.login == username @pytest.mark.tier3 - def test_positive_ad_basic_no_roles(self, create_ldap): - """Login with LDAP Auth- AD for user with no roles/rights + def 
test_positive_ad_basic_no_roles(self, create_ldap, target_sat): + """Login with LDAP Auth AD for user with no roles/rights :id: 3910c6eb-6eff-4ab7-a50d-ba40f5c24c08 @@ -721,7 +722,7 @@ def test_positive_ad_basic_no_roles(self, create_ldap): :steps: Login to server with an AD user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access entities via the API. :CaseLevel: System """ @@ -731,7 +732,7 @@ def test_positive_ad_basic_no_roles(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(sc).search() @pytest.mark.tier3 @pytest.mark.upgrade @@ -764,8 +765,10 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_ user.delete() role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = module_target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = module_target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -780,22 +783,22 @@ def test_positive_access_entities_from_ldap_org_admin(self, create_ldap, module_ verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search( + module_target_sat.api.Architecture(sc).search() + user = module_target_sat.api.User().search( query={'search': 'login={}'.format(create_ldap['ldap_user_name'])} )[0] - user.role = [entities.Role(id=org_admin['id']).read()] + user.role = [module_target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + module_target_sat.api.Architecture, + module_target_sat.api.Audit, + module_target_sat.api.Bookmark, + module_target_sat.api.CommonParameter, + module_target_sat.api.LibvirtComputeResource, + module_target_sat.api.OVirtComputeResource, + module_target_sat.api.VMWareComputeResource, + module_target_sat.api.Errata, + module_target_sat.api.OperatingSystem, ]: entity(sc).search() @@ -843,7 +846,7 @@ def create_ldap(self, class_target_sat): user.delete() @pytest.mark.tier3 - def test_positive_ipa_basic_no_roles(self, create_ldap): + def test_positive_ipa_basic_no_roles(self, create_ldap, target_sat): """Login with LDAP Auth- FreeIPA for user with no roles/rights :id: 901a241d-aa76-4562-ab1a-a752e6fb7ed5 @@ -852,7 +855,7 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): :steps: Login to server with an FreeIPA user. - :expectedresults: Log in to foreman successfully but cannot access entities. + :expectedresults: Log in to foreman successfully but cannot access entities via the API. 
:CaseLevel: System """ @@ -862,11 +865,11 @@ def test_positive_ipa_basic_no_roles(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() + target_sat.api.Architecture(sc).search() @pytest.mark.tier3 @pytest.mark.upgrade - def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): + def test_positive_access_entities_from_ipa_org_admin(self, create_ldap, target_sat): """LDAP FreeIPA User can access resources within its taxonomies if assigned role has permission for same taxonomies @@ -885,8 +888,10 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): :CaseLevel: System """ role_name = gen_string('alpha') - default_org_admin = entities.Role().search(query={'search': 'name="Organization admin"'}) - org_admin = entities.Role(id=default_org_admin[0].id).clone( + default_org_admin = target_sat.api.Role().search( + query={'search': 'name="Organization admin"'} + ) + org_admin = target_sat.api.Role(id=default_org_admin[0].id).clone( data={ 'role': { 'name': role_name, @@ -901,22 +906,22 @@ def test_positive_access_entities_from_ipa_org_admin(self, create_ldap): verify=settings.server.verify_ca, ) with pytest.raises(HTTPError): - entities.Architecture(sc).search() - user = entities.User().search(query={'search': 'login={}'.format(create_ldap['username'])})[ - 0 - ] - user.role = [entities.Role(id=org_admin['id']).read()] + target_sat.api.Architecture(sc).search() + user = target_sat.api.User().search( + query={'search': 'login={}'.format(create_ldap['username'])} + )[0] + user.role = [target_sat.api.Role(id=org_admin['id']).read()] user.update(['role']) for entity in [ - entities.Architecture, - entities.Audit, - entities.Bookmark, - entities.CommonParameter, - entities.LibvirtComputeResource, - entities.OVirtComputeResource, - entities.VMWareComputeResource, - entities.Errata, - entities.OperatingSystem, + target_sat.api.Architecture, + target_sat.api.Audit, + target_sat.api.Bookmark, + target_sat.api.CommonParameter, + target_sat.api.LibvirtComputeResource, + target_sat.api.OVirtComputeResource, + target_sat.api.VMWareComputeResource, + target_sat.api.Errata, + target_sat.api.OperatingSystem, ]: entity(sc).search() diff --git a/tests/foreman/api/test_usergroup.py b/tests/foreman/api/test_usergroup.py index 01dcef27010..a01ee213a38 100644 --- a/tests/foreman/api/test_usergroup.py +++ b/tests/foreman/api/test_usergroup.py @@ -22,7 +22,6 @@ from random import randint from fauxfactory import gen_string -from nailgun import entities import pytest from requests.exceptions import HTTPError @@ -38,12 +37,12 @@ class TestUserGroup: """Tests for the ``usergroups`` path.""" @pytest.fixture - def user_group(self): - return entities.UserGroup().create() + def user_group(self, target_sat): + return target_sat.api.UserGroup().create() @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_name(self, name): + def test_positive_create_with_name(self, target_sat, name): """Create new user group using different valid names :id: 3a2255d9-f48d-4f22-a4b9-132361bd9224 @@ -54,12 +53,12 @@ def test_positive_create_with_name(self, name): :CaseImportance: Critical """ - user_group = entities.UserGroup(name=name).create() + user_group = target_sat.api.UserGroup(name=name).create() assert user_group.name == name @pytest.mark.tier1 @pytest.mark.parametrize('login', **parametrized(valid_usernames_list())) - def test_positive_create_with_user(self, login): + def 
test_positive_create_with_user(self, target_sat, login): """Create new user group using valid user attached to that group. :id: ab127e09-31d2-4c5b-ae6c-726e4b11a21e @@ -70,13 +69,13 @@ def test_positive_create_with_user(self, login): :CaseImportance: Critical """ - user = entities.User(login=login).create() - user_group = entities.UserGroup(user=[user]).create() + user = target_sat.api.User(login=login).create() + user_group = target_sat.api.UserGroup(user=[user]).create() assert len(user_group.user) == 1 assert user_group.user[0].read().login == login @pytest.mark.tier1 - def test_positive_create_with_users(self): + def test_positive_create_with_users(self, target_sat): """Create new user group using multiple users attached to that group. :id: b8dbbacd-b5cb-49b1-985d-96df21440652 @@ -86,15 +85,15 @@ def test_positive_create_with_users(self): :CaseImportance: Critical """ - users = [entities.User().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(user=users).create() + users = [target_sat.api.User().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(user=users).create() assert sorted(user.login for user in users) == sorted( user.read().login for user in user_group.user ) @pytest.mark.tier1 @pytest.mark.parametrize('role_name', **parametrized(valid_data_list())) - def test_positive_create_with_role(self, role_name): + def test_positive_create_with_role(self, target_sat, role_name): """Create new user group using valid role attached to that group. :id: c4fac71a-9dda-4e5f-a5df-be362d3cbd52 @@ -105,13 +104,13 @@ def test_positive_create_with_role(self, role_name): :CaseImportance: Critical """ - role = entities.Role(name=role_name).create() - user_group = entities.UserGroup(role=[role]).create() + role = target_sat.api.Role(name=role_name).create() + user_group = target_sat.api.UserGroup(role=[role]).create() assert len(user_group.role) == 1 assert user_group.role[0].read().name == role_name @pytest.mark.tier1 - def test_positive_create_with_roles(self): + def test_positive_create_with_roles(self, target_sat): """Create new user group using multiple roles attached to that group. :id: 5838fcfd-e256-49cf-aef8-b2bf215b3586 @@ -121,15 +120,15 @@ def test_positive_create_with_roles(self): :CaseImportance: Critical """ - roles = [entities.Role().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(role=roles).create() + roles = [target_sat.api.Role().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(role=roles).create() assert sorted(role.name for role in roles) == sorted( role.read().name for role in user_group.role ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(valid_data_list())) - def test_positive_create_with_usergroup(self, name): + def test_positive_create_with_usergroup(self, target_sat, name): """Create new user group using another user group attached to the initial group. 
@@ -141,13 +140,13 @@ def test_positive_create_with_usergroup(self, name): :CaseImportance: Critical """ - sub_user_group = entities.UserGroup(name=name).create() - user_group = entities.UserGroup(usergroup=[sub_user_group]).create() + sub_user_group = target_sat.api.UserGroup(name=name).create() + user_group = target_sat.api.UserGroup(usergroup=[sub_user_group]).create() assert len(user_group.usergroup) == 1 assert user_group.usergroup[0].read().name == name @pytest.mark.tier2 - def test_positive_create_with_usergroups(self): + def test_positive_create_with_usergroups(self, target_sat): """Create new user group using multiple user groups attached to that initial group. @@ -158,15 +157,15 @@ def test_positive_create_with_usergroups(self): :CaseLevel: Integration """ - sub_user_groups = [entities.UserGroup().create() for _ in range(randint(3, 5))] - user_group = entities.UserGroup(usergroup=sub_user_groups).create() + sub_user_groups = [target_sat.api.UserGroup().create() for _ in range(randint(3, 5))] + user_group = target_sat.api.UserGroup(usergroup=sub_user_groups).create() assert sorted(usergroup.name for usergroup in sub_user_groups) == sorted( usergroup.read().name for usergroup in user_group.usergroup ) @pytest.mark.tier1 @pytest.mark.parametrize('name', **parametrized(invalid_values_list())) - def test_negative_create_with_name(self, name): + def test_negative_create_with_name(self, target_sat, name): """Attempt to create user group with invalid name. :id: 1a3384dc-5d52-442c-87c8-e38048a61dfa @@ -178,10 +177,10 @@ def test_negative_create_with_name(self, name): :CaseImportance: Critical """ with pytest.raises(HTTPError): - entities.UserGroup(name=name).create() + target_sat.api.UserGroup(name=name).create() @pytest.mark.tier1 - def test_negative_create_with_same_name(self): + def test_negative_create_with_same_name(self, target_sat): """Attempt to create user group with a name of already existent entity. 
:id: aba0925a-d5ec-4e90-86c6-404b9b6f0179 @@ -190,9 +189,9 @@ def test_negative_create_with_same_name(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() with pytest.raises(HTTPError): - entities.UserGroup(name=user_group.name).create() + target_sat.api.UserGroup(name=user_group.name).create() @pytest.mark.tier1 @pytest.mark.parametrize('new_name', **parametrized(valid_data_list())) @@ -212,7 +211,7 @@ def test_positive_update(self, user_group, new_name): assert new_name == user_group.name @pytest.mark.tier1 - def test_positive_update_with_new_user(self): + def test_positive_update_with_new_user(self, target_sat): """Add new user to user group :id: e11b57c3-5f86-4963-9cc6-e10e2f02468b @@ -221,14 +220,14 @@ def test_positive_update_with_new_user(self): :CaseImportance: Critical """ - user = entities.User().create() - user_group = entities.UserGroup().create() + user = target_sat.api.User().create() + user_group = target_sat.api.UserGroup().create() user_group.user = [user] user_group = user_group.update(['user']) assert user.login == user_group.user[0].read().login @pytest.mark.tier2 - def test_positive_update_with_existing_user(self): + def test_positive_update_with_existing_user(self, target_sat): """Update user that assigned to user group with another one :id: 71b78f64-867d-4bf5-9b1e-02698a17fb38 @@ -237,14 +236,14 @@ def test_positive_update_with_existing_user(self): :CaseLevel: Integration """ - users = [entities.User().create() for _ in range(2)] - user_group = entities.UserGroup(user=[users[0]]).create() + users = [target_sat.api.User().create() for _ in range(2)] + user_group = target_sat.api.UserGroup(user=[users[0]]).create() user_group.user[0] = users[1] user_group = user_group.update(['user']) assert users[1].login == user_group.user[0].read().login @pytest.mark.tier1 - def test_positive_update_with_new_role(self): + def test_positive_update_with_new_role(self, target_sat): """Add new role to user group :id: 8e0872c1-ae88-4971-a6fc-cd60127d6663 @@ -253,15 +252,15 @@ def test_positive_update_with_new_role(self): :CaseImportance: Critical """ - new_role = entities.Role().create() - user_group = entities.UserGroup().create() + new_role = target_sat.api.Role().create() + user_group = target_sat.api.UserGroup().create() user_group.role = [new_role] user_group = user_group.update(['role']) assert new_role.name == user_group.role[0].read().name @pytest.mark.tier1 @pytest.mark.upgrade - def test_positive_update_with_new_usergroup(self): + def test_positive_update_with_new_usergroup(self, target_sat): """Add new user group to existing one :id: 3cb29d07-5789-4f94-9fd9-a7e494b3c110 @@ -270,8 +269,8 @@ def test_positive_update_with_new_usergroup(self): :CaseImportance: Critical """ - new_usergroup = entities.UserGroup().create() - user_group = entities.UserGroup().create() + new_usergroup = target_sat.api.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.usergroup = [new_usergroup] user_group = user_group.update(['usergroup']) assert new_usergroup.name == user_group.usergroup[0].read().name @@ -295,7 +294,7 @@ def test_negative_update(self, user_group, new_name): assert user_group.read().name != new_name @pytest.mark.tier1 - def test_negative_update_with_same_name(self): + def test_negative_update_with_same_name(self, target_sat): """Attempt to update user group with a name of already existent entity. 
:id: 14888998-9282-4d81-9e99-234d19706783 @@ -305,15 +304,15 @@ def test_negative_update_with_same_name(self): :CaseImportance: Critical """ name = gen_string('alphanumeric') - entities.UserGroup(name=name).create() - new_user_group = entities.UserGroup().create() + target_sat.api.UserGroup(name=name).create() + new_user_group = target_sat.api.UserGroup().create() new_user_group.name = name with pytest.raises(HTTPError): new_user_group.update(['name']) assert new_user_group.read().name != name @pytest.mark.tier1 - def test_positive_delete(self): + def test_positive_delete(self, target_sat): """Create user group with valid name and then delete it :id: c5cfcc4a-9177-47bb-8f19-7a8930eb7ca3 @@ -322,7 +321,7 @@ def test_positive_delete(self): :CaseImportance: Critical """ - user_group = entities.UserGroup().create() + user_group = target_sat.api.UserGroup().create() user_group.delete() with pytest.raises(HTTPError): user_group.read() diff --git a/tests/foreman/api/test_webhook.py b/tests/foreman/api/test_webhook.py index b806dbb24c0..a220b19af99 100644 --- a/tests/foreman/api/test_webhook.py +++ b/tests/foreman/api/test_webhook.py @@ -18,7 +18,6 @@ """ import re -from nailgun import entities import pytest from requests.exceptions import HTTPError from wait_for import TimedOutError, wait_for @@ -68,7 +67,7 @@ def assert_event_triggered(channel, event): class TestWebhook: @pytest.mark.tier2 - def test_negative_invalid_event(self): + def test_negative_invalid_event(self, target_sat): """Test negative webhook creation with an invalid event :id: 60cd456a-9943-45cb-a72e-23a83a691499 @@ -78,11 +77,11 @@ def test_negative_invalid_event(self): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.Webhooks(event='invalid_event').create() + target_sat.api.Webhooks(event='invalid_event').create() @pytest.mark.tier2 @pytest.mark.parametrize('event', **parametrized(WEBHOOK_EVENTS)) - def test_positive_valid_event(self, event): + def test_positive_valid_event(self, event, target_sat): """Test positive webhook creation with a valid event :id: 9b505f1b-7ee1-4362-b44c-f3107d043a05 @@ -91,11 +90,11 @@ def test_positive_valid_event(self, event): :CaseImportance: High """ - hook = entities.Webhooks(event=event).create() + hook = target_sat.api.Webhooks(event=event).create() assert event in hook.event @pytest.mark.tier2 - def test_negative_invalid_method(self): + def test_negative_invalid_method(self, target_sat): """Test negative webhook creation with an invalid HTTP method :id: 573be312-7bf3-4d9e-aca1-e5cac810d04b @@ -105,11 +104,11 @@ def test_negative_invalid_method(self): :CaseImportance: High """ with pytest.raises(HTTPError): - entities.Webhooks(http_method='NONE').create() + target_sat.api.Webhooks(http_method='NONE').create() @pytest.mark.tier2 @pytest.mark.parametrize('method', **parametrized(WEBHOOK_METHODS)) - def test_positive_valid_method(self, method): + def test_positive_valid_method(self, method, target_sat): """Test positive webhook creation with a valid HTTP method :id: cf8f276a-d21e-44d0-92f2-657232240c7e @@ -118,12 +117,12 @@ def test_positive_valid_method(self, method): :CaseImportance: High """ - hook = entities.Webhooks(http_method=method).create() + hook = target_sat.api.Webhooks(http_method=method).create() assert hook.http_method == method @pytest.mark.tier1 @pytest.mark.e2e - def test_positive_end_to_end(self): + def test_positive_end_to_end(self, target_sat): """Create a new webhook. 
:id: 7593a04e-cf7e-414c-9e7e-3fe2936cc32a @@ -132,7 +131,7 @@ def test_positive_end_to_end(self): :CaseImportance: Critical """ - hook = entities.Webhooks().create() + hook = target_sat.api.Webhooks().create() assert hook hook.name = "testing" hook.http_method = "GET" @@ -149,7 +148,7 @@ def test_positive_end_to_end(self): (not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url' ) @pytest.mark.tier2 - def test_positive_event_triggered(self, module_org, target_sat): + def test_positive_event_triggered(self, module_org, module_target_sat): """Create a webhook and trigger the event associated with it. @@ -160,13 +159,13 @@ def test_positive_event_triggered(self, module_org, target_sat): :CaseImportance: Critical """ - hook = entities.Webhooks( + hook = module_target_sat.api.Webhooks( event='actions.katello.repository.sync_succeeded', http_method='GET' ).create() - repo = entities.Repository( + repo = module_target_sat.api.Repository( organization=module_org, content_type='yum', url=settings.repos.yum_0.url ).create() - with target_sat.session.shell() as shell: + with module_target_sat.session.shell() as shell: shell.send('foreman-tail') repo.sync() assert_event_triggered(shell, hook.event)
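
Illustrative sketch, not part of the patch above: every hunk in this diff replaces the module-level nailgun `entities` factory with a Satellite fixture whose `.api` namespace exposes the same entity classes, with the fixture name matching the scope the test needs (`target_sat` per function, `class_target_sat` per class, `module_target_sat` per module). A minimal hypothetical test written against that pattern, assuming the fixture resolves to a configured Satellite object as the diff implies:

import pytest
from requests.exceptions import HTTPError


@pytest.mark.tier1
def test_positive_create_and_delete_role(target_sat):
    """Hypothetical example of the fixture-backed API style."""
    # Build the entity through the fixture's API client instead of the
    # removed `from nailgun import entities` import.
    role = target_sat.api.Role().create()
    assert role.id

    # Deleting and re-reading should raise, proving the entity is gone.
    role.delete()
    with pytest.raises(HTTPError):
        role.read()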