diff --git a/robottelo/host_helpers/capsule_mixins.py b/robottelo/host_helpers/capsule_mixins.py
index 9e283509696..12a0b3a345f 100644
--- a/robottelo/host_helpers/capsule_mixins.py
+++ b/robottelo/host_helpers/capsule_mixins.py
@@ -1,6 +1,8 @@
-from datetime import datetime
+from datetime import datetime, timedelta
 import time
 
+from dateutil.parser import parse
+
 from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS
 from robottelo.logging import logger
 from robottelo.utils.installer import InstallerCommand
@@ -61,26 +63,85 @@ def wait_for_tasks(
             raise AssertionError(f"No task was found using query '{search_query}'")
         return tasks
 
-    def wait_for_sync(self, timeout=600, start_time=None):
-        """Wait for capsule sync to finish and assert the sync task succeeded"""
-        # Assert that a task to sync lifecycle environment to the capsule
-        # is started (or finished already)
+    def wait_for_sync(self, timeout=600, start_time=None, return_result=False):
+        """Wait for a capsule sync to finish and assert that it succeeded.
+
+        Asserts that a task syncing a lifecycle environment to the capsule has
+        started (or already finished), polls any active sync task(s) until they
+        succeed, and then asserts that the capsule's last_sync_time is on or
+        after ``start_time`` and matches the end time of the final sync task.
+        If active task(s) were found, the last one polled must be the capsule's
+        final sync task.
+
+        :param start_time: datetime (UTC) the sync should occur on or after;
+            defaults to the current time.
+        :param return_result: when True, return ``[active_sync, sync_status]``,
+            i.e. whether any active sync task was found and the updated sync status.
+        :return: list [bool, dict] if ``return_result`` is True, otherwise None.
+        """
+        # query capsule sync status to catch any quick sync task(s) in progress
+        sync_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        active_tasks = sync_status['active_sync_tasks']
+        active_sync = bool(active_tasks)
+
         if start_time is None:
             start_time = datetime.utcnow().replace(microsecond=0)
+        # subtract a 1s margin of safety for rounding and use the API's timestamp format
+        start_time = (
+            (start_time - timedelta(seconds=1))
+            .replace(microsecond=0)
+            .strftime('%Y-%m-%d %H:%M:%S UTC')
+        )
         logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
-        sync_status = self.nailgun_capsule.content_get_sync()
         logger.info(f"Active tasks {sync_status['active_sync_tasks']}")
+        # there must be some active sync task(s), or an already updated last_sync_time
         assert (
             len(sync_status['active_sync_tasks'])
-            or datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC')
-            >= start_time
+            or parse(sync_status['last_sync_time']) >= parse(start_time)
+        ), (
+            f'No active sync task(s) were found for capsule {self.hostname},'
+            f' and its last_sync_time: {sync_status["last_sync_time"]}'
+            f' is not on or after the expected start_time: {start_time}.'
         )
-        # Wait till capsule sync finishes and assert the sync task succeeded
-        for task in sync_status['active_sync_tasks']:
+        # any ongoing sync task(s) must succeed when polled
+        for task in active_tasks:
             self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout)
-        sync_status = self.nailgun_capsule.content_get_sync()
+            logger.info(f"Active sync task id {task['id']} succeeded.")
+
+        # get the updated capsule sync status after polling any task(s)
+        logger.info(f"Querying updated sync status from capsule {self.hostname}.")
+        sync_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        # the end time of the final sync task equals the capsule's last_sync_time
+        assert parse(sync_status['last_sync_time']) == parse(
+            sync_status['last_sync_task']['ended_at']
+        )
+        logger.info(
+            f"Final sync task for the capsule ended at: {sync_status['last_sync_task']['ended_at']}."
+        )
+        # the last sync must not predate start_time and must have finished within the timeout
+        assert (
+            timedelta(seconds=0)
+            <= parse(sync_status['last_sync_time']) - parse(start_time)
+            <= timedelta(seconds=timeout)
+        ), (
+            f'Capsule {self.hostname}: last_sync_time {sync_status["last_sync_time"]}'
+            f' is earlier than start_time {start_time}, or the sync exceeded the {timeout}s timeout.'
+        )
+        if active_sync:
+            # when in-progress task(s) were found, the last one is the capsule's final sync task
+            assert active_tasks[-1]['id'] == sync_status['last_sync_task']['id']
+            logger.info(
+                f"Active sync task id {active_tasks[-1]['id']} was the final capsule sync task."
+            )
+
+        # no failed or active sync task(s) remain
         assert len(sync_status['last_failed_sync_tasks']) == 0
+        assert len(sync_status['active_sync_tasks']) == 0
+
+        # optionally return whether an active sync was found, plus the updated sync status
+        if return_result:
+            return [active_sync, sync_status]
 
     def get_published_repo_url(self, org, prod, repo, lce=None, cv=None):
         """Forms url of a repo or CV published on a Satellite or Capsule.
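Usage note: the tests below all follow the same pattern with the updated helper — capture a UTC timestamp just before the action that triggers a capsule sync (publishing or promoting a content view), then pass it as start_time. A minimal sketch, assuming a configured capsule fixture and a promotable content view version as in the tests below (the fixture and entity names are illustrative):

    from datetime import datetime

    timestamp = datetime.utcnow()
    cvv.promote(data={'environment_ids': function_lce.id})
    # wait for the sync triggered by the promotion; the helper asserts that it
    # succeeded and was recorded on or after `timestamp`
    active_sync, sync_status = module_capsule_configured.wait_for_sync(
        start_time=timestamp, return_result=True
    )

With the default return_result=False the call returns None, and the assertions inside the helper are the only checks performed.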
diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py
index abbfd9f9f32..0d708aebb95 100644
--- a/tests/foreman/api/test_capsulecontent.py
+++ b/tests/foreman/api/test_capsulecontent.py
@@ -12,7 +12,10 @@
 :CaseImportance: High
 
 """
-from datetime import datetime
+from datetime import (
+    datetime,
+    timedelta,
+)
 import re
 from time import sleep
 
@@ -20,11 +23,24 @@
 from nailgun.entity_mixins import call_entity_method_with_timeout
 import pytest
 
-from robottelo import constants
 from robottelo.config import settings
 from robottelo.constants import (
+    CONTAINER_CLIENTS,
     CONTAINER_REGISTRY_HUB,
     CONTAINER_UPSTREAM_NAME,
+    ENVIRONMENT,
+    FAKE_1_YUM_REPOS_COUNT,
+    FAKE_3_YUM_REPO_RPMS,
+    FAKE_3_YUM_REPOS_COUNT,
+    FAKE_FILE_LARGE_COUNT,
+    FAKE_FILE_LARGE_URL,
+    FAKE_FILE_NEW_NAME,
+    KICKSTART_CONTENT,
+    PRDS,
+    REPOS,
+    REPOSET,
+    RH_CONTAINER_REGISTRY_HUB,
+    RPM_TO_UPLOAD,
     DataFile,
 )
 from robottelo.constants.repos import ANSIBLE_GALAXY, CUSTOM_FILE_REPO
@@ -83,14 +99,14 @@ def test_positive_uploaded_content_library_sync(
         assert repo.read().content_counts['rpm'] == 1
 
+        timestamp = datetime.utcnow().replace(microsecond=0)
         # Publish new version of the content view
         cv.publish()
+        # publishing to Library invokes a capsule sync; wait for the sync task to succeed
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cv = cv.read()
-        assert len(cv.version) == 1
-
-        module_capsule_configured.wait_for_sync()
-
         # Verify the RPM published on Capsule
         caps_repo_url = module_capsule_configured.get_published_repo_url(
             org=function_org.label,
@@ -101,7 +117,7 @@
         )
         caps_files = get_repo_files_by_url(caps_repo_url)
         assert len(caps_files) == 1
-        assert caps_files[0] == constants.RPM_TO_UPLOAD
+        assert caps_files[0] == RPM_TO_UPLOAD
 
     @pytest.mark.tier4
     @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest')
@@ -149,13 +165,13 @@ def test_positive_checksum_sync(
         assert len(cv.version) == 1
         cvv = cv.version[-1].read()
 
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
 
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
 
-        module_capsule_configured.wait_for_sync()
-
         # Verify repodata's checksum type is sha256, not sha1 on capsule
         repo_url = module_capsule_configured.get_published_repo_url(
             org=function_org.label,
@@ -182,13 +198,13 @@
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
 
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
 
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
 
-        module_capsule_configured.wait_for_sync()
-
         # Verify repodata's checksum type has updated to sha1 on capsule
         repomd = get_repomd(repo_url)
         checksum_types = re.findall(r'(?<=checksum type=").*?(?=")', repomd)
@@ -257,12 +273,13 @@ def test_positive_sync_updated_repo(
         assert len(cv.version) == 1
         cvv = cv.version[-1].read()
 
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
+
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cvv = cvv.read()
         assert len(cvv.environment) == 2
 
-        module_capsule_configured.wait_for_sync()
-
         # Upload more content to the repository
         with open(DataFile.SRPM_TO_UPLOAD, 'rb') as handle:
             repo.upload_content(files={'content': handle})
@@ -276,12 +293,13 @@ def test_positive_sync_updated_repo(
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
 
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
+
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cvv = cvv.read()
         assert len(cvv.environment) == 2
 
-        module_capsule_configured.wait_for_sync()
-
         # Check the content is synced on the Capsule side properly
         sat_repo_url = target_sat.get_published_repo_url(
             org=function_org.label,
@@ -357,10 +375,18 @@ def test_positive_capsule_sync(
         assert len(cv.version) == 1
         cvv = cv.version[-1].read()
 
-        # Promote content view to lifecycle environment
+        # before promoting (which triggers a capsule sync), assert there are no active sync tasks
+        active_tasks = module_capsule_configured.nailgun_capsule.content_get_sync(
+            synchronous=True, timeout=600
+        )['active_sync_tasks']
+        assert len(active_tasks) == 0
+        # Promote content view to lifecycle environment,
+        # which invokes capsule sync task(s)
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
 
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
 
         # Content of the published content view in
@@ -368,8 +394,6 @@
         # repository
         assert repo.content_counts['rpm'] == cvv.package_count
 
-        module_capsule_configured.wait_for_sync()
-
         # Assert that the content published on the capsule is exactly the
         # same as in repository on satellite
         sat_repo_url = target_sat.get_published_repo_url(
@@ -404,14 +428,14 @@
         cv = cv.read()
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
-        # Promote new content view version to lifecycle environment
+        # Promote new content view version to lifecycle environment;
+        # the invoked capsule sync task(s) must succeed
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
-
-        module_capsule_configured.wait_for_sync()
-
         # Assert that the value of repomd revision of repository in
         # lifecycle environment on the capsule has not changed
         new_lce_revision_capsule = get_repomd_revision(caps_repo_url)
@@ -427,21 +451,22 @@
         cv = cv.read()
         cv.version.sort(key=lambda version: version.id)
         cvv = cv.version[-1].read()
+
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        cvv = cvv.read()
 
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
+        cvv = cvv.read()
         assert len(cvv.environment) == 2
 
         # Assert that packages count in the repository is updated
-        assert repo.content_counts['rpm'] == (constants.FAKE_1_YUM_REPOS_COUNT + 1)
+        assert repo.content_counts['rpm'] == (FAKE_1_YUM_REPOS_COUNT + 1)
 
         # Assert that the content of the published content view in
         # lifecycle environment is exactly the same as content of the
         # repository
         assert repo.content_counts['rpm'] == cvv.package_count
 
-        module_capsule_configured.wait_for_sync()
-
         # Assert that the content published on the capsule is exactly the
         # same as in the repository
         sat_files = get_repo_files_by_url(sat_repo_url)
@@ -451,7 +476,7 @@
     @pytest.mark.tier4
     @pytest.mark.skip_if_not_set('capsule', 'clients')
     def test_positive_iso_library_sync(
-        self, module_capsule_configured, module_entitlement_manifest_org, module_target_sat
+        self, module_capsule_configured, module_sca_manifest_org,
module_target_sat ): """Ensure RH repo with ISOs after publishing to Library is synchronized to capsule automatically @@ -467,18 +492,18 @@ def test_positive_iso_library_sync( # Enable & sync RH repository with ISOs rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhsc'], - repo=constants.REPOS['rhsc7_iso']['name'], - reposet=constants.REPOSET['rhsc7_iso'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhsc'], + repo=REPOS['rhsc7_iso']['name'], + reposet=REPOSET['rhsc7_iso'], releasever=None, ) rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization lce = module_target_sat.api.LifecycleEnvironment( - organization=module_entitlement_manifest_org - ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] + organization=module_sca_manifest_org + ).search(query={'search': f'name={ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -491,23 +516,23 @@ def test_positive_iso_library_sync( # Create a content view with the repository cv = module_target_sat.api.ContentView( - organization=module_entitlement_manifest_org, repository=[rh_repo] + organization=module_sca_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view + timestamp = datetime.utcnow() cv.publish() - cv = cv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cv = cv.read() assert len(cv.version) == 1 # Verify ISOs are present on satellite sat_isos = get_repo_files_by_url(rh_repo.full_path, extension='iso') assert len(sat_isos) == 4 - module_capsule_configured.wait_for_sync() - # Verify all the ISOs are present on capsule caps_path = ( - f'{module_capsule_configured.url}/pulp/content/{module_entitlement_manifest_org.label}' + f'{module_capsule_configured.url}/pulp/content/{module_sca_manifest_org.label}' f'/{lce.label}/{cv.label}/content/dist/rhel/server/7/7Server/x86_64/sat-capsule/6.4/' 'iso/' ) @@ -540,8 +565,8 @@ def test_positive_on_demand_sync( the original package from the upstream repo """ repo_url = settings.repos.yum_3.url - packages_count = constants.FAKE_3_YUM_REPOS_COUNT - package = constants.FAKE_3_YUM_REPO_RPMS[0] + packages_count = FAKE_3_YUM_REPOS_COUNT + package = FAKE_3_YUM_REPO_RPMS[0] repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -573,13 +598,13 @@ def test_positive_on_demand_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify packages on Capsule match the source caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -624,7 +649,7 @@ def test_positive_update_with_immediate_sync( filesystem contains valid links to packages """ repo_url = settings.repos.yum_1.url - packages_count = constants.FAKE_1_YUM_REPOS_COUNT + packages_count = FAKE_1_YUM_REPOS_COUNT repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -655,13 +680,13 @@ def test_positive_update_with_immediate_sync( cvv 
= cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Update download policy to 'immediate' repo.download_policy = 'immediate' repo = repo.update(['download_policy']) @@ -683,13 +708,13 @@ def test_positive_update_with_immediate_sync( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify the count of RPMs published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -730,7 +755,7 @@ def test_positive_capsule_pub_url_accessible(self, module_capsule_configured): @pytest.mark.skip_if_not_set('capsule', 'clients') @pytest.mark.parametrize('distro', ['rhel7', 'rhel8_bos', 'rhel9_bos']) def test_positive_sync_kickstart_repo( - self, target_sat, module_capsule_configured, function_entitlement_manifest_org, distro + self, target_sat, module_capsule_configured, function_sca_manifest_org, distro ): """Sync kickstart repository to the capsule. @@ -751,16 +776,14 @@ def test_positive_sync_kickstart_repo( """ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['kickstart'][distro]['product'], - reposet=constants.REPOS['kickstart'][distro]['reposet'], - repo=constants.REPOS['kickstart'][distro]['name'], - releasever=constants.REPOS['kickstart'][distro]['version'], + org_id=function_sca_manifest_org.id, + product=REPOS['kickstart'][distro]['product'], + reposet=REPOS['kickstart'][distro]['reposet'], + repo=REPOS['kickstart'][distro]['name'], + releasever=REPOS['kickstart'][distro]['version'], ) repo = target_sat.api.Repository(id=repo_id).read() - lce = target_sat.api.LifecycleEnvironment( - organization=function_entitlement_manifest_org - ).create() + lce = target_sat.api.LifecycleEnvironment(organization=function_sca_manifest_org).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -775,7 +798,7 @@ def test_positive_sync_kickstart_repo( # Create a content view with the repository cv = target_sat.api.ContentView( - organization=function_entitlement_manifest_org, repository=[repo] + organization=function_sca_manifest_org, repository=[repo] ).create() # Sync repository repo.sync(timeout='10m') @@ -788,26 +811,26 @@ def test_positive_sync_kickstart_repo( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check for kickstart content on SAT and CAPS tail = ( - f'rhel/server/7/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' + f'rhel/server/7/{REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' if distro == 
'rhel7' - else f'{distro.split("_")[0]}/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 + else f'{distro.split("_")[0]}/{REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 ) url_base = ( - f'pulp/content/{function_entitlement_manifest_org.label}/{lce.label}/{cv.label}/' + f'pulp/content/{function_sca_manifest_org.label}/{lce.label}/{cv.label}/' f'content/dist/{tail}' ) # Check kickstart specific files - for file in constants.KICKSTART_CONTENT: + for file in KICKSTART_CONTENT: sat_file = target_sat.md5_by_url(f'{target_sat.url}/{url_base}/{file}') caps_file = target_sat.md5_by_url(f'{module_capsule_configured.url}/{url_base}/{file}') assert sat_file == caps_file @@ -887,12 +910,13 @@ def test_positive_sync_container_repo_end_to_end( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Pull the images from capsule to the content host repo_paths = [ ( @@ -902,7 +926,7 @@ def test_positive_sync_container_repo_end_to_end( for repo in repos ] - for con_client in constants.CONTAINER_CLIENTS: + for con_client in CONTAINER_CLIENTS: result = container_contenthost.execute( f'{con_client} login -u {settings.server.admin_username}' f' -p {settings.server.admin_password} {module_capsule_configured.hostname}' @@ -1005,10 +1029,12 @@ def test_positive_sync_collection_repo( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync the repo + timestamp = datetime.utcnow() repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['ansible_collection'] == 2 - module_capsule_configured.wait_for_sync() + + module_capsule_configured.wait_for_sync(start_time=timestamp) repo_path = repo.full_path.replace(target_sat.hostname, module_capsule_configured.hostname) coll_path = './collections' @@ -1063,7 +1089,7 @@ def test_positive_sync_file_repo( repo = target_sat.api.Repository( content_type='file', product=function_product, - url=constants.FAKE_FILE_LARGE_URL, + url=FAKE_FILE_LARGE_URL, ).create() repo.sync() @@ -1087,12 +1113,13 @@ def test_positive_sync_file_repo( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Run one more sync, check for status (BZ#1985122) sync_status = module_capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success' @@ -1114,8 +1141,8 @@ def test_positive_sync_file_repo( ) sat_files = get_repo_files_by_url(sat_repo_url, extension='iso') caps_files = get_repo_files_by_url(caps_repo_url, extension='iso') - assert len(sat_files) == len(caps_files) == constants.FAKE_FILE_LARGE_COUNT + 1 - assert constants.FAKE_FILE_NEW_NAME in caps_files + assert len(sat_files) == len(caps_files) == FAKE_FILE_LARGE_COUNT + 1 + assert FAKE_FILE_NEW_NAME in caps_files assert sat_files == caps_files for file in sat_files: @@ -1152,9 +1179,9 @@ def test_positive_sync_CV_to_multiple_LCEs( repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_manifest_org.id, - 
             product=constants.PRDS['rhel'],
-            repo=constants.REPOS['rhel7_extra']['name'],
-            reposet=constants.REPOSET['rhel7_extra'],
+            product=PRDS['rhel'],
+            repo=REPOS['rhel7_extra']['name'],
+            reposet=REPOSET['rhel7_extra'],
             releasever=None,
         )
         repo = target_sat.api.Repository(id=repo_id).read()
@@ -1183,16 +1210,20 @@
-        # Promote the CV to both Capsule's LCEs without waiting for Capsule sync task completion.
+        # Promote the CV to both Capsule's LCEs, waiting for the Capsule sync after each promotion.
         cvv = cv.version[-1].read()
+        assert len(cvv.environment) == 1
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': lce1.id})
+
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cvv = cvv.read()
         assert len(cvv.environment) == 2
 
+        timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': lce2.id})
+
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
         cvv = cvv.read()
         assert len(cvv.environment) == 3
 
-        # Check all sync tasks finished without errors.
-        module_capsule_configured.wait_for_sync()
-
     @pytest.mark.tier4
     @pytest.mark.skip_if_not_set('capsule')
     def test_positive_capsule_sync_status_persists(
@@ -1235,7 +1266,8 @@
         cvv = cv.version[-1].read()
         timestamp = datetime.utcnow()
         cvv.promote(data={'environment_ids': function_lce.id})
-        module_capsule_configured.wait_for_sync()
+
+        module_capsule_configured.wait_for_sync(start_time=timestamp)
 
         # Delete all capsule sync tasks so that we fall back for audits.
         task_result = target_sat.execute(
@@ -1264,7 +1296,7 @@ def test_positive_remove_capsule_orphans(
         self,
         target_sat,
         capsule_configured,
-        function_entitlement_manifest_org,
+        function_sca_manifest_org,
         function_lce_library,
     ):
         """Synchronize RPM content to the capsule, disassociate the capsule form the content
@@ -1295,10 +1327,10 @@ def test_positive_remove_capsule_orphans(
         # Enable RHST repo and sync it to the Library LCE.
         repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid(
             basearch='x86_64',
-            org_id=function_entitlement_manifest_org.id,
-            product=constants.REPOS['rhst8']['product'],
-            repo=constants.REPOS['rhst8']['name'],
-            reposet=constants.REPOSET['rhst8'],
+            org_id=function_sca_manifest_org.id,
+            product=REPOS['rhst8']['product'],
+            repo=REPOS['rhst8']['name'],
+            reposet=REPOSET['rhst8'],
         )
         repo = target_sat.api.Repository(id=repo_id).read()
         repo.sync()
@@ -1331,13 +1363,20 @@ def test_positive_remove_capsule_orphans(
         sync_status = capsule_configured.nailgun_capsule.content_sync()
         assert sync_status['result'] == 'success', 'Capsule sync task failed.'
 
+        # local-time datetime string, used below to find the orphan cleanup task started after this point
+        timestamp = (datetime.now().replace(microsecond=0) - timedelta(seconds=1)).strftime(
+            '%B %d, %Y at %I:%M:%S %p'
+        )
         # Run orphan cleanup for the capsule.
target_sat.execute( 'foreman-rake katello:delete_orphaned_content RAILS_ENV=production ' f'SMART_PROXY_ID={capsule_configured.nailgun_capsule.id}' ) target_sat.wait_for_tasks( - search_query=('label = Actions::Katello::OrphanCleanup::RemoveOrphans'), + search_query=( + 'label = Actions::Katello::OrphanCleanup::RemoveOrphans' + f' and started_at >= "{timestamp}"' + ), search_rate=5, max_tries=10, ) @@ -1388,7 +1427,7 @@ def test_positive_capsule_sync_openstack_container_repos( content_type='docker', docker_upstream_name=ups_name, product=function_product, - url=constants.RH_CONTAINER_REGISTRY_HUB, + url=RH_CONTAINER_REGISTRY_HUB, upstream_username=settings.subscription.rhn_username, upstream_password=settings.subscription.rhn_password, ).create() @@ -1411,12 +1450,13 @@ def test_positive_capsule_sync_openstack_container_repos( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - @pytest.mark.parametrize( 'repos_collection', [