Merge branch 'master' into nilclass-publish
sambible authored Jan 17, 2024
2 parents 97f2c57 + f351608 commit 38ed52e
Showing 5 changed files with 294 additions and 10 deletions.
3 changes: 3 additions & 0 deletions pytest_plugins/video_cleanup.py
@@ -68,6 +68,9 @@ def pytest_runtest_makereport(item):
         if item.nodeid in test_results:
             result_info = test_results[item.nodeid]
             if result_info.outcome == 'passed':
+                report.user_properties = [
+                    (key, value) for key, value in report.user_properties if key != 'video_url'
+                ]
                 session_id_tuple = next(
                     (t for t in report.user_properties if t[0] == 'session_id'), None
                 )
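For context, a minimal sketch of what the added filter does, with made-up property values: when a test passed, the 'video_url' entry is dropped from report.user_properties, so the report no longer links a recording for it.

# Hypothetical illustration only, not part of the commit
user_properties = [('session_id', 'abc123'), ('video_url', 'https://example.com/abc123.webm')]
user_properties = [(key, value) for key, value in user_properties if key != 'video_url']
# user_properties is now [('session_id', 'abc123')]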
2 changes: 1 addition & 1 deletion robottelo/cli/content_export.py
@@ -31,7 +31,7 @@ class ContentExport(Base):
     command_requires_org = True
 
     @classmethod
-    def list(cls, output_format='json', options=None):
+    def list(cls, options=None, output_format='json'):
         """
         List previous exports
         """
6 changes: 4 additions & 2 deletions robottelo/cli/hammer.py
@@ -48,10 +48,12 @@ def parse_csv(output):
     """Parse CSV output from Hammer CLI and convert it to python dictionary."""
     # ignore warning about puppet and ostree deprecation
     output.replace('Puppet and OSTree will no longer be supported in Katello 3.16\n', '')
+    is_rex = True if 'Job invocation' in output else False
     # Validate if the output is eligible for CSV conversions else return as it is
-    if not is_csv(output):
+    if not is_csv(output) and not is_rex:
         return output
-    reader = csv.reader(output.splitlines())
+    output = output.splitlines()[0:2] if is_rex else output.splitlines()
+    reader = csv.reader(output)
     # Generate the key names, spaces will be converted to dashes "-"
     keys = [_normalize(header) for header in next(reader)]
     # For each entry, create a dict mapping each key with each value
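A rough illustration of the new is_rex branch, using made-up output (the exact 'Job invocation' output format is an assumption): when the output mentions 'Job invocation', only the first two lines, the CSV header and the single data row, are handed to the CSV reader, so trailing non-CSV detail lines cannot break parsing.

# Hypothetical example only, not part of the commit
output = 'ID,Description,Status\n42,Job invocation detail,succeeded\nsome trailing non-CSV line\n'
is_rex = 'Job invocation' in output   # True
lines = output.splitlines()[0:2]      # keep only the header and the first data row
# csv.reader(lines) then parses cleanly; parse_csv would return roughly
# [{'id': '42', 'description': 'Job invocation detail', 'status': 'succeeded'}]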
46 changes: 46 additions & 0 deletions tests/foreman/cli/test_remoteexecution.py
@@ -20,6 +20,7 @@
 from dateutil.relativedelta import FR, relativedelta
 from fauxfactory import gen_string
 import pytest
+from wait_for import wait_for
 
 from robottelo import constants
 from robottelo.config import settings
@@ -581,6 +582,51 @@ def test_positive_run_scheduled_job_template(self, rex_contenthost, target_sat):
         sleep(30)
         assert_job_invocation_result(target_sat, invocation_command['id'], client.hostname)
 
+    @pytest.mark.tier3
+    @pytest.mark.rhel_ver_list([8, 9])
+    def test_recurring_with_unreachable_host(self, module_target_sat, rhel_contenthost):
+        """Run a recurring task against a host that is not reachable and verify it gets rescheduled
+
+        :id: 570f6d75-6bbf-40b0-a1df-6c26b588dca8
+
+        :expectedresults: The job is being rescheduled indefinitely even though it fails
+
+        :BZ: 1784254
+
+        :customerscenario: true
+
+        :parametrized: yes
+        """
+        cli = module_target_sat.cli
+        host = module_target_sat.cli_factory.make_fake_host()
+        # shutdown the host and wait for it to surely be unreachable
+        rhel_contenthost.execute("shutdown -h +1")  # shutdown after one minute
+        sleep(120)
+        invocation = module_target_sat.cli_factory.job_invocation(
+            {
+                'job-template': 'Run Command - Script Default',
+                'inputs': 'command=echo this wont ever run',
+                'search-query': f'name ~ {host.name}',
+                'cron-line': '* * * * *',  # every minute
+            }
+        )
+        cli.RecurringLogic.info(
+            {'id': cli.JobInvocation.info({'id': invocation.id})['recurring-logic-id']}
+        )
+        # wait for the third task to be planned which verifies the BZ
+        wait_for(
+            lambda: int(
+                cli.RecurringLogic.info(
+                    {'id': cli.JobInvocation.info({'id': invocation.id})['recurring-logic-id']}
+                )['task-count']
+            )
+            > 2,
+            timeout=180,
+            delay=10,
+        )
+        # check that the first task indeed failed which verifies the test was done correctly
+        assert cli.JobInvocation.info({'id': invocation.id})['failed'] != '0'
+
 
 class TestAnsibleREX:
     """Test class for remote execution via Ansible"""
247 changes: 240 additions & 7 deletions tests/foreman/cli/test_satellitesync.py
@@ -1163,6 +1163,91 @@ def test_negative_import_invalid_path(self, module_org, module_target_sat):
             '--metadata-file option'
         ) in error.value.message
 
+    @pytest.mark.tier3
+    @pytest.mark.parametrize(
+        'function_synced_rh_repo',
+        ['rhae2'],
+        indirect=True,
+    )
+    def test_negative_import_incomplete_archive(
+        self,
+        target_sat,
+        config_export_import_settings,
+        export_import_cleanup_function,
+        function_synced_rh_repo,
+        function_sca_manifest_org,
+        function_import_org_with_manifest,
+    ):
+        """Try to import an incomplete export archive (mock interrupted transfer).
+
+        :id: c3b898bb-c6c8-402d-82f9-b15774d9f0fc
+
+        :parametrized: yes
+
+        :setup:
+            1. Enabled and synced RH repository.
+
+        :steps:
+            1. Create CV with the setup repo, publish it and export.
+            2. Corrupt the export archive so that it's incomplete.
+            3. Try to import the incomplete archive.
+            4. Verify no content is imported and the import CV can be deleted.
+
+        :expectedresults:
+            1. The import should fail.
+            2. No content should be added, the empty import CV can be deleted.
+        """
+        # Create CV with the setup repo, publish it and export
+        cv = target_sat.cli_factory.make_content_view(
+            {
+                'organization-id': function_sca_manifest_org.id,
+                'repository-ids': [function_synced_rh_repo['id']],
+            }
+        )
+        target_sat.cli.ContentView.publish({'id': cv['id']})
+        cv = target_sat.cli.ContentView.info({'id': cv['id']})
+        assert len(cv['versions']) == 1
+        cvv = cv['versions'][0]
+        export = target_sat.cli.ContentExport.completeVersion(
+            {'id': cvv['id'], 'organization-id': function_sca_manifest_org.id}
+        )
+        assert '1.0' in target_sat.validate_pulp_filepath(
+            function_sca_manifest_org, PULP_EXPORT_DIR
+        )
+
+        # Corrupt the export archive so that it's incomplete
+        tar_files = target_sat.execute(
+            f'find {PULP_EXPORT_DIR}{function_sca_manifest_org.name}/{cv["name"]}/ -name *.tar'
+        ).stdout.splitlines()
+        assert len(tar_files) == 1, 'Expected just one tar file in the export'
+
+        size = int(target_sat.execute(f'du -b {tar_files[0]}').stdout.split()[0])
+        assert size > 0, 'Export tar should not be empty'
+
+        res = target_sat.execute(f'truncate -s {size // 2} {tar_files[0]}')
+        assert res.status == 0, 'Truncation of the tar file failed'
+
+        # Try to import the incomplete archive
+        import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message'])
+        with pytest.raises(CLIReturnCodeError) as error:
+            target_sat.cli.ContentImport.version(
+                {'organization-id': function_import_org_with_manifest.id, 'path': import_path}
+            )
+        assert '1 subtask(s) failed' in error.value.message
+
+        # Verify no content is imported and the import CV can be deleted
+        imported_cv = target_sat.cli.ContentView.info(
+            {'name': cv['name'], 'organization-id': function_import_org_with_manifest.id}
+        )
+        assert len(imported_cv['versions']) == 0, 'There should be no CV version imported'
+
+        target_sat.cli.ContentView.delete({'id': imported_cv['id']})
+        with pytest.raises(CLIReturnCodeError) as error:
+            target_sat.cli.ContentView.info(
+                {'name': cv['name'], 'organization-id': function_import_org_with_manifest.id}
+            )
+        assert 'content_view not found' in error.value.message, 'The imported CV should be gone'
+
     @pytest.mark.tier3
     def test_postive_export_cv_with_mixed_content_repos(
         self,
@@ -1231,6 +1316,92 @@ def test_postive_export_cv_with_mixed_content_repos(
         # Verify export directory is not empty
         assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != ''
 
+    @pytest.mark.tier3
+    def test_postive_export_import_cv_with_mixed_content_syncable(
+        self,
+        export_import_cleanup_function,
+        target_sat,
+        function_org,
+        function_synced_custom_repo,
+        function_synced_file_repo,
+        function_import_org,
+    ):
+        """Export and import CV with mixed content in the syncable format.
+
+        :id: cb1aecac-d48a-4154-9ca7-71788674148f
+
+        :setup:
+            1. Synced repositories of syncable-supported content types: yum, file
+
+        :steps:
+            1. Create CV, add all setup repos and publish.
+            2. Export CV version contents in syncable format.
+            3. Import the syncable export, check the content.
+
+        :expectedresults:
+            1. Export succeeds and content is exported.
+            2. Import succeeds, content is imported and matches the export.
+        """
+        # Create CV, add all setup repos and publish
+        cv = target_sat.cli_factory.make_content_view({'organization-id': function_org.id})
+        repos = [
+            function_synced_custom_repo,
+            function_synced_file_repo,
+        ]
+        for repo in repos:
+            target_sat.cli.ContentView.add_repository(
+                {
+                    'id': cv['id'],
+                    'organization-id': function_org.id,
+                    'repository-id': repo['id'],
+                }
+            )
+        target_sat.cli.ContentView.publish({'id': cv['id']})
+        exporting_cv = target_sat.cli.ContentView.info({'id': cv['id']})
+        exporting_cvv = target_sat.cli.ContentView.version_info(
+            {'id': exporting_cv['versions'][0]['id']}
+        )
+        exported_packages = target_sat.cli.Package.list(
+            {'content-view-version-id': exporting_cvv['id']}
+        )
+        exported_files = target_sat.cli.File.list({'content-view-version-id': exporting_cvv['id']})
+
+        # Export CV version contents in syncable format
+        assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) == ''
+        export = target_sat.cli.ContentExport.completeVersion(
+            {'id': exporting_cvv['id'], 'organization-id': function_org.id, 'format': 'syncable'}
+        )
+        assert target_sat.validate_pulp_filepath(function_org, PULP_EXPORT_DIR) != ''
+
+        # Import the syncable export
+        import_path = target_sat.move_pulp_archive(function_org, export['message'])
+        target_sat.cli.ContentImport.version(
+            {'organization-id': function_import_org.id, 'path': import_path}
+        )
+        importing_cv = target_sat.cli.ContentView.info(
+            {'name': exporting_cv['name'], 'organization-id': function_import_org.id}
+        )
+        assert all(
+            [exporting_cv[key] == importing_cv[key] for key in ['label', 'name']]
+        ), 'Imported CV name/label does not match the export'
+        assert (
+            len(exporting_cv['versions']) == len(importing_cv['versions']) == 1
+        ), 'CV versions count does not match'
+
+        importing_cvv = target_sat.cli.ContentView.version_info(
+            {'id': importing_cv['versions'][0]['id']}
+        )
+        assert (
+            len(exporting_cvv['repositories']) == len(importing_cvv['repositories']) == len(repos)
+        ), 'Repositories count does not match'
+
+        imported_packages = target_sat.cli.Package.list(
+            {'content-view-version-id': importing_cvv['id']}
+        )
+        imported_files = target_sat.cli.File.list({'content-view-version-id': importing_cvv['id']})
+        assert exported_packages == imported_packages, 'Imported RPMs do not match the export'
+        assert exported_files == imported_files, 'Imported Files do not match the export'
+
     @pytest.mark.tier3
     def test_postive_export_import_cv_with_file_content(
         self,
@@ -1698,6 +1869,8 @@ def test_positive_export_incremental_syncable_check_content(
         :id: 6ff771cd-39ef-4865-8ae8-629f4baf5f98
 
         :parametrized: yes
 
+        :setup:
+            1. Enabled and synced RH repository.
@@ -1819,22 +1992,80 @@ def test_positive_reimport_repo(self):
         """
 
-    @pytest.mark.stubbed
     @pytest.mark.tier3
-    def test_negative_export_repo_from_future_datetime(self):
-        """Incremental export fails with future datetime.
+    @pytest.mark.parametrize(
+        'function_synced_rh_repo',
+        ['rhae2'],
+        indirect=True,
+    )
+    def test_export_repo_incremental_with_history_id(
+        self,
+        target_sat,
+        export_import_cleanup_function,
+        config_export_import_settings,
+        function_sca_manifest_org,
+        function_synced_rh_repo,
+    ):
+        """Test incremental export with history id.
 
         :id: 1e8bc352-198f-4d59-b437-1b184141fab4
 
+        :parametrized: yes
+
+        :setup:
+            1. Enabled and synced RH repository.
+
         :steps:
-            1. Export the repo incrementally from the future date time.
+            1. Run repo complete export, ensure it's listed in history.
+            2. Run incremental export using history id of the complete export,
+               ensure it's listed in history.
+            3. Run incremental export using non-existent history id.
 
         :expectedresults:
-            1. Error is raised for attempting to export from future datetime.
-
-        :CaseAutomation: NotAutomated
+            1. First (complete) export succeeds and can be listed including history id.
+            2. Second (incremental) export succeeds and can be listed including history id.
+            3. Third (incremental) export fails when wrong id is provided.
         """
+        # Verify export directory is empty
+        assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == ''
+
+        # Run repo complete export, ensure it's listed in history.
+        target_sat.cli.ContentExport.completeRepository({'id': function_synced_rh_repo['id']})
+        assert '1.0' in target_sat.validate_pulp_filepath(
+            function_sca_manifest_org, PULP_EXPORT_DIR
+        )
+
+        history = target_sat.cli.ContentExport.list(
+            {'organization-id': function_sca_manifest_org.id}
+        )
+        assert len(history) == 1, 'Expected just one item in the export history'
+
+        # Run incremental export using history id of the complete export,
+        # ensure it's listed in history.
+        target_sat.cli.ContentExport.incrementalRepository(
+            {'id': function_synced_rh_repo['id'], 'from-history-id': history[0]['id']}
+        )
+        assert '2.0' in target_sat.validate_pulp_filepath(
+            function_sca_manifest_org, PULP_EXPORT_DIR
+        )
+
+        history = target_sat.cli.ContentExport.list(
+            {'organization-id': function_sca_manifest_org.id}
+        )
+        assert len(history) == 2, 'Expected two items in the export history'
+        assert int(history[1]['id']) == int(history[0]['id']) + 1, 'Inconsistent history spotted'
+
+        # Run incremental export using non-existent history id.
+        next_id = int(history[1]['id']) + 1
+        with pytest.raises(CLIReturnCodeError) as error:
+            target_sat.cli.ContentExport.incrementalRepository(
+                {'id': function_synced_rh_repo['id'], 'from-history-id': next_id}
+            )
+        assert (
+            f"Couldn't find Katello::ContentViewVersionExportHistory with 'id'={next_id}"
+            in error.value.message
+        ), 'Unexpected error message'
 
     @pytest.mark.tier3
     @pytest.mark.upgrade
@@ -1940,6 +2171,8 @@ def test_positive_export_import_mismatch_label(
         :id: eb2f3e8e-3ee6-4713-80ab-3811a098e079
 
         :parametrized: yes
 
+        :setup:
+            1. Enabled and synced RH yum repository.
