ISS refactor - batch 3
Introduced changes:
1. Extended test_positive_import_content_for_disconnected_sat_with_existing_content
   with an assertion for the correct error message when a non-import-only CV is used for import
2. Removed _import_entities
3. Added tests for customer scenarios:
   test_postive_export_import_cv_with_long_name
   test_positive_export_rerun_failed_import
   test_postive_export_import_repo_with_GPG

(cherry picked from commit aa0b931)
vsedmik authored and web-flow committed Oct 31, 2023
1 parent 7216f6f commit e5cacb9
240 changes: 199 additions & 41 deletions tests/foreman/cli/test_satellitesync.py
@@ -17,10 +17,12 @@
:Upstream: No
"""
import os
from time import sleep

from fauxfactory import gen_string
from manifester import Manifester
import pytest
from wait_for import wait_for

from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.content_export import ContentExport
@@ -46,6 +48,7 @@
PULP_IMPORT_DIR,
REPO_TYPE,
REPOS,
DataFile,
)
from robottelo.constants.repos import ANSIBLE_GALAXY

@@ -486,41 +489,6 @@ def _create_cv(cv_name, repo, module_org, publish=True):
return content_view, cvv_id


def _import_entities(product, repo, cv, mos='no'):
"""Sets same CV, product and repository in importing organization as
exporting organization
:param str product: The product name same as exporting product
:param str repo: The repo name same as exporting repo
:param str cv: The cv name same as exporting cv
:param str mos: Mirror on Sync repo, by default 'no' can override to 'yes'
:returns dictionary with CLI entities created in this function
"""
importing_org = make_org()
importing_prod = make_product({'organization-id': importing_org['id'], 'name': product})
importing_repo = make_repository(
{
'name': repo,
'mirror-on-sync': mos,
'download-policy': 'immediate',
'product-id': importing_prod['id'],
}
)
importing_cv = make_content_view({'name': cv, 'organization-id': importing_org['id']})
ContentView.add_repository(
{
'id': importing_cv['id'],
'organization-id': importing_org['id'],
'repository-id': importing_repo['id'],
}
)
return {
'importing_org': importing_org,
'importing_repo': importing_repo,
'importing_cv': importing_cv,
}


class TestContentViewSync:
"""Implements Content View Export Import tests in CLI"""

@@ -1333,6 +1301,107 @@ def test_postive_export_import_cv_with_file_content(
assert len(imported_files)
assert len(exported_files) == len(imported_files)

@pytest.mark.tier2
@pytest.mark.parametrize(
'function_synced_rhel_repo',
['rhae2'],
indirect=True,
)
def test_positive_export_rerun_failed_import(
self,
target_sat,
config_export_import_settings,
export_import_cleanup_function,
function_synced_rhel_repo,
function_sca_manifest_org,
function_import_org_with_manifest,
):
"""Verify that import can be rerun successfully after failed import.
:id: 73e7cece-9a93-4203-9c2c-813d5a8d7700
:parametrized: yes
:setup:
1. Enabled and synced RH repository.
:steps:
1. Create CV, add repo from the setup, publish it and run export.
2. Start import of the CV into another organization and kill it before it's done.
3. Rerun the import again, let it finish and check the CVV was imported.
:expectedresults:
1. First import should fail, no CVV should be added.
2. Second import should succeed without errors and should contain the CVV.
:CaseImportance: Medium
:BZ: 2058905
:customerscenario: true
"""
# Create CV and publish
cv_name = gen_string('alpha')
cv = target_sat.cli_factory.make_content_view(
{'name': cv_name, 'organization-id': function_sca_manifest_org.id}
)
target_sat.cli.ContentView.add_repository(
{
'id': cv['id'],
'organization-id': function_sca_manifest_org.id,
'repository-id': function_synced_rhel_repo['id'],
}
)
target_sat.cli.ContentView.publish({'id': cv['id']})
cv = target_sat.cli.ContentView.info({'id': cv['id']})
assert len(cv['versions']) == 1
cvv = cv['versions'][0]
# Verify export directory is empty
assert target_sat.validate_pulp_filepath(function_sca_manifest_org, PULP_EXPORT_DIR) == ''
# Export the CV
export = target_sat.cli.ContentExport.completeVersion(
{'id': cvv['id'], 'organization-id': function_sca_manifest_org.id}
)
import_path = target_sat.move_pulp_archive(function_sca_manifest_org, export['message'])
assert target_sat.execute(f'ls {import_path}').stdout != ''
# Run the import asynchronously
task_id = target_sat.cli.ContentImport.version(
{
'organization-id': function_import_org_with_manifest.id,
'path': import_path,
'async': True,
}
)['id']
# Wait for the CV creation on import and make the import fail
wait_for(
lambda: target_sat.cli.ContentView.info(
{'name': cv_name, 'organization-id': function_import_org_with_manifest.id}
)
)
target_sat.cli.Service.restart()
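# Give the services time to come back up before checking the interrupted task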
sleep(30)
# Assert that the initial import task did not succeed and CVV was removed
assert (
target_sat.api.ForemanTask()
.search(
query={'search': f'Actions::Katello::ContentViewVersion::Import and id = {task_id}'}
)[0]
.result
!= 'success'
)
importing_cvv = target_sat.cli.ContentView.info(
{'name': cv_name, 'organization-id': function_import_org_with_manifest.id}
)['versions']
assert len(importing_cvv) == 0
# Rerun the import and let it finish
target_sat.cli.ContentImport.version(
{'organization-id': function_import_org_with_manifest.id, 'path': import_path}
)
importing_cvv = target_sat.cli.ContentView.info(
{'name': cv_name, 'organization-id': function_import_org_with_manifest.id}
)['versions']
assert len(importing_cvv) == 1

@pytest.mark.tier3
def test_postive_export_import_ansible_collection_repo(
self,
@@ -1390,6 +1459,73 @@ def test_postive_export_import_ansible_collection_repo(
assert len(import_product['content']) == 1
assert import_product['content'][0]['content-type'] == "ansible_collection"

@pytest.mark.tier3
def test_postive_export_import_repo_with_GPG(
self,
target_sat,
config_export_import_settings,
export_import_cleanup_function,
function_org,
function_synced_custom_repo,
function_import_org,
):
"""Test export and import of a repository with GPG key
:id: a5b455aa-e87e-4ae5-a1c7-4c8e6c7f7af5
:setup:
1. Product with synced custom repository.
:steps:
1. Create a GPG key and add it to the setup repository.
2. Export the repository and import it into another organization.
:expectedresults:
1. Export and import succeed without any errors.
2. GPG key is imported to the importing org too.
:CaseImportance: Medium
:BZ: 2178645, 2090390
:customerscenario: true
"""
# Create a GPG key and add it to the setup repository.
gpg_key = target_sat.api.GPGKey(
organization=function_org,
content=DataFile.VALID_GPG_KEY_FILE.read_text(),
).create()
target_sat.cli.Repository.update(
{'id': function_synced_custom_repo.id, 'gpg-key-id': gpg_key.id}
)
# Export the repository and import it into another organization.
export = target_sat.cli.ContentExport.completeRepository(
{'id': function_synced_custom_repo.id}
)
import_path = target_sat.move_pulp_archive(function_org, export['message'])
target_sat.cli.ContentImport.repository(
{
'organization-id': function_import_org.id,
'path': import_path,
}
)
# Check the imported repo has the GPG key assigned.
imported_repo = target_sat.cli.Repository.info(
{
'name': function_synced_custom_repo.name,
'product': function_synced_custom_repo.product.name,
'organization-id': function_import_org.id,
}
)
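# A non-zero package count confirms the repository content itself was imported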
assert int(imported_repo['content-counts']['packages'])
assert imported_repo['gpg-key']['name'] == gpg_key.name
# Check the GPG key is imported to the importing org too.
imported_gpg = target_sat.cli.ContentCredential.info(
{'organization-id': function_import_org.id, 'name': gpg_key.name}
)
assert imported_gpg
assert imported_gpg['content'] == gpg_key.content

@pytest.mark.tier3
@pytest.mark.parametrize(
'function_synced_rhel_repo',
@@ -1480,13 +1616,15 @@ def test_positive_import_content_for_disconnected_sat_with_existing_content(
1. Product with synced custom repository, published in a CV.
:steps:
1. Run complete export of the CV.
2. On Disconnected satellite, create a cv with same name as cv on 2 and with
'import-only' selected.
3. Run the import command.
1. Run complete export of the CV from setup.
2. On Disconnected satellite, create a CV with the same name as setup CV and with
'import-only' set to False and run the import command.
3. On Disconnected satellite, create a CV with the same name as setup CV and with
'import-only' set to True and run the import command.
:expectedresults:
1. Import should run successfully
1. Import should fail with the correct message when the existing CV has 'import-only' set to False.
2. Import should succeed when the existing CV has 'import-only' set to True.
:BZ: 2030101
@@ -1505,7 +1643,27 @@ def test_positive_import_content_for_disconnected_sat_with_existing_content(
result = target_sat.execute(f'ls {import_path}')
assert result.stdout != ''
# Import section
# Create cv with 'import-only' set to true
# Create cv with 'import-only' set to False
cv = target_sat.cli_factory.make_content_view(
{
'name': export_cv_name,
'import-only': False,
'organization-id': function_import_org.id,
}
)
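# Importing into a CV that is not marked import-only should be rejected with a descriptive error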
with pytest.raises(CLIReturnCodeError) as error:
target_sat.cli.ContentImport.version(
{'organization-id': function_import_org.id, 'path': import_path}
)
assert (
f"Unable to import in to Content View specified in the metadata - '{export_cv_name}'. "
"The 'import_only' attribute for the content view is set to false. To mark this "
"Content View as importable, have your system administrator run the following command "
f"on the server. \n foreman-rake katello:set_content_view_import_only ID={cv.id}"
) in error.value.message
target_sat.cli.ContentView.remove({'id': cv.id, 'destroy-content-view': 'yes'})

# Create cv with 'import-only' set to True
target_sat.cli_factory.make_content_view(
{'name': export_cv_name, 'import-only': True, 'organization-id': function_import_org.id}
)
