[6.12.z] Fix test failures from cli.factory refactor #13467

Merged
64 changes: 32 additions & 32 deletions tests/foreman/cli/test_repository.py
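
The pattern applied throughout this diff: entity CLI wrappers such as Repository and Product are now invoked through the target_sat.cli namespace, while the factory helpers (make_org, make_lifecycle_environment, make_content_view) stay on target_sat.cli_factory. Below is a minimal illustrative sketch of the new-style calls, not part of the PR itself; it assumes the robottelo target_sat, module_org, and repo fixtures used by the tests in this file.

import pytest


@pytest.mark.tier2
def test_positive_sync_sketch(repo, module_org, target_sat):
    """Illustrative sketch only; assumes robottelo's target_sat, module_org and repo fixtures."""
    # Entity CLI wrappers now live on target_sat.cli, not target_sat.cli_factory.
    target_sat.cli.Repository.synchronize({'id': repo['id']})
    synced_repo = target_sat.cli.Repository.info({'id': repo['id']})
    assert synced_repo['sync']['status'] == 'Success'
    # Factory helpers are unchanged and remain on target_sat.cli_factory.
    cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
    target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
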
@@ -2517,8 +2517,8 @@ def test_positive_sync_ansible_collection(self, repo, module_target_sat):
:parametrized: yes

"""
-module_target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = module_target_sat.cli_factory.Repository.info({'id': repo['id']})
+module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = module_target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'

@pytest.mark.tier2
@@ -2550,8 +2550,8 @@ def test_positive_export_ansible_collection(self, repo, module_org, target_sat):

"""
import_org = target_sat.cli_factory.make_org()
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
# export
result = target_sat.cli.ContentExport.completeLibrary({'organization-id': module_org.id})
@@ -2567,15 +2567,15 @@ def test_positive_export_ansible_collection(self, repo, module_org, target_sat):
{'name': 'Import-Library', 'organization-label': import_org['label']}
)
assert cv['description'] == 'Content View used for importing into library'
-prods = target_sat.cli_factory.Product.list({'organization-id': import_org['id']})
-prod = target_sat.cli_factory.Product.info(
+prods = target_sat.cli.Product.list({'organization-id': import_org['id']})
+prod = target_sat.cli.Product.info(
{'id': prods[0]['id'], 'organization-id': import_org['id']}
)
ac_content = [
cont for cont in prod['content'] if cont['content-type'] == 'ansible_collection'
]
assert len(ac_content) > 0
-repo = target_sat.cli_factory.Repository.info(
+repo = target_sat.cli.Repository.info(
{'name': ac_content[0]['repo-name'], 'product-id': prod['id']}
)
result = target_sat.execute(f'curl {repo["published-at"]}')
@@ -2610,8 +2610,8 @@ def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat)

"""
import_org = target_sat.cli_factory.make_org()
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
published_url = repo['published-at']
# sync from different org
@@ -2628,8 +2628,8 @@ def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat)
[{ name: theforeman.operations, version: "0.1.0"}]}',
}
)
-target_sat.cli_factory.Repository.synchronize({'id': repo_2['id']})
-repo_2_status = target_sat.cli_factory.Repository.info({'id': repo_2['id']})
+target_sat.cli.Repository.synchronize({'id': repo_2['id']})
+repo_2_status = target_sat.cli.Repository.info({'id': repo_2['id']})
assert repo_2_status['sync']['status'] == 'Success'


@@ -2653,8 +2653,8 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, target_sat):
lifecycle environment
"""
lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-synced_repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+synced_repo = target_sat.cli.Repository.info({'id': repo['id']})
assert synced_repo['sync']['status'].lower() == 'success'
assert synced_repo['content-counts']['packages'] == '35'
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
@@ -2688,7 +2688,7 @@ def test_positive_sync(self, repo, module_org, module_product, target_sat):

:expectedresults: drpms can be listed in repository
"""
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
result = target_sat.execute(
f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library"
f"/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm"
@@ -2711,7 +2711,7 @@ def test_positive_sync_publish_cv(self, repo, module_org, module_product, target

:expectedresults: drpms can be listed in content view
"""
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
target_sat.cli.ContentView.publish({'id': cv['id']})
@@ -2740,7 +2740,7 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product
lifecycle environment
"""
lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
target_sat.cli.ContentView.publish({'id': cv['id']})
@@ -2976,7 +2976,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
local_path=DataFile.RPM_TO_UPLOAD,
remote_path=f"/tmp/{RPM_TO_UPLOAD}",
)
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -2985,7 +2985,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
}
)
assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(
query={"search": f"name={RPM_TO_UPLOAD} and repository={repo['name']}"}
@@ -3041,7 +3041,7 @@ def test_positive_remove_file(self, repo, target_sat):
local_path=DataFile.RPM_TO_UPLOAD,
remote_path=f"/tmp/{RPM_TO_UPLOAD}",
)
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -3050,13 +3050,13 @@ def test_positive_remove_file(self, repo, target_sat):
}
)
assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 0
files = target_sat.cli.File.list({'repository-id': repo['id']})
-target_sat.cli_factory.Repository.remove_content(
+target_sat.cli.Repository.remove_content(
{'id': repo['id'], 'ids': [file_['id'] for file_ in files]}
)
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['content-counts']['files'] == '0'

@pytest.mark.tier2
@@ -3094,8 +3094,8 @@ def test_positive_remote_directory_sync(self, repo, module_target_sat):
:expectedresults: entire directory is synced over http

"""
-module_target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = module_target_sat.cli_factory.Repository.info({'id': repo['id']})
+module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = module_target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
assert repo['content-counts']['files'] == '2'

@@ -3132,8 +3132,8 @@ def test_positive_file_repo_local_directory_sync(self, repo, target_sat):
f'wget -P {CUSTOM_LOCAL_FOLDER} -r -np -nH --cut-dirs=5 -R "index.html*" '
f'{CUSTOM_FILE_REPO}'
)
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 1

@pytest.mark.tier2
@@ -3172,8 +3172,8 @@ def test_positive_symlinks_sync(self, repo, target_sat):
)
target_sat.execute(f'ln -s {CUSTOM_LOCAL_FOLDER} /{gen_string("alpha")}')

-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 1

@pytest.mark.tier2
@@ -3206,7 +3206,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
"""
text_file_name = f'test-{gen_string("alpha", 5)}.txt'.lower()
target_sat.execute(f'echo "First File" > /tmp/{text_file_name}')
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -3215,7 +3215,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
}
)
assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
# Assert there is only one file
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(
@@ -3224,7 +3224,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
assert text_file_name == filesearch[0].name
# Create new version of the file by changing the text
target_sat.execute(f'echo "Second File" > /tmp/{text_file_name}')
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
Expand All @@ -3233,7 +3233,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
}
)
assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
# Assert there is still only one file
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(