Fix test failures from cli.factory refactor
(cherry picked from commit 5fac78b)
jameerpathan111 authored and JacobCallahan committed Dec 14, 2023
1 parent d9c3d03 commit cd96a26
Showing 1 changed file with 32 additions and 32 deletions.
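
The change is a mechanical namespace swap throughout tests/foreman/cli/test_repository.py: read and sync calls on entity wrappers such as Repository and Product move from target_sat.cli_factory to target_sat.cli, while creation helpers like make_org, make_lifecycle_environment, and make_content_view stay on cli_factory. A minimal sketch of the new call path, assuming robottelo's target_sat fixture and a repo dict as used in these tests (the sync_and_verify helper is illustrative, not part of the commit):

    def sync_and_verify(target_sat, repo):
        """Illustrative only: synchronize a repository and confirm success via the cli namespace."""
        # Old call path removed by this commit:
        #   target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
        # New call path used by the updated tests:
        target_sat.cli.Repository.synchronize({'id': repo['id']})
        synced = target_sat.cli.Repository.info({'id': repo['id']})
        assert synced['sync']['status'] == 'Success'
        return synced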
64 changes: 32 additions & 32 deletions tests/foreman/cli/test_repository.py
@@ -2553,8 +2553,8 @@ def test_positive_sync_ansible_collection(self, repo, module_target_sat):
:parametrized: yes
"""
-module_target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = module_target_sat.cli_factory.Repository.info({'id': repo['id']})
+module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = module_target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'

@pytest.mark.tier2
@@ -2586,8 +2586,8 @@ def test_positive_export_ansible_collection(self, repo, module_org, target_sat):
"""
import_org = target_sat.cli_factory.make_org()
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
# export
result = target_sat.cli.ContentExport.completeLibrary({'organization-id': module_org.id})
@@ -2603,15 +2603,15 @@ def test_positive_export_ansible_collection(self, repo, module_org, target_sat):
{'name': 'Import-Library', 'organization-label': import_org['label']}
)
assert cv['description'] == 'Content View used for importing into library'
-prods = target_sat.cli_factory.Product.list({'organization-id': import_org['id']})
-prod = target_sat.cli_factory.Product.info(
+prods = target_sat.cli.Product.list({'organization-id': import_org['id']})
+prod = target_sat.cli.Product.info(
{'id': prods[0]['id'], 'organization-id': import_org['id']}
)
ac_content = [
cont for cont in prod['content'] if cont['content-type'] == 'ansible_collection'
]
assert len(ac_content) > 0
-repo = target_sat.cli_factory.Repository.info(
+repo = target_sat.cli.Repository.info(
{'name': ac_content[0]['repo-name'], 'product-id': prod['id']}
)
result = target_sat.execute(f'curl {repo["published-at"]}')
@@ -2646,8 +2646,8 @@ def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat)
"""
import_org = target_sat.cli_factory.make_org()
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
published_url = repo['published-at']
# sync from different org
@@ -2664,8 +2664,8 @@ def test_positive_sync_ansible_collection_from_satellite(self, repo, target_sat)
[{ name: theforeman.operations, version: "0.1.0"}]}',
}
)
-target_sat.cli_factory.Repository.synchronize({'id': repo_2['id']})
-repo_2_status = target_sat.cli_factory.Repository.info({'id': repo_2['id']})
+target_sat.cli.Repository.synchronize({'id': repo_2['id']})
+repo_2_status = target_sat.cli.Repository.info({'id': repo_2['id']})
assert repo_2_status['sync']['status'] == 'Success'


@@ -2689,8 +2689,8 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, target_sat):
lifecycle environment
"""
lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-synced_repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+synced_repo = target_sat.cli.Repository.info({'id': repo['id']})
assert synced_repo['sync']['status'].lower() == 'success'
assert synced_repo['content-counts']['packages'] == '35'
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
@@ -2724,7 +2724,7 @@ def test_positive_sync(self, repo, module_org, module_product, target_sat):
:expectedresults: drpms can be listed in repository
"""
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
result = target_sat.execute(
f"ls /var/lib/pulp/published/yum/https/repos/{module_org.label}/Library"
f"/custom/{module_product.label}/{repo['label']}/drpms/ | grep .drpm"
@@ -2747,7 +2747,7 @@ def test_positive_sync_publish_cv(self, repo, module_org, module_product, target
:expectedresults: drpms can be listed in content view
"""
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
target_sat.cli.ContentView.publish({'id': cv['id']})
@@ -2776,7 +2776,7 @@ def test_positive_sync_publish_promote_cv(self, repo, module_org, module_product
lifecycle environment
"""
lce = target_sat.cli_factory.make_lifecycle_environment({'organization-id': module_org.id})
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
cv = target_sat.cli_factory.make_content_view({'organization-id': module_org.id})
target_sat.cli.ContentView.add_repository({'id': cv['id'], 'repository-id': repo['id']})
target_sat.cli.ContentView.publish({'id': cv['id']})
@@ -3012,7 +3012,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
local_path=DataFile.RPM_TO_UPLOAD,
remote_path=f"/tmp/{RPM_TO_UPLOAD}",
)
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -3021,7 +3021,7 @@ def test_positive_upload_file_to_file_repo(self, repo_options, repo, target_sat)
}
)
assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(
query={"search": f"name={RPM_TO_UPLOAD} and repository={repo['name']}"}
@@ -3077,7 +3077,7 @@ def test_positive_remove_file(self, repo, target_sat):
local_path=DataFile.RPM_TO_UPLOAD,
remote_path=f"/tmp/{RPM_TO_UPLOAD}",
)
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -3086,13 +3086,13 @@ def test_positive_remove_file(self, repo, target_sat):
}
)
assert f"Successfully uploaded file '{RPM_TO_UPLOAD}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 0
files = target_sat.cli.File.list({'repository-id': repo['id']})
-target_sat.cli_factory.Repository.remove_content(
+target_sat.cli.Repository.remove_content(
{'id': repo['id'], 'ids': [file_['id'] for file_ in files]}
)
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert repo['content-counts']['files'] == '0'

@pytest.mark.tier2
@@ -3130,8 +3130,8 @@ def test_positive_remote_directory_sync(self, repo, module_target_sat):
:expectedresults: entire directory is synced over http
"""
-module_target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = module_target_sat.cli_factory.Repository.info({'id': repo['id']})
+module_target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = module_target_sat.cli.Repository.info({'id': repo['id']})
assert repo['sync']['status'] == 'Success'
assert repo['content-counts']['files'] == '2'

@@ -3168,8 +3168,8 @@ def test_positive_file_repo_local_directory_sync(self, repo, target_sat):
f'wget -P {CUSTOM_LOCAL_FOLDER} -r -np -nH --cut-dirs=5 -R "index.html*" '
f'{CUSTOM_FILE_REPO}'
)
-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 1

@pytest.mark.tier2
@@ -3208,8 +3208,8 @@ def test_positive_symlinks_sync(self, repo, target_sat):
)
target_sat.execute(f'ln -s {CUSTOM_LOCAL_FOLDER} /{gen_string("alpha")}')

-target_sat.cli_factory.Repository.synchronize({'id': repo['id']})
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+target_sat.cli.Repository.synchronize({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
assert int(repo['content-counts']['files']) > 1

@pytest.mark.tier2
@@ -3242,7 +3242,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
"""
text_file_name = f'test-{gen_string("alpha", 5)}.txt'.lower()
target_sat.execute(f'echo "First File" > /tmp/{text_file_name}')
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
@@ -3251,7 +3251,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
}
)
assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
# Assert there is only one file
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(
@@ -3260,7 +3260,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
assert text_file_name == filesearch[0].name
# Create new version of the file by changing the text
target_sat.execute(f'echo "Second File" > /tmp/{text_file_name}')
-result = target_sat.cli_factory.Repository.upload_content(
+result = target_sat.cli.Repository.upload_content(
{
'name': repo['name'],
'organization': repo['organization'],
Expand All @@ -3269,7 +3269,7 @@ def test_file_repo_contains_only_newer_file(self, repo_options, repo, target_sat
}
)
assert f"Successfully uploaded file '{text_file_name}'" in result[0]['message']
-repo = target_sat.cli_factory.Repository.info({'id': repo['id']})
+repo = target_sat.cli.Repository.info({'id': repo['id']})
# Assert there is still only one file
assert repo['content-counts']['files'] == '1'
filesearch = entities.File().search(