diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index fa46d2f5066..382a20b0e48 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -8,6 +8,7 @@ on:
 env:
   PYCURL_SSL_LIBRARY: openssl
   ROBOTTELO_BUGZILLA__API_KEY: ${{ secrets.BUGZILLA_KEY }}
+  ROBOTTELO_JIRA__API_KEY: ${{ secrets.JIRA_KEY }}

 jobs:
   codechecks:
diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index a08485c8f49..92fa9f329a9 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -55,6 +55,7 @@ jobs:
         id: cscheck
         env:
           ROBOTTELO_BUGZILLA__API_KEY: ${{ secrets.BUGZILLA_KEY }}
+          ROBOTTELO_JIRA__API_KEY: ${{ secrets.JIRA_KEY }}

       - name: Customer scenario status
         run: |
diff --git a/conf/jira.yaml.template b/conf/jira.yaml.template
new file mode 100644
index 00000000000..e76ac35f157
--- /dev/null
+++ b/conf/jira.yaml.template
@@ -0,0 +1,5 @@
+JIRA:
+  # URL defaults to 'https://issues.redhat.com' even if not provided.
+  URL: https://issues.redhat.com
+  # Provide an API_KEY to access the Jira REST API.
+  API_KEY: replace-with-jira-api-key
diff --git a/pytest_plugins/metadata_markers.py b/pytest_plugins/metadata_markers.py
index 57b12aa5c1f..bcadb671ae5 100644
--- a/pytest_plugins/metadata_markers.py
+++ b/pytest_plugins/metadata_markers.py
@@ -7,9 +7,22 @@
 from robottelo.config import settings
 from robottelo.hosts import get_sat_rhel_version
 from robottelo.logging import collection_logger as logger
+from robottelo.utils.issue_handlers.jira import are_any_jira_open

 FMT_XUNIT_TIME = '%Y-%m-%dT%H:%M:%S'
 IMPORTANCE_LEVELS = []
+selected = []
+deselected = []
+
+
+def parse_comma_separated_list(option_value):
+    if isinstance(option_value, str):
+        if option_value.lower() == 'true':
+            return True
+        if option_value.lower() == 'false':
+            return False
+        return option_value.split(',')
+    return None


 def pytest_addoption(parser):
@@ -26,6 +39,25 @@ def pytest_addoption(parser):
         '--team',
         help='Comma separated list of teams to include in test collection',
     )
+    parser.addoption(
+        '--blocked-by',
+        type=parse_comma_separated_list,
+        nargs='?',
+        const=True,
+        default=True,
+        help='Comma separated list of Jira issues to collect tests matching the BlockedBy testimony marker. '
+        'If no issue is provided, all tests with the BlockedBy testimony marker are processed '
+        'and deselected if any of their issues is open.',
+    )
+    parser.addoption(
+        '--verifies-issues',
+        type=parse_comma_separated_list,
+        nargs='?',
+        const=True,
+        default=False,
+        help='Comma separated list of Jira issues to collect tests matching the Verifies testimony marker. '
+        'If no issue is provided, all tests with the Verifies testimony marker will be selected.',
+    )


 def pytest_configure(config):
@@ -34,6 +66,8 @@ def pytest_configure(config):
         'importance: CaseImportance testimony token, use --importance to filter',
         'component: Component testimony token, use --component to filter',
         'team: Team testimony token, use --team to filter',
+        'blocked_by: BlockedBy testimony token, use --blocked-by to filter',
+        'verifies_issues: Verifies testimony token, use --verifies-issues to filter',
     ]:
         config.addinivalue_line("markers", marker)

@@ -56,6 +90,57 @@ def pytest_configure(config):
     re.IGNORECASE,
 )

+blocked_by_regex = re.compile(
+    # To match :BlockedBy: SAT-32932
+    r'\s*:BlockedBy:\s*(?P<blocked_by>.*\S*)',
+    re.IGNORECASE,
+)
+
+verifies_regex = re.compile(
+    # To match :Verifies: SAT-32932
+    r'\s*:Verifies:\s*(?P<verifies>.*\S*)',
+    re.IGNORECASE,
+)
+
+
+def handle_verification_issues(item, verifies_marker, verifies_issues):
+    """Handles the logic for deselecting tests based on the Verifies testimony token
+    and the --verifies-issues pytest option.
+    """
+    if verifies_issues:
+        if not verifies_marker:
+            log_and_deselect(item, '--verifies-issues')
+            return False
+        if isinstance(verifies_issues, list):
+            verifies_args = verifies_marker.args[0]
+            if all(issue not in verifies_issues for issue in verifies_args):
+                log_and_deselect(item, '--verifies-issues')
+                return False
+    return True
+
+
+def handle_blocked_by(item, blocked_by_marker, blocked_by):
+    """Handles the logic for deselecting tests based on the BlockedBy testimony token
+    and the --blocked-by pytest option.
+    """
+    if isinstance(blocked_by, list):
+        if not blocked_by_marker:
+            log_and_deselect(item, '--blocked-by')
+            return False
+        if all(issue not in blocked_by for issue in blocked_by_marker.args[0]):
+            log_and_deselect(item, '--blocked-by')
+            return False
+    elif isinstance(blocked_by, bool) and blocked_by_marker:
+        if blocked_by and are_any_jira_open(blocked_by_marker.args[0]):
+            log_and_deselect(item, '--blocked-by')
+            return False
+    return True
+
+
+def log_and_deselect(item, option):
+    logger.debug(f'Deselected test {item.nodeid} due to "{option}" pytest option.')
+    deselected.append(item)
+

 @pytest.hookimpl(tryfirst=True)
 def pytest_collection_modifyitems(items, config):
@@ -81,9 +166,8 @@ def pytest_collection_modifyitems(items, config):
     importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
     component = [c for c in (config.getoption('component') or '').split(',') if c != '']
     team = [a.lower() for a in (config.getoption('team') or '').split(',') if a != '']
-
-    selected = []
-    deselected = []
+    verifies_issues = config.getoption('verifies_issues')
+    blocked_by = config.getoption('blocked_by')
     logger.info('Processing test items to add testimony token markers')
     for item in items:
         item.user_properties.append(
@@ -100,6 +184,8 @@
             for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
             if d is not None
         ]
+        blocked_by_marks_to_add = []
+        verifies_marks_to_add = []
         for docstring in item_docstrings:
             item_mark_names = [m.name for m in item.iter_markers()]
             # Add marker starting at smallest docstring scope
@@ -113,6 +199,18 @@
             doc_team = team_regex.findall(docstring)
             if doc_team and 'team' not in item_mark_names:
                 item.add_marker(pytest.mark.team(doc_team[0].lower()))
+            doc_verifies = verifies_regex.findall(docstring)
+            if doc_verifies and 'verifies_issues' not in item_mark_names:
+                verifies_marks_to_add.extend(str(b.strip()) for b in doc_verifies[-1].split(','))
+            doc_blocked_by = blocked_by_regex.findall(docstring)
+            if doc_blocked_by and 'blocked_by' not in item_mark_names:
+                blocked_by_marks_to_add.extend(
+                    str(b.strip()) for b in doc_blocked_by[-1].split(',')
+                )
+        if blocked_by_marks_to_add:
+            item.add_marker(pytest.mark.blocked_by(blocked_by_marks_to_add))
+        if verifies_marks_to_add:
+            item.add_marker(pytest.mark.verifies_issues(verifies_marks_to_add))

         # add markers as user_properties so they are recorded in XML properties of the report
         # pytest-ibutsu will include user_properties dict in testresult metadata
@@ -136,7 +234,7 @@
         item.user_properties.append(("SnapVersion", snap_version))

         # exit early if no filters were passed
-        if importance or component or team:
+        if importance or component or team or verifies_issues or blocked_by:
             # Filter test collection based on CLI options for filtering
             # filters should be applied together
             # such that --component Repository --importance Critical --team rocket
@@ -169,6 +267,15 @@
                 deselected.append(item)
                 continue

+            # Filter tests based on the --verifies-issues and --blocked-by pytest options
+            # and the Verifies and BlockedBy testimony tokens.
+            verifies_marker = item.get_closest_marker('verifies_issues', False)
+            blocked_by_marker = item.get_closest_marker('blocked_by', False)
+            if not handle_verification_issues(item, verifies_marker, verifies_issues):
+                continue
+            if not handle_blocked_by(item, blocked_by_marker, blocked_by):
+                continue
+
             selected.append(item)

     # selected will be empty if no filter option was passed, defaulting to full items list
diff --git a/robottelo/config/validators.py b/robottelo/config/validators.py
index 013be110fe0..1a6a6d7f4aa 100644
--- a/robottelo/config/validators.py
+++ b/robottelo/config/validators.py
@@ -189,6 +189,10 @@
             must_exist=True,
         ),
     ],
+    jira=[
+        Validator('jira.url', default='https://issues.redhat.com'),
+        Validator('jira.api_key', must_exist=True),
+    ],
     ldap=[
         Validator(
             'ldap.basedn',
diff --git a/robottelo/constants/__init__.py b/robottelo/constants/__init__.py
index 8857efd00a3..d9a60340a3f 100644
--- a/robottelo/constants/__init__.py
+++ b/robottelo/constants/__init__.py
@@ -1738,10 +1738,15 @@ class Colored(Box):
     ),
 }

-
+# Bugzilla statuses used by the Robottelo issue handler.
 OPEN_STATUSES = ("NEW", "ASSIGNED", "POST", "MODIFIED")
 CLOSED_STATUSES = ("ON_QA", "VERIFIED", "RELEASE_PENDING", "CLOSED")
 WONTFIX_RESOLUTIONS = ("WONTFIX", "CANTFIX", "DEFERRED")
+# Jira statuses used by the Robottelo issue handler.
+JIRA_OPEN_STATUSES = ("New", "Backlog", "Refinement", "To Do", "In Progress")
+JIRA_ONQA_STATUS = "Review"
+JIRA_CLOSED_STATUSES = ("Release Pending", "Closed")
+JIRA_WONTFIX_RESOLUTIONS = "Obsolete"

 GROUP_MEMBERSHIP_MAPPER = {
     "config": {
diff --git a/robottelo/utils/issue_handlers/README.md b/robottelo/utils/issue_handlers/README.md
index 1ef130583d0..8661a4965e5 100644
--- a/robottelo/utils/issue_handlers/README.md
+++ b/robottelo/utils/issue_handlers/README.md
@@ -13,7 +13,7 @@ Issue handler should expose 3 functions.

 ### `is_open_<handler_code>(issue, data=None)`

-e.g: `is_open_bz, is_open_gh, is_open_jr` for Bugzilla, Github and Jira.
+e.g: `is_open_bz, is_open_gh, is_open_jira` for Bugzilla, Github and Jira.

 This function is dispatched from `robottelo.helpers.is_open` that is also used
 to check for status in the `pytest.mark.skip_if_open` marker.
@@ -78,10 +78,10 @@ Example of `collected_data`:
 ## Issue handlers implemented

 - `.bugzilla.py`: BZ:123456
+- `.jira.py`: SAT-22761

 ## Issue handlers to be implemented

 - `.github.py`: GH:satelliteqe/robottelo#123
 - `.gitlab.py`: GL:path/to/repo#123
-- `.jira.py`: JR:SATQE-4561
 - `.redmine.py`: RM:pulp.plan.io#5580
diff --git a/robottelo/utils/issue_handlers/__init__.py b/robottelo/utils/issue_handlers/__init__.py
index d59c97aec63..e0ce3bed460 100644
--- a/robottelo/utils/issue_handlers/__init__.py
+++ b/robottelo/utils/issue_handlers/__init__.py
@@ -1,8 +1,10 @@
+import re
+
 # Methods related to issue handlers in general
-from robottelo.utils.issue_handlers import bugzilla
+from robottelo.utils.issue_handlers import bugzilla, jira

-handler_methods = {'BZ': bugzilla.is_open_bz}
-SUPPORTED_HANDLERS = tuple(f"{handler}:" for handler in handler_methods)
+handler_methods = {'BZ': bugzilla.is_open_bz, 'SAT': jira.is_open_jira}
+SUPPORTED_HANDLERS = tuple(f"{handler}" for handler in handler_methods)


 def add_workaround(data, matches, usage, validation=(lambda *a, **k: True), **kwargs):
@@ -16,10 +18,11 @@ def add_workaround(data, matches, usage, validation=(lambda *a, **k: True), **kw

 def should_deselect(issue, data=None):
     """Check if test should be deselected based on marked issue."""
     # Handlers can be extended to support different issue trackers.
-    handlers = {'BZ': bugzilla.should_deselect_bz}
-    supported_handlers = tuple(f"{handler}:" for handler in handlers)
-    if str(issue).startswith(supported_handlers):
-        handler_code = str(issue).partition(":")[0]
+    res = re.split(':|-', issue)
+    handlers = {'BZ': bugzilla.should_deselect_bz, 'SAT': jira.should_deselect_jira}
+    supported_handlers = tuple(f"{handler}" for handler in handlers)
+    if str(res[0]).startswith(supported_handlers):
+        handler_code = res[0]
         return handlers[handler_code.strip()](issue.strip(), data)
     return None

@@ -29,19 +32,20 @@ def is_open(issue, data=None):

     Issue must be prefixed by its handler e.g:

-    Bugzilla: BZ:123456
+    Bugzilla: BZ:123456, Jira: SAT-12345

     Arguments:
         issue {str} -- A string containing handler + number e.g: BZ:123465
         data {dict} -- Issue data indexed by <handler>:<number> or None
     """
+    res = re.split(':|-', issue)
     # Handlers can be extended to support different issue trackers.
-    if str(issue).startswith(SUPPORTED_HANDLERS):
-        handler_code = str(issue).partition(":")[0]
+    if str(res[0]).startswith(SUPPORTED_HANDLERS):
+        handler_code = res[0]
     else:  # EAFP
         raise AttributeError(
             "is_open argument must be a string starting with a handler code "
-            "e.g: 'BZ:123456'"
+            "e.g: 'BZ:123456' for Bugzilla and 'SAT-12345' for Jira. "
             f"supported handlers are: {SUPPORTED_HANDLERS}"
         )
     return handler_methods[handler_code.strip()](issue.strip(), data)
diff --git a/robottelo/utils/issue_handlers/bugzilla.py b/robottelo/utils/issue_handlers/bugzilla.py
index 20836a3660d..dd1c35da2ab 100644
--- a/robottelo/utils/issue_handlers/bugzilla.py
+++ b/robottelo/utils/issue_handlers/bugzilla.py
@@ -137,7 +137,7 @@ def collect_data_bz(collected_data, cached_data):  # pragma: no cover


 def collect_dupes(bz, collected_data, cached_data=None):  # pragma: no cover
-    """Recursivelly find for duplicates"""
+    """Recursively find duplicates"""
     cached_data = cached_data or {}
     if bz.get('resolution') == 'DUPLICATE':
         # Collect duplicates
@@ -180,15 +180,15 @@ def collect_clones(bz, collected_data, cached_data=None):  # pragma: no cover


 @retry(
-    stop=stop_after_attempt(4),  # Retry 3 times before raising
-    wait=wait_fixed(20),  # Wait seconds between retries
+    stop=stop_after_attempt(4),
+    wait=wait_fixed(20),
 )
 def get_data_bz(bz_numbers, cached_data=None):  # pragma: no cover
     """Get a list of marked BZ data and query Bugzilla REST API.

     Arguments:
         bz_numbers {list of str} -- ['123456', ...]
-        cached_data
+        cached_data {dict} -- Cached data previously loaded from the API

     Returns:
         [list of dicts] -- [{'id':..., 'status':..., 'resolution': ...}]
diff --git a/robottelo/utils/issue_handlers/jira.py b/robottelo/utils/issue_handlers/jira.py
new file mode 100644
index 00000000000..dfeb1c966c4
--- /dev/null
+++ b/robottelo/utils/issue_handlers/jira.py
@@ -0,0 +1,274 @@
+from collections import defaultdict
+import re
+
+from packaging.version import Version
+import pytest
+import requests
+from tenacity import retry, stop_after_attempt, wait_fixed
+
+from robottelo.config import settings
+from robottelo.constants import (
+    JIRA_CLOSED_STATUSES,
+    JIRA_ONQA_STATUS,
+    JIRA_OPEN_STATUSES,
+    JIRA_WONTFIX_RESOLUTIONS,
+)
+from robottelo.hosts import get_sat_version
+from robottelo.logging import logger
+
+# match any version as in `sat-6.14.x` or `sat-6.13.0` or `6.13.9`
+# The .version group being a `d.d` string that can be cast to Version()
+VERSION_RE = re.compile(r'(?:sat-)*?(?P<version>\d\.\d)\.\w*')
+
+
+def is_open_jira(issue, data=None):
+    """Check if a specific Jira issue is open, consulting a cached `data` dict or
+    calling the Jira REST API.
+
+    Arguments:
+        issue {str} -- The Jira reference e.g: SAT-20548
+        data {dict} -- Issue data indexed by <handler>:<number> or None
+    """
+    jira = try_from_cache(issue, data)
+    if jira.get("is_open") is not None:  # issue has been already processed
+        return jira["is_open"]
+
+    jira = follow_duplicates(jira)
+    status = jira.get('status', '')
+    resolution = jira.get('resolution', '')
+
+    # Jira is explicitly in OPEN status
+    if status in JIRA_OPEN_STATUSES:
+        return True
+
+    # Jira is Closed/Obsolete, so it is considered not fixed yet; Jira is open
+    if status in JIRA_CLOSED_STATUSES and resolution in JIRA_WONTFIX_RESOLUTIONS:
+        return True
+
+    # Jira is Closed with a resolution in (Done, Done-Errata, ...)
+    # and server.version is higher than or equal to the Jira fixVersion
+    # Consider it fixed, Jira is not open
+    fix_version = jira.get('fixVersions')
+    if fix_version:
+        return get_sat_version() < Version(min(fix_version))
+    return status not in JIRA_CLOSED_STATUSES and status != JIRA_ONQA_STATUS
+
+
+def are_all_jira_open(issues, data=None):
+    """Check if all Jira issues are open, consulting a cached `data` dict or
+    calling the Jira REST API.
+
+    Arguments:
+        issues {list} -- The Jira references e.g: ['SAT-20548', 'SAT-20548']
+        data {dict} -- Issue data indexed by <handler>:<number> or None
+    """
+    return all(is_open_jira(issue, data) for issue in issues)
+
+
+def are_any_jira_open(issues, data=None):
+    """Check if any of the Jira issues is open, consulting a cached `data` dict or
+    calling the Jira REST API.
+
+    Arguments:
+        issues {list} -- The Jira references e.g: ['SAT-20548', 'SAT-20548']
+        data {dict} -- Issue data indexed by <handler>:<number> or None
+    """
+    return any(is_open_jira(issue, data) for issue in issues)
+
+
+def should_deselect_jira(issue, data=None):
+    """Check if test should be deselected based on marked issue.
+
+    1. Resolution "Obsolete" should deselect
+
+    Arguments:
+        issue {str} -- The Jira reference e.g: SAT-12345
+        data {dict} -- Issue data indexed by <handler>:<number> or None
+    """
+
+    jira = try_from_cache(issue, data)
+    if jira.get("is_deselected") is not None:  # issue has been already processed
+        return jira["is_deselected"]
+
+    jira = follow_duplicates(jira)
+
+    return (
+        jira.get('status') in JIRA_CLOSED_STATUSES
+        and jira.get('resolution') in JIRA_WONTFIX_RESOLUTIONS
+    )
+
+
+def follow_duplicates(jira):
+    """Recursively load the duplicate data"""
+    if jira.get('dupe_data'):
+        jira = follow_duplicates(jira['dupe_data'])
+    return jira
+
+
+def try_from_cache(issue, data=None):
+    """Try to fetch the issue from the given data cache or from data previously loaded by pytest.
+
+    Arguments:
+        issue {str} -- The Jira reference e.g: SAT-12345
+        data {dict} -- Issue data indexed by <handler>:<number> or None
+    """
+    try:
+        # issue must be passed in the `data` argument or have been already fetched by pytest
+        if not data and not len(pytest.issue_data[issue]['data']):
+            raise ValueError
+        return data or pytest.issue_data[issue]['data']
+    except (KeyError, AttributeError, ValueError):  # pragma: no cover
+        # If not, then call the Jira API again
+        return get_single_jira(str(issue))
+
+
+def collect_data_jira(collected_data, cached_data):  # pragma: no cover
+    """Collect data from the Jira API and aggregate it in a dictionary.
+
+    Arguments:
+        collected_data {dict} -- dict with Jira issues collected by pytest
+        cached_data {dict} -- Cached data previously loaded from the API
+    """
+    jira_data = (
+        get_data_jira(
+            [item for item in collected_data if item.startswith('SAT-')],
+            cached_data=cached_data,
+        )
+        or []
+    )
+    for data in jira_data:
+        # If Jira is CLOSED/DUPLICATE collect the duplicate
+        collect_dupes(data, collected_data, cached_data=cached_data)
+
+        jira_key = f"{data['key']}"
+        data["is_open"] = is_open_jira(jira_key, data)
+        collected_data[jira_key]['data'] = data
+
+
+def collect_dupes(jira, collected_data, cached_data=None):  # pragma: no cover
+    """Recursively find duplicates"""
+    cached_data = cached_data or {}
+    if jira.get('resolution') == 'Duplicate':
+        # Collect duplicates
+        jira['dupe_data'] = get_single_jira(jira.get('dupe_of'), cached_data=cached_data)
+        dupe_key = f"{jira['dupe_of']}"
+        # Store the duplicate also in the main collection for caching
+        if dupe_key not in collected_data:
+            collected_data[dupe_key]['data'] = jira['dupe_data']
+            collected_data[dupe_key]['is_dupe'] = True
+        collect_dupes(jira['dupe_data'], collected_data, cached_data)
+
+
+# --- API Calls ---
+
+# cannot use lru_cache in functions that have unhashable args
+CACHED_RESPONSES = defaultdict(dict)
+
+
+@retry(
+    stop=stop_after_attempt(4),  # Retry 3 times before raising
+    wait=wait_fixed(20),  # Wait seconds between retries
+)
+def get_data_jira(jira_numbers, cached_data=None):  # pragma: no cover
+    """Get a list of marked Jira data and query the Jira REST API.
+
+    Arguments:
+        jira_numbers {list of str} -- ['SAT-12345', ...]
+        cached_data {dict} -- Cached data previously loaded from the API
+
+    Returns:
+        [list of dicts] -- [{'id':..., 'status':..., 'resolution': ...}]
+    """
+    if not jira_numbers:
+        return []
+
+    cached_by_call = CACHED_RESPONSES['get_data'].get(str(sorted(jira_numbers)))
+    if cached_by_call:
+        return cached_by_call
+
+    if cached_data:
+        logger.debug(f"Using cached data for {set(jira_numbers)}")
+        if not all([f'{number}' in cached_data for number in jira_numbers]):
+            logger.debug("There are Jira issues out of cache.")
+        return [item['data'] for _, item in cached_data.items() if 'data' in item]
+
+    # Ensure the API key is set
+    if not settings.jira.api_key:
+        logger.warning(
+            "Config file is missing the jira api_key, "
+            "so all tests with the skip_if_open mark are skipped. "
+            "Provide an api_key or a jira_cache.json."
+        )
+        # Provide default data for the collected Jira issues.
+        return [get_default_jira(number) for number in jira_numbers]
+
+    # No cached data, so call the Jira API
+    logger.debug(f"Calling Jira API for {set(jira_numbers)}")
+    jira_fields = [
+        "key",
+        "summary",
+        "status",
+        "resolution",
+        "fixVersions",
+    ]
+    # The following fields are dynamically calculated/loaded
+    for field in ('is_open', 'version'):
+        assert field not in jira_fields
+
+    # Generate the jql
+    jql = ' OR '.join([f"id = {id}" for id in jira_numbers])
+
+    response = requests.get(
+        f"{settings.jira.url}/rest/api/latest/search/",
+        params={
+            "jql": jql,
+            "fields": ",".join(jira_fields),
+        },
+        headers={"Authorization": f"Bearer {settings.jira.api_key}"},
+    )
+    response.raise_for_status()
+    data = response.json().get('issues')
+    # Clean the data, only keep the required info.
+    data = [
+        {
+            'key': issue['key'],
+            'summary': issue['fields']['summary'],
+            'status': issue['fields']['status']['name'],
+            'resolution': issue['fields']['resolution']['name']
+            if issue['fields']['resolution']
+            else '',
+            'fixVersions': [ver['name'] for ver in issue['fields']['fixVersions']]
+            if issue['fields']['fixVersions']
+            else [],
+        }
+        for issue in data
+        if issue is not None
+    ]
+    CACHED_RESPONSES['get_data'][str(sorted(jira_numbers))] = data
+    return data
+
+
+def get_single_jira(number, cached_data=None):  # pragma: no cover
+    """Call the Jira API to get data for a single Jira issue and cache it"""
+    cached_data = cached_data or {}
+    jira_data = CACHED_RESPONSES['get_single'].get(number)
+    if not jira_data:
+        try:
+            jira_data = cached_data[f"{number}"]['data']
+        except (KeyError, TypeError):
+            jira_data = get_data_jira([str(number)], cached_data)
+            jira_data = jira_data and jira_data[0]
+        CACHED_RESPONSES['get_single'][number] = jira_data
+    return jira_data or get_default_jira(number)
+
+
+def get_default_jira(number):  # pragma: no cover
+    """Default Jira data used when it is not possible to reach the Jira API"""
+    return {
+        "key": number,
+        "is_open": True,
+        "is_deselected": False,
+        "status": "",
+        "resolution": "",
+        "error": "missing jira api_key",
+    }
diff --git a/testimony.yaml b/testimony.yaml
index 312e59f197b..1088ac8f33a 100644
--- a/testimony.yaml
+++ b/testimony.yaml
@@ -1,6 +1,8 @@
 Team:
   required: true
 BZ: {}
+BlockedBy: {}
+Verifies: {}
 CaseAutomation:
   casesensitive: true
   choices:
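
The notes below are editorial additions, not part of the patch. First, a sketch of how the new testimony tokens are expected to appear in a test docstring; the test name, id and expected results are placeholders, while the issue keys reuse examples from the patch. During collection, `verifies_regex`/`blocked_by_regex` read these tokens and attach `pytest.mark.verifies_issues` / `pytest.mark.blocked_by` markers, which the new `--verifies-issues` / `--blocked-by` options then filter on; multiple issues per token can be given comma separated.

```python
# Hypothetical test module -- illustrative only.
def test_positive_sync_repository():
    """Verify repository sync after the referenced fix.

    :id: 00000000-0000-0000-0000-000000000000

    :Verifies: SAT-20548

    :BlockedBy: SAT-22761

    :expectedresults: The repository syncs successfully.
    """


# Example invocations (assumed, matching the option help text above):
#   pytest tests/foreman --verifies-issues SAT-20548   # select tests verifying SAT-20548
#   pytest tests/foreman --blocked-by SAT-22761        # keep only tests blocked by SAT-22761
#   pytest tests/foreman                                # default --blocked-by=True deselects tests
#                                                       # whose BlockedBy issues are still open
```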
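
Both options funnel their raw command-line value through `parse_comma_separated_list`. A small self-contained restatement of its behavior; the function body is copied from the patch, and the issue keys in the assertions are placeholders.

```python
def parse_comma_separated_list(option_value):
    # copied from pytest_plugins/metadata_markers.py above
    if isinstance(option_value, str):
        if option_value.lower() == 'true':
            return True
        if option_value.lower() == 'false':
            return False
        return option_value.split(',')
    return None


assert parse_comma_separated_list('true') is True                       # --blocked-by true
assert parse_comma_separated_list('False') is False                     # --blocked-by false (disable)
assert parse_comma_separated_list('SAT-1,SAT-2') == ['SAT-1', 'SAT-2']  # explicit issue list
assert parse_comma_separated_list(None) is None                         # non-string values fall through
```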
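
The handler dispatch in `robottelo/utils/issue_handlers/__init__.py` now splits the reference on either `:` or `-`, so colon-style Bugzilla references and dash-style Jira keys both resolve to a handler code. A quick runnable illustration of that split:

```python
import re

for issue in ('BZ:123456', 'SAT-12345'):
    handler_code = re.split(':|-', issue)[0]
    print(f'{issue!r} -> handler {handler_code!r}')
# BZ:123456 -> handler 'BZ'
# SAT-12345 -> handler 'SAT'
```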
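
When a Jira issue is Closed with a regular resolution, `is_open_jira` falls back to comparing the Satellite version with the issue's earliest `fixVersions` entry. A simplified sketch of that comparison, assuming Jira reports plain `major.minor.patch` strings; the real code takes the server version from `get_sat_version()`.

```python
from packaging.version import Version


def fixed_but_still_open(sat_version, fix_versions):
    # Mirrors `get_sat_version() < Version(min(fix_version))` from is_open_jira:
    # the issue counts as open while the server is older than the earliest fix version.
    return Version(sat_version) < Version(min(fix_versions))


print(fixed_but_still_open('6.13.5', ['6.14.0']))  # True  -> treated as open
print(fixed_but_still_open('6.15.0', ['6.14.0']))  # False -> fix already shipped
```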
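
Finally, a hedged sketch of how the new handler is exercised from test code once `conf/jira.yaml` carries a valid `API_KEY`; the issue key is only an example, and the ad-hoc check performs a live REST request unless the pytest session has already cached the issue data.

```python
import pytest

from robottelo.utils.issue_handlers import is_open


# The existing skip_if_open marker dispatches through is_open (see the README above),
# so Jira keys can now be used alongside Bugzilla references such as 'BZ:123456'.
@pytest.mark.skip_if_open('SAT-22761')
def test_feature_waiting_on_fix():
    ...


# Ad-hoc check, e.g. from a fixture or a REPL:
if is_open('SAT-22761'):
    print('SAT-22761 is still open')
```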