From 76cf6cc70ccb01204cca4ccbac5ee36fd48a853d Mon Sep 17 00:00:00 2001
From: Alex Ionescu
Date: Thu, 12 Oct 2023 15:47:03 +0300
Subject: [PATCH] Bug 1843109 - Add tests in alerts API for profiler links

---
 tests/conftest.py                             | 35 ++++++++++++
 .../webapp/api/test_performance_alerts_api.py | 24 +++++++-
 .../api/test_performance_alertsummary_api.py  |  4 ++
 .../webapp/api/performance_serializers.py     | 40 ++++++++------
 treeherder/webapp/api/utils.py                | 55 +++++++++++++------
 5 files changed, 124 insertions(+), 34 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index bd5e73be954..52927521fb1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -887,6 +887,41 @@ def _fetch_intermittent_bugs(additional_params, limit, duplicate_chain_length):
     )
 
 
+class MockResponse:
+    def __init__(self):
+        self.status_code = 200
+        self.content = {
+            "artifacts": [
+                {
+                    "storageType": "fake",
+                    "name": "fake/path.json",
+                    "expires": "2999-12-31T23:59:59.999Z",
+                    "contentType": "application/octet-stream",
+                }
+            ]
+        }
+
+
+@pytest.fixture
+def mock_get_artifact_list(monkeypatch):
+    import treeherder.webapp.api.utils
+
+    def _mock_get(url, params=None):
+        return MockResponse().content  # fetch_json returns parsed JSON, not a raw response
+
+    monkeypatch.setattr(treeherder.webapp.api.utils, 'fetch_json', _mock_get)
+
+
+@pytest.fixture
+def mock_cache(monkeypatch):
+    from django.core.cache import cache
+
+    def mockreturn_cache(*args, **kwargs):
+        return {"task_id": "some_id", "retry_id": 0}
+
+    monkeypatch.setattr(cache, 'get', mockreturn_cache)
+
+
 @pytest.fixture
 def text_log_error_lines(test_job, failure_lines):
     lines = [
diff --git a/tests/webapp/api/test_performance_alerts_api.py b/tests/webapp/api/test_performance_alerts_api.py
index 71658b148c2..5ea9e5ecad2 100644
--- a/tests/webapp/api/test_performance_alerts_api.py
+++ b/tests/webapp/api/test_performance_alerts_api.py
@@ -37,6 +37,8 @@ def test_alerts_get(
         'series_signature',
         'taskcluster_metadata',
         'prev_taskcluster_metadata',
+        'profile_url',
+        'prev_profile_url',
         'summary_id',
         'status',
         't_value',
@@ -60,8 +62,12 @@ def test_alerts_put(
     client,
     push_stored,
     test_repository,
+    test_perf_datum,
+    test_perf_datum_2,
     test_perf_alert,
     test_perf_alert_summary_2,
+    test_taskcluster_metadata,
+    test_taskcluster_metadata_2,
     test_user,
     test_sheriff,
 ):
@@ -110,8 +116,12 @@ def test_reassign_different_repository(
     push_stored,
     test_repository,
     test_repository_2,
+    test_perf_datum,
+    test_perf_datum_2,
     test_perf_alert,
     test_perf_alert_summary_2,
+    test_taskcluster_metadata,
+    test_taskcluster_metadata_2,
     test_sheriff,
 ):
     # verify that we can't reassign to another performance alert summary
@@ -535,7 +545,15 @@ def test_timestamps_on_manual_created_alert_via_their_endpoints(
     assert manual_alert.created <= manual_alert.first_triaged
 
 
-def test_alert_timestamps_via_endpoint(authorized_sheriff_client, test_sheriff, test_perf_alert):
+def test_alert_timestamps_via_endpoint(
+    authorized_sheriff_client,
+    test_sheriff,
+    test_perf_datum,
+    test_perf_datum_2,
+    test_perf_alert,
+    test_taskcluster_metadata,
+    test_taskcluster_metadata_2,
+):
     # updating autogenerated alert:
     # created doesn't change, last_updated & first_triaged update
     old_created = test_perf_alert.created
@@ -572,10 +590,14 @@ def test_alert_timestamps_via_endpoint(authorized_sheriff_client, test_sheriff,
 def test_related_alerts_timestamps_via_endpoint(
     authorized_sheriff_client,
     test_sheriff,
+    test_perf_datum,
+    test_perf_datum_2,
     test_perf_alert,
     relation,
     test_perf_alert_summary,
     test_perf_alert_summary_2,
+    test_taskcluster_metadata,
+    test_taskcluster_metadata_2,
 ):
     # downstream/reassign use case
     assert test_perf_alert.first_triaged is None
diff --git a/tests/webapp/api/test_performance_alertsummary_api.py b/tests/webapp/api/test_performance_alertsummary_api.py
index 688822d8334..d6998318d9e 100644
--- a/tests/webapp/api/test_performance_alertsummary_api.py
+++ b/tests/webapp/api/test_performance_alertsummary_api.py
@@ -99,6 +99,8 @@ def test_alert_summaries_get(
         'series_signature',
         'taskcluster_metadata',
         'prev_taskcluster_metadata',
+        'profile_url',
+        'prev_profile_url',
         'is_regression',
         'starred',
         'manually_created',
@@ -174,6 +176,8 @@ def test_alert_summaries_get_onhold(
         'series_signature',
         'taskcluster_metadata',
         'prev_taskcluster_metadata',
+        'profile_url',
+        'prev_profile_url',
         'is_regression',
         'starred',
         'manually_created',
diff --git a/treeherder/webapp/api/performance_serializers.py b/treeherder/webapp/api/performance_serializers.py
index a13c62a4fe0..0c4eb26a9dc 100644
--- a/treeherder/webapp/api/performance_serializers.py
+++ b/treeherder/webapp/api/performance_serializers.py
@@ -1,7 +1,7 @@
 import decimal
 
 from django.contrib.auth.models import User
-from django.core.cache import cache, caches
+from django.core.cache import cache
 from django.core.exceptions import ObjectDoesNotExist
 from django.db import transaction
 from rest_framework import exceptions, serializers
@@ -27,12 +27,14 @@ def get_tc_metadata(alert, push):
         repository=alert.series_signature.repository,
         push=push,
     ).first()
-    metadata = TaskclusterMetadata.objects.get(job=datum.job)
-    task_metadata = {
-        'task_id': metadata.task_id,
-        'retry_id': metadata.retry_id,
-    }
-    return task_metadata
+    if datum:
+        metadata = TaskclusterMetadata.objects.get(job=datum.job)
+        return {
+            'task_id': metadata.task_id,
+            'retry_id': metadata.retry_id,
+        }
+    else:
+        return {}
 
 
 class OptionalBooleanField(serializers.BooleanField):
@@ -204,29 +206,33 @@ def update(self, instance, validated_data):
 
     def get_taskcluster_metadata(self, alert):
         try:
-            task_metadata = get_tc_metadata(alert, alert.summary.push)
-            cache.set("task_metadata", task_metadata)
-            return task_metadata
+            taskcluster_metadata = get_tc_metadata(alert, alert.summary.push)
+            cache.delete_many(["task_metadata", "prev_task_metadata"])
+            cache.set("task_metadata", taskcluster_metadata)
+            return taskcluster_metadata
         except ObjectDoesNotExist:
             return {}
 
     def get_prev_taskcluster_metadata(self, alert):
         try:
-            task_metadata = get_tc_metadata(alert, alert.summary.prev_push)
-            cache.set("prev_task_metadata", task_metadata)
-            return task_metadata
+            taskcluster_metadata = get_tc_metadata(alert, alert.summary.prev_push)
+            cache.set("prev_task_metadata", taskcluster_metadata)
+            return taskcluster_metadata
         except ObjectDoesNotExist:
             return {}
 
     def get_profile_url(self, alert):
-        task_metadata = cache.get("task_metadata")
-        url = get_profile_artifact_url(alert, task_metadata)
+        taskcluster_metadata = cache.get("task_metadata") or {}
+        url = get_profile_artifact_url(alert, taskcluster_metadata)
         return url
 
     def get_prev_profile_url(self, alert):
-        prev_task_metadata = cache.get("prev_task_metadata")
-        url = get_profile_artifact_url(alert, prev_task_metadata)
+        # get_prev_taskcluster_metadata caches the previous push metadata under this key
+        prev_taskcluster_metadata = cache.get("prev_task_metadata") or {}
+        url = get_profile_artifact_url(alert, prev_taskcluster_metadata)
+        # drop only the keys set by this serializer instead of clearing the whole cache
+        cache.delete_many(["task_metadata", "prev_task_metadata"])
         return url
diff --git a/treeherder/webapp/api/utils.py b/treeherder/webapp/api/utils.py
index 1fb6f797041..d0047b40d55 100644
--- a/treeherder/webapp/api/utils.py
+++ b/treeherder/webapp/api/utils.py
@@ -1,11 +1,10 @@
-import requests
-import json
-import taskcluster_urls as liburls
-
 import time
 from datetime import datetime, timedelta
 
 import django_filters
+import taskcluster_urls
 from django.db.models import Aggregate, CharField
 
+from treeherder.utils.http import fetch_json
+
 # queries are faster when filtering a range by id rather than name
@@ -55,19 +54,43 @@ def get_end_of_day(date):
     return date + timedelta(days=1, microseconds=-1)
 
 
+def get_artifact_list(root_url, task_id):
+    artifacts_url = taskcluster_urls.api(
+        root_url, 'queue', 'v1', 'task/{}/artifacts'.format(task_id)
+    )
+    artifacts = {"artifacts": []}
+    try:
+        artifacts = fetch_json(artifacts_url)
+    except Exception:
+        # keep the empty default if the artifact list cannot be fetched
+        pass
+
+    return artifacts.get("artifacts", [])
+
+
 def get_profile_artifact_url(alert, task_metadata):
     tc_root_url = alert.summary.repository.tc_root_url
-    index_url = liburls.api(
-        tc_root_url,
-        'queue',
-        'v1',
-        f"task/{task_metadata['task_id']}/runs/{task_metadata['retry_id']}/artifacts"
-    )
-    response = requests.get(index_url)
-    artifacts = json.loads(response.content)
+    if not task_metadata.get('task_id'):
+        return ""
+    artifacts_json = get_artifact_list(tc_root_url, task_metadata.get('task_id'))
     profile_artifact = [
-        artifact for artifact in artifacts["artifacts"]
-        if artifact["name"].startswith("public/test_info/profile_")
-        and artifact["name"].endswith(".zip")
+        artifact
+        for artifact in artifacts_json
+        if artifact.get("name", "").startswith("public/test_info/profile_")
+        and artifact.get("contentType", "") == "application/x-zip-compressed"
     ]
-    return f"{index_url}/{profile_artifact[0]['name']}"
+    if not profile_artifact:
+        return ""
+    artifact_url = "/".join(
+        [
+            tc_root_url,
+            "api/queue/v1/task",
+            task_metadata['task_id'],
+            "runs",
+            str(task_metadata['retry_id']),
+            "artifacts",
+            profile_artifact[0]['name'],
+        ]
+    )
+
+    return artifact_url
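
Note for reviewers: a hypothetical usage sketch, not part of the patch, showing how the new conftest fixtures are meant to be combined. The test name is illustrative; with mock_get_artifact_list active, get_artifact_list() resolves the artifact list from MockResponse instead of querying Taskcluster.

    def test_get_artifact_list_uses_mocked_fetch_json(mock_get_artifact_list):
        # fetch_json is monkeypatched by the fixture, so no network access happens here
        from treeherder.webapp.api import utils

        artifacts = utils.get_artifact_list(
            "https://firefox-ci-tc.services.mozilla.com", "some_task_id"
        )
        # the single artifact comes straight from MockResponse.content
        assert artifacts[0]["name"] == "fake/path.json"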
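
Also for illustration, a minimal standalone sketch of the filtering and URL construction that get_profile_artifact_url performs, using a hard-coded sample artifact list instead of a live Taskcluster query. The sample values (root URL, task id, artifact names) are examples only.

    sample_artifacts = [
        {"name": "public/logs/live_backing.log", "contentType": "text/plain"},
        {
            "name": "public/test_info/profile_browsertime.zip",
            "contentType": "application/x-zip-compressed",
        },
    ]
    task_metadata = {"task_id": "abc123DEF", "retry_id": 0}  # shape produced by get_tc_metadata
    tc_root_url = "https://firefox-ci-tc.services.mozilla.com"

    # keep only profiler artifacts, mirroring the filter in get_profile_artifact_url
    profile_artifact = [
        artifact
        for artifact in sample_artifacts
        if artifact.get("name", "").startswith("public/test_info/profile_")
        and artifact.get("contentType", "") == "application/x-zip-compressed"
    ]

    url = ""
    if profile_artifact:
        url = "/".join(
            [
                tc_root_url,
                "api/queue/v1/task",
                task_metadata["task_id"],
                "runs",
                str(task_metadata["retry_id"]),
                "artifacts",
                profile_artifact[0]["name"],
            ]
        )

    # prints the Taskcluster artifact URL for the profile, e.g.
    # https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/abc123DEF/runs/0/artifacts/public/test_info/profile_browsertime.zip
    print(url)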