Skip to content

Commit

Permalink
feat: add paginator for collecting issues
Browse files Browse the repository at this point in the history
  • Loading branch information
ImMin5 committed Sep 6, 2024
1 parent da38060 commit adc170e
Show file tree
Hide file tree
Showing 4 changed files with 92 additions and 39 deletions.
32 changes: 30 additions & 2 deletions src/plugin/connector/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,38 @@ def dispatch_request(
auth = self.get_auth(secret_data)

try:
for response in self._pagination(method, url, headers, auth, params, data):
yield response
max_results = params.get("maxResults")
start_at = params.get("startAt")
total_count = params.get("total_count")
if max_results and start_at and total_count:
for response in self._pagination_with_count(
method, url, headers, auth, params, data
):
yield response

else:
for response in self._pagination(
method, url, headers, auth, params, data
):
yield response

except Exception as e:
_LOGGER.error(f"[dispatch_request] Error {e}")
raise ERROR_UNKNOWN(message=e)

@staticmethod
def _pagination_with_count(
    method: str,
    url: str,
    headers: dict,
    auth: HTTPBasicAuth,
    params: dict,
    data: dict = None,
) -> list:
    """Collect paginated API responses, bounded by caller-supplied counters.

    The caller (``dispatch_request``) only routes here when ``params``
    carries truthy ``startAt``, ``maxResults`` and ``total_count`` values,
    so this loop advances ``startAt`` by ``maxResults`` per request and
    stops once it reaches ``total_count`` — unlike the committed stub,
    which looped forever and only printed.

    :param method: HTTP verb (e.g. "GET").
    :param url: fully-qualified request URL.
    :param headers: HTTP headers to send with every page request.
    :param auth: basic-auth credentials for the API.
    :param params: query parameters; mutated in place to advance ``startAt``.
    :param data: optional JSON request body.
    :return: list of decoded JSON page payloads.
    """
    start_at = params.get("startAt", 0)
    max_results = params.get("maxResults", 100)
    total_count = params.get("total_count", 0)

    responses = []
    # NOTE(review): "total_count" is not a real query parameter of the
    # remote API — it rides along in params; the server is assumed to
    # ignore unknown parameters. Confirm against the API contract.
    while start_at < total_count:
        params["startAt"] = start_at
        response = requests.request(
            method,
            url,
            headers=headers,
            auth=auth,
            params=params,
            json=data,
        )
        _LOGGER.debug(
            f"[_pagination_with_count] {url} {response.status_code} {response.reason}"
        )
        if response.status_code != 200:
            # Surface the failure instead of silently spinning; the
            # caller's except-block converts raises into ERROR_UNKNOWN.
            _LOGGER.error(
                f"[_pagination_with_count] failed: {response.status_code} {response.text}"
            )
            break
        responses.append(response.json())
        start_at += max_results
    return responses

@staticmethod
def _pagination(
method: str,
Expand All @@ -49,6 +74,7 @@ def _pagination(
) -> list:
responses = []
while True:
print(url, params)
response = requests.request(
method,
url,
Expand All @@ -63,6 +89,8 @@ def _pagination(
else:
response_values = response_json.get("values")

print(response_json)

_LOGGER.debug(
f"[dispatch_request] {url} {response.status_code} {response.reason}"
)
Expand Down
93 changes: 60 additions & 33 deletions src/plugin/connector/issue_connector.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import logging
from typing import Generator

import requests

from plugin.connector.base import JiraBaseConnector

_LOGGER = logging.getLogger("spaceone")
Expand All @@ -16,40 +18,65 @@ def search_issue(
self,
secret_data: dict,
project_id_or_key: str,
) -> dict:
params = {
"jql": f"project = '{project_id_or_key}' AND created >= startOfYear(-1) order by created DESC",
"startAt": 0,
"maxResults": 1000,
"fields": [
"summary",
"comment",
"created",
"creator",
"assignee",
"duedate",
"issuelinks",
"issuetype",
"labels",
"lastViewed",
"priority",
"progress",
"project",
"reporter",
"resolution",
"resolutiondate",
"status",
"statuscategorychangedate",
"subtasks",
"updated",
"watches",
],
}
) -> list:
request_url = "rest/api/3/search"
_LOGGER.debug(f"[search_issue] {request_url}")

responses = self.dispatch_request(
"GET", request_url, secret_data, params=params
)
start_at = 0
max_results = 100
while True:
# Set up request
url = f"{self.get_base_url(secret_data)}{request_url}"
headers = {"Accept": "application/json"}
auth = self.get_auth(secret_data)

params = {
"jql": f"project = '{project_id_or_key}' AND created >= startOfYear(-1) order by created DESC",
"startAt": start_at,
"maxResults": max_results,
"fields": [
"summary",
"comment",
"created",
"creator",
"assignee",
"duedate",
"issuelinks",
"issuetype",
"labels",
"lastViewed",
"priority",
"progress",
"project",
"reporter",
"resolution",
"resolutiondate",
"status",
"statuscategorychangedate",
"subtasks",
"updated",
"watches",
"-avatarUrls",
],
}

response = requests.request(
"GET", url, headers=headers, params=params, auth=auth
)

if response.status_code != 200:
_LOGGER.error(
f"Failed to fetch issues: {response.status_code} {response.text}"
)
break

response_json = response.json()
issues = response_json.get("issues", [])
for issue in issues:
yield issue

total = response_json.get("total", 0)
start_at += max_results

return next(responses)
if start_at >= total:
break
5 changes: 2 additions & 3 deletions src/plugin/manager/issue_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,9 +65,8 @@ def collect_cloud_service(self, options, secret_data, schema):
def _list_issue_from_project(
self, project_key: str, domain: str, secret_data: dict
):
for issue_info in self.issue_connector.search_issue(
secret_data, project_key
).get("issues", []):
response_stream = self.issue_connector.search_issue(secret_data, project_key)
for issue_info in response_stream:
reference = {
"resource_id": f"jira:{issue_info['id']}",
"external_link": f"https://{domain}.atlassian.net/browse/{issue_info['key']}",
Expand Down
1 change: 0 additions & 1 deletion src/plugin/metadata/projects/user.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ search:

table:
fields:
- Name: data.displayName
- Email: data.emailAddress
- Account Id: data.accountId
- Active: data.active
Expand Down

0 comments on commit adc170e

Please sign in to comment.