Scaling points for PRs based on size of the PR #409

Closed
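
This PR scales the points awarded for a merged PR with its scope: each activity now earns the base points for its type plus one extra point per issue linked to the PR. A minimal sketch of the new scoring rule in lib/api.ts (the base point values here are hypothetical):

    # Scoring rule added in lib/api.ts, sketched in Python; base values are hypothetical.
    points = {"pr_merged": 7}  # base points per activity type
    activity = {"type": "pr_merged", "linked_issues": ["https://github.com/org/repo/issues/123"]}

    total = points.get(activity["type"], 0) + len(activity.get("linked_issues", []))
    print(total)  # 8 -> 7 base points + 1 linked issue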
1 change: 0 additions & 1 deletion app/api/leaderboard/functions.ts
@@ -15,7 +15,6 @@ export const getLeaderboardData = async (
roles: ("core" | "intern" | "operations" | "contributor")[] = [],
) => {
const contributors = await getContributors();

const data = contributors
.filter((a) => a.highlights.points)
.map((contributor) => ({
33 changes: 29 additions & 4 deletions lib/api.ts
@@ -115,9 +115,17 @@ export async function getContributorBySlug(file: string, detail = false) {
return {
activity: [
...acc.activity,
{ ...activity, points: points[activity.type] || 0 },
{
...activity,
points:
(points[activity.type] || 0) +
(activity.linked_issues?.length || 0),
},
],
points: acc.points + (points[activity.type] || 0),
points:
acc.points +
(points[activity.type] || 0) +
(activity.linked_issues?.length || 0),
comment_created:
acc.comment_created + (activity.type === "comment_created" ? 1 : 0),
eod_update: acc.eod_update + (activity.type === "eod_update" ? 1 : 0),
@@ -131,6 +139,8 @@ export async function getContributorBySlug(file: string, detail = false) {
acc.issue_assigned + (activity.type === "issue_assigned" ? 1 : 0),
issue_opened:
acc.issue_opened + (activity.type === "issue_opened" ? 1 : 0),
linked_issues:
acc.linked_issues + (activity.linked_issues?.length || 0),
};
},
{
@@ -144,6 +154,7 @@ export async function getContributorBySlug(file: string, detail = false) {
pr_reviewed: 0,
issue_assigned: 0,
issue_opened: 0,
linked_issues: 0,
} as Highlights & { activity: Activity[] },
);

@@ -181,12 +192,13 @@ export async function getContributorBySlug(file: string, detail = false) {
pr_collaborated: weightedActivity.pr_collaborated,
issue_assigned: weightedActivity.issue_assigned,
issue_opened: weightedActivity.issue_opened,
linked_issues: weightedActivity.linked_issues,
},
weekSummary: getLastWeekHighlights(calendarData),
summarize,
calendarData: detail ? calendarData : [],
...data,
} as Contributor & { summarize: typeof summarize };
} as unknown as Contributor & { summarize: typeof summarize };
}

let contributors: Awaited<ReturnType<typeof getContributorBySlug>>[] | null =
@@ -218,6 +230,15 @@ function getCalendarData(activity: Activity[]) {
} else {
acc[date][activity.type] = 1;
}

if (activity.type === "pr_merged") {
if (acc[date]["linked_issues"]) {
acc[date]["linked_issues"] += activity.linked_issues?.length || 0;
} else {
acc[date]["linked_issues"] = activity.linked_issues?.length || 0;
}
}

if (!acc[date].types.includes(activity.type)) {
acc[date].types.push(activity.type);
// console.log(activity.type);
@@ -263,9 +284,11 @@ const computePoints = (
calendarDataEntry: Highlights,
initialPoints: number,
) => {
return HIGHLIGHT_KEYS.map(
let totalPoints = HIGHLIGHT_KEYS.map(
(key) => points[key] * (calendarDataEntry[key] ?? 0),
).reduce((a, b) => a + b, initialPoints);
totalPoints += calendarDataEntry.linked_issues || 0;
return totalPoints;
};
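
Note that, unlike the HIGHLIGHT_KEYS above, linked_issues is added to the total unweighted: a flat one point per linked issue rather than points[key] * count. The same computation sketched in Python (the per-key weights are hypothetical):

    # computePoints sketched in Python; the per-key weights are hypothetical.
    points = {"pr_merged": 7, "issue_opened": 2}
    entry = {"pr_merged": 1, "issue_opened": 3, "linked_issues": 2}

    total = sum(points[k] * entry.get(k, 0) for k in points)  # weighted highlight keys
    total += entry.get("linked_issues", 0)                    # flat +1 per linked issue
    print(total)  # 7*1 + 2*3 + 2 = 15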

const HighlightsReducer = (acc: Highlights, day: Highlights) => {
@@ -279,6 +302,7 @@ const HighlightsReducer = (acc: Highlights, day: Highlights) => {
pr_collaborated: acc.pr_collaborated + (day.pr_collaborated ?? 0),
issue_assigned: acc.issue_assigned + (day.issue_assigned ?? 0),
issue_opened: acc.issue_opened + (day.issue_opened ?? 0),
linked_issues: acc.linked_issues + (day.linked_issues ?? 0),
};
};

@@ -292,6 +316,7 @@ const HighlightsInitialValue = {
pr_collaborated: 0,
issue_assigned: 0,
issue_opened: 0,
linked_issues: 0,
} as Highlights;

const getLastWeekHighlights = (calendarData: Highlights[]) => {
2 changes: 2 additions & 0 deletions lib/types.ts
@@ -42,6 +42,7 @@ export interface Highlights {
pr_collaborated: number;
issue_assigned: number;
issue_opened: number;
linked_issues: number;
}

export interface WeekSummary {
@@ -76,6 +77,7 @@ export interface Activity {
text: string;
collaborated_with?: string[];
turnaround_time?: number;
linked_issues?: string[]; // URLs of the issues linked to a merged PR
}

export interface OpenPr {
22 changes: 20 additions & 2 deletions scraper/src/github.py
@@ -9,7 +9,7 @@
from pathlib import Path
from urllib.parse import parse_qsl, urlparse
from zoneinfo import ZoneInfo

from linked_issue_parser import LinkedIssueParser
import requests

logging.basicConfig(
@@ -71,6 +71,14 @@ def append(self, user, event):
"authored_issue_and_pr": [],
}

def parse_linked_issues(self, pr_body):
if isinstance(pr_body, str):
pattern = r"#(\d+)"
matches = re.findall(pattern, pr_body)
return len(set(matches))
else:
return 0

def parse_event(self, event, event_time):
user = event["actor"]["login"]
try:
@@ -124,7 +132,7 @@ def parse_event(self, event, event_time):
"title": f'{event["repo"]["name"]}#{event["payload"]["pull_request"]["number"]}',
"time": event_time,
"link": event["payload"]["pull_request"]["html_url"],
"text": event["payload"]["pull_request"]["title"],
"text": event["payload"]["pull_request"]["title"]
},
)

@@ -133,6 +141,15 @@
and event["payload"]["pull_request"]["merged"]
):
turnaround_time = self.caclculate_turnaround_time(event)
pr_body = event["payload"]["pull_request"]["body"]
repo = event["repo"]["name"]
org_name, repo_name = repo.split("/")
pr_no = event["payload"]["pull_request"]["number"]
linked_issue_parser = LinkedIssueParser(org=org_name, repo=repo_name, pr_no=pr_no, pr_body=pr_body)
linked_issues = linked_issue_parser.parse_linked_issues()
self.log.debug(f"linked_issues for pr {pr_no}: {linked_issues}")
self.append(
event["payload"]["pull_request"]["user"]["login"],
{
Expand All @@ -142,6 +159,7 @@ def parse_event(self, event, event_time):
"link": event["payload"]["pull_request"]["html_url"],
"text": event["payload"]["pull_request"]["title"],
"turnaround_time": turnaround_time,
"linked_issues" : linked_issues
},
)

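Reviewer note: the merged-PR branch above delegates to the new LinkedIssueParser (next file) rather than to the regex-only Scraper.parse_linked_issues helper added earlier in this hunk, which merely counts unique "#N" references. A standalone sketch of what that helper does:

    import re

    # Mirrors the Scraper.parse_linked_issues helper: count unique "#N" references.
    def count_issue_refs(pr_body):
        if isinstance(pr_body, str):
            return len(set(re.findall(r"#(\d+)", pr_body)))
        return 0

    print(count_issue_refs("Fixes #12, refs #12 and #7"))  # 2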
113 changes: 113 additions & 0 deletions scraper/src/linked_issue_parser.py
@@ -0,0 +1,113 @@
import re
from os import getenv

import requests


class LinkedIssueParser:
    def __init__(self, org: str, repo: str, pr_no: int, pr_body: str):
        self.repo = repo
        self.pr_no = pr_no
        self.pr_body = pr_body
        self.org = org

    # The logic here is as follows:
    # Get a list of all events on a pull request of type CONNECTED_EVENT and DISCONNECTED_EVENT.
    # Build a map, keyed by issue number, counting how many times each issue was connected or disconnected.
    # Keys with an odd count were connected without a matching disconnect, i.e. they are still linked to the PR.
    def parse_ui_linked_issues(self):
        query = """
        {{
          resource(url: "https://github.com/{org}/{repo}/pull/{pr_no}") {{
            ... on PullRequest {{
              timelineItems(itemTypes: [CONNECTED_EVENT, DISCONNECTED_EVENT], first: 100) {{
                nodes {{
                  ... on ConnectedEvent {{
                    id
                    subject {{
                      ... on Issue {{
                        number
                      }}
                    }}
                  }}
                  ... on DisconnectedEvent {{
                    id
                    subject {{
                      ... on Issue {{
                        number
                      }}
                    }}
                  }}
                }}
              }}
            }}
          }}
        }}
        """.format(org=self.org, repo=self.repo, pr_no=self.pr_no)
        gh_url = "https://api.github.com/graphql"
        token = getenv("GITHUB_TOKEN")
        headers = {
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        }
        response = requests.post(gh_url, headers=headers, json={"query": query})
        if response.status_code != 200:
            return []
        data = response.json()
        issues = {}
        for node in data["data"]["resource"]["timelineItems"]["nodes"]:
            # The inline fragments above only populate `number` for Issue subjects,
            # so skip nodes whose subject is anything else.
            issue_number = node.get("subject", {}).get("number")
            if issue_number is None:
                continue
            issues[issue_number] = issues.get(issue_number, 0) + 1

        linked_issues = []
        for issue, count in issues.items():
            if count % 2 != 0:
                linked_issues.append(
                    f"https://github.com/{self.org}/{self.repo}/issues/{issue}"
                )
        return linked_issues

    def get_concat_commit_messages(self):
        # Unauthenticated request: subject to GitHub's anonymous rate limits.
        commit_url = f"https://api.github.com/repos/{self.org}/{self.repo}/pulls/{self.pr_no}/commits"
        response = requests.get(commit_url)
        if response.status_code != 200:
            return ""
        json_data = response.json()
        result = ""
        for commit in json_data:
            message = commit["commit"]["message"]
            result = f"{result} , {message}"
        return result

    def parse_desc_linked_issues(self):
        # Closing keywords (close/closes/closed, fix/fixes/fixed, resolve/resolves/resolved)
        # followed by "#N" (same repo) or "owner/repo#N" (cross repo).
        pattern_same_repo = r"\b(?:close[sd]?|fix(?:e[sd])?|resolve[sd]?)\s+#(\d+)"
        pattern_other_repo = r"\b(?:close[sd]?|fix(?:e[sd])?|resolve[sd]?)\s+(\S+\/\S+)#(\d+)"
        commit_messages = self.get_concat_commit_messages()
        text = f"{self.pr_body} {commit_messages}"
        same_repo_linked_issues = re.findall(pattern_same_repo, text, re.IGNORECASE)
        other_repo_linked_issues = re.findall(pattern_other_repo, text, re.IGNORECASE)
        linked_issues = set()
        for issue in same_repo_linked_issues:
            linked_issues.add(issue)
        for issue in other_repo_linked_issues:
            linked_issues.add(issue)
        linked_issues_url = []
        for issue in linked_issues:
            if isinstance(issue, str):
                # Same-repo match: a bare issue number.
                linked_issues_url.append(
                    f"https://github.com/{self.org}/{self.repo}/issues/{issue}"
                )
            elif isinstance(issue, tuple):
                # Cross-repo match: ("owner/repo", issue number).
                linked_issues_url.append(f"https://github.com/{issue[0]}/issues/{issue[1]}")
        return linked_issues_url

    def parse_linked_issues(self):
        # Combine issues linked via the GitHub UI with issues referenced by
        # closing keywords in the PR description and commit messages.
        return self.parse_ui_linked_issues() + self.parse_desc_linked_issues()
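
For reviewers, a minimal usage sketch of the new parser; the org/repo/PR values are hypothetical, and GITHUB_TOKEN must be set for the GraphQL lookup of UI-linked issues to succeed:

    from linked_issue_parser import LinkedIssueParser

    # Hypothetical PR, for illustration only.
    parser = LinkedIssueParser(
        org="example-org",
        repo="example-repo",
        pr_no=409,
        pr_body="Fixes #123 and closes other-org/other-repo#45",
    )
    print(parser.parse_linked_issues())
    # -> issue URLs from UI-linked timeline events plus closing keywords in the
    #    description and commit messages.

One thing worth checking: an issue linked both in the UI and via a closing keyword appears twice in the combined list, and the scoring side only reads the list's length, so such issues would be counted twice.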
