
Commit 4888b60

Merge pull request #415 from github/draft-tracking
2 parents aab6341 + fb362c0 · commit 4888b60

13 files changed: +508 −187 lines

Diff for: README.md (+2)

```diff
@@ -20,6 +20,7 @@ Feel free to inquire about its usage by creating an issue in this repository.
 | Time to Close | The period from creation to closure.\* |
 | Time to Answer (Discussions Only) | The time from creation to an answer. |
 | Time in Label | The duration from label application to removal, requires `LABELS_TO_MEASURE` env variable. |
+| Time in Draft (PRs Only) | The duration from creation to the PR being marked as ready for review. |
 
 \*For pull requests, these metrics exclude the time the PR was in draft mode.
 
@@ -151,6 +152,7 @@ This action can be configured to authenticate with GitHub App Installation or Pe
 | `HIDE_TIME_TO_ANSWER` | False | False | If set to `true`, the time to answer a discussion will not be displayed in the generated Markdown file. |
 | `HIDE_TIME_TO_CLOSE` | False | False | If set to `true`, the time to close will not be displayed in the generated Markdown file. |
 | `HIDE_TIME_TO_FIRST_RESPONSE` | False | False | If set to `true`, the time to first response will not be displayed in the generated Markdown file. |
+| `DRAFT_PR_TRACKING` | False | False | If set to `true`, draft PRs will be included in the metrics as a new column and in the summary stats. |
 | `IGNORE_USERS` | False | False | A comma separated list of users to ignore when calculating metrics. (ie. `IGNORE_USERS: 'user1,user2'`). To ignore bots, append `[bot]` to the user (ie. `IGNORE_USERS: 'github-actions[bot]'`) Users in this list will also have their authored issues and pull requests removed from the Markdown table. |
 | `ENABLE_MENTOR_COUNT` | False | False | If set to 'TRUE' count number of comments users left on discussions, issues and PRs and display number of active mentors |
 | `MIN_MENTOR_COMMENTS` | False | 10 | Minimum number of comments to count as a mentor |
```
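
The new README rows describe the feature: "Time in Draft" is the span from PR creation until the PR is marked ready for review, and it is enabled with the `DRAFT_PR_TRACKING` environment variable. A minimal sketch of that calculation with made-up timestamps (the actual measurement is done by the new `time_in_draft` module imported further down; how it treats PRs that are still in draft is not visible in this excerpt):

```python
from datetime import datetime, timedelta, timezone


def sketch_time_in_draft(
    created_at: datetime, ready_for_review_at: datetime | None
) -> timedelta | None:
    """Illustration only: duration from PR creation until it was marked
    ready for review. Returns None when no ready-for-review event exists;
    the real helper may handle still-draft PRs differently."""
    if ready_for_review_at is None:
        return None
    return ready_for_review_at - created_at


# Made-up timestamps for demonstration.
created = datetime(2024, 5, 1, 9, 0, tzinfo=timezone.utc)
ready = datetime(2024, 5, 2, 15, 30, tzinfo=timezone.utc)
print(sketch_time_in_draft(created, ready))  # 1 day, 6:30:00
```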

Diff for: classes.py (+3)

```diff
@@ -18,6 +18,7 @@ class IssueWithMetrics:
         time_to_close (timedelta, optional): The time it took to close the issue.
         time_to_answer (timedelta, optional): The time it took to answer the
             discussions in the issue.
+        time_in_draft (timedelta, optional): The time the PR was in draft state.
         label_metrics (dict, optional): A dictionary containing the label metrics
         mentor_activity (dict, optional): A dictionary containing active mentors
@@ -33,6 +34,7 @@ def __init__(
         time_to_first_response=None,
         time_to_close=None,
         time_to_answer=None,
+        time_in_draft=None,
         labels_metrics=None,
         mentor_activity=None,
     ):
@@ -42,5 +44,6 @@ def __init__(
         self.time_to_first_response = time_to_first_response
         self.time_to_close = time_to_close
         self.time_to_answer = time_to_answer
+        self.time_in_draft = time_in_draft
         self.label_metrics = labels_metrics
         self.mentor_activity = mentor_activity
```
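
`IssueWithMetrics` now carries the draft duration alongside the existing per-item metrics. A small usage sketch with placeholder values (keyword names follow the updated constructor above):

```python
from datetime import timedelta

from classes import IssueWithMetrics

# Placeholder title, URL, and author; time_in_draft is the newly added field.
pr = IssueWithMetrics(
    title="Example draft PR",
    html_url="https://github.com/owner/repo/pull/1",
    author="octocat",
    time_in_draft=timedelta(days=1, hours=6),
)
print(pr.time_in_draft)  # 1 day, 6:00:00
```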

Diff for: config.py (+23 −8)

```diff
@@ -1,6 +1,7 @@
 """A module for managing environment variables used in GitHub metrics calculation.
 
-This module defines a class for encapsulating environment variables and a function to retrieve these variables.
+This module defines a class for encapsulating environment variables
+and a function to retrieve these variables.
 
 Classes:
     EnvVars: Represents the collection of environment variables used in the script.
@@ -23,27 +24,36 @@ class EnvVars:
 
     Attributes:
         gh_app_id (int | None): The GitHub App ID to use for authentication
-        gh_app_installation_id (int | None): The GitHub App Installation ID to use for authentication
-        gh_app_private_key_bytes (bytes): The GitHub App Private Key as bytes to use for authentication
+        gh_app_installation_id (int | None): The GitHub App Installation ID to use for
+            authentication
+        gh_app_private_key_bytes (bytes): The GitHub App Private Key as bytes to use for
+            authentication
         gh_token (str | None): GitHub personal access token (PAT) for API authentication
         ghe (str): The GitHub Enterprise URL to use for authentication
         hide_author (bool): If true, the author's information is hidden in the output
-        hide_items_closed_count (bool): If true, the number of items closed metric is hidden in the output
+        hide_items_closed_count (bool): If true, the number of items closed metric is hidden
+            in the output
         hide_label_metrics (bool): If true, the label metrics are hidden in the output
         hide_time_to_answer (bool): If true, the time to answer discussions is hidden in the output
         hide_time_to_close (bool): If true, the time to close metric is hidden in the output
-        hide_time_to_first_response (bool): If true, the time to first response metric is hidden in the output
+        hide_time_to_first_response (bool): If true, the time to first response metric is hidden
+            in the output
         ignore_users (List[str]): List of usernames to ignore when calculating metrics
         labels_to_measure (List[str]): List of labels to measure how much time the lable is applied
         enable_mentor_count (bool): If set to TRUE, compute number of mentors
         min_mentor_comments (str): If set, defines the minimum number of comments for mentors
-        max_comments_eval (str): If set, defines the maximum number of comments to look at for mentor evaluation
-        heavily_involved_cutoff (str): If set, defines the cutoff after which heavily involved commentors in
+        max_comments_eval (str): If set, defines the maximum number of comments to look
+            at for mentor evaluation
+        heavily_involved_cutoff (str): If set, defines the cutoff after which heavily
+            involved commentors in
         search_query (str): Search query used to filter issues/prs/discussions on GitHub
-        non_mentioning_links (bool): If set to TRUE, links do not cause a notification in the desitnation repository
+        non_mentioning_links (bool): If set to TRUE, links do not cause a notification
+            in the desitnation repository
         report_title (str): The title of the report
         output_file (str): The name of the file to write the report to
         rate_limit_bypass (bool): If set to TRUE, bypass the rate limit for the GitHub API
+        draft_pr_tracking (bool): If set to TRUE, track PR time in draft state
+            in addition to other metrics
     """
 
     def __init__(
@@ -70,6 +80,7 @@ def __init__(
         report_title: str,
         output_file: str,
         rate_limit_bypass: bool = False,
+        draft_pr_tracking: bool = False,
     ):
         self.gh_app_id = gh_app_id
         self.gh_app_installation_id = gh_app_installation_id
@@ -93,6 +104,7 @@ def __init__(
         self.report_title = report_title
         self.output_file = output_file
         self.rate_limit_bypass = rate_limit_bypass
+        self.draft_pr_tracking = draft_pr_tracking
 
     def __repr__(self):
         return (
@@ -119,6 +131,7 @@ def __repr__(self):
             f"{self.report_title}"
             f"{self.output_file}"
             f"{self.rate_limit_bypass}"
+            f"{self.draft_pr_tracking}"
         )
 
 
@@ -203,6 +216,7 @@ def get_env_vars(test: bool = False) -> EnvVars:
     report_title = os.getenv("REPORT_TITLE", "Issue Metrics")
     output_file = os.getenv("OUTPUT_FILE", "")
     rate_limit_bypass = get_bool_env_var("RATE_LIMIT_BYPASS", False)
+    draft_pr_tracking = get_bool_env_var("DRAFT_PR_TRACKING", False)
 
     # Hidden columns
     hide_author = get_bool_env_var("HIDE_AUTHOR", False)
@@ -240,4 +254,5 @@ def get_env_vars(test: bool = False) -> EnvVars:
         report_title,
         output_file,
         rate_limit_bypass,
+        draft_pr_tracking,
     )
```
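
`DRAFT_PR_TRACKING` is read with the existing `get_bool_env_var` helper and defaults to `False`, so the feature is opt-in. A rough sketch of how the flag flows into `EnvVars` (the other environment values are placeholders, and the assumption that `get_env_vars(test=True)` relaxes the required-variable checks is mine):

```python
import os

from config import get_env_vars

# Placeholder settings; DRAFT_PR_TRACKING is the new opt-in flag.
os.environ["SEARCH_QUERY"] = "is:pr repo:owner/repo created:2024-01-01..2024-01-31"
os.environ["GH_TOKEN"] = "ghp_example_token"
os.environ["DRAFT_PR_TRACKING"] = "true"

env_vars = get_env_vars(test=True)  # assumption: test=True skips strict validation
print(env_vars.draft_pr_tracking)   # expected: True
```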

Diff for: issue_metrics.py (+27 −20)

```diff
@@ -30,6 +30,7 @@
 from markdown_writer import write_to_markdown
 from most_active_mentors import count_comments_per_user, get_mentor_count
 from search import get_owners_and_repositories, search_issues
+from time_in_draft import get_stats_time_in_draft, measure_time_in_draft
 from time_to_answer import get_stats_time_to_answer, measure_time_to_answer
 from time_to_close import get_stats_time_to_close, measure_time_to_close
 from time_to_first_response import (
@@ -112,20 +113,21 @@ def get_per_issue_metrics(
             continue
 
         issue_with_metrics = IssueWithMetrics(
-            issue.title,  # type: ignore
-            issue.html_url,  # type: ignore
-            issue.user["login"],  # type: ignore
-            None,
-            None,
-            None,
-            None,
+            title=issue.title,  # type: ignore
+            html_url=issue.html_url,  # type: ignore
+            author=issue.user["login"],  # type: ignore
         )
 
         # Check if issue is actually a pull request
         pull_request, ready_for_review_at = None, None
         if issue.issue.pull_request_urls:  # type: ignore
             pull_request = issue.issue.pull_request()  # type: ignore
             ready_for_review_at = get_time_to_ready_for_review(issue, pull_request)
+            if env_vars.draft_pr_tracking:
+                issue_with_metrics.time_in_draft = measure_time_in_draft(
+                    issue=issue,
+                    ready_for_review_at=ready_for_review_at,
+                )
 
         if env_vars.hide_time_to_first_response is False:
             issue_with_metrics.time_to_first_response = (
@@ -242,6 +244,7 @@ def main():  # pragma: no cover
             average_time_to_first_response=None,
             average_time_to_close=None,
             average_time_to_answer=None,
+            average_time_in_draft=None,
             average_time_in_labels=None,
             num_issues_opened=None,
             num_issues_closed=None,
@@ -266,6 +269,7 @@ def main():  # pragma: no cover
             average_time_to_first_response=None,
             average_time_to_close=None,
             average_time_to_answer=None,
+            average_time_in_draft=None,
             average_time_in_labels=None,
             num_issues_opened=None,
             num_issues_closed=None,
@@ -297,6 +301,7 @@ def main():  # pragma: no cover
     stats_time_to_close = get_stats_time_to_close(issues_with_metrics)
 
     stats_time_to_answer = get_stats_time_to_answer(issues_with_metrics)
+    stats_time_in_draft = get_stats_time_in_draft(issues_with_metrics)
 
     num_mentor_count = 0
     if enable_mentor_count:
@@ -308,23 +313,25 @@ def main():  # pragma: no cover
 
     # Write the results to json and a markdown file
     write_to_json(
-        issues_with_metrics,
-        stats_time_to_first_response,
-        stats_time_to_close,
-        stats_time_to_answer,
-        stats_time_in_labels,
-        num_issues_open,
-        num_issues_closed,
-        num_mentor_count,
-        search_query,
-        output_file,
+        issues_with_metrics=issues_with_metrics,
+        stats_time_to_first_response=stats_time_to_first_response,
+        stats_time_to_close=stats_time_to_close,
+        stats_time_to_answer=stats_time_to_answer,
+        stats_time_in_draft=stats_time_in_draft,
+        stats_time_in_labels=stats_time_in_labels,
+        num_issues_opened=num_issues_open,
+        num_issues_closed=num_issues_closed,
+        num_mentor_count=num_mentor_count,
+        search_query=search_query,
+        output_file=output_file,
     )
 
     write_to_markdown(
         issues_with_metrics=issues_with_metrics,
         average_time_to_first_response=stats_time_to_first_response,
         average_time_to_close=stats_time_to_close,
         average_time_to_answer=stats_time_to_answer,
+        average_time_in_draft=stats_time_in_draft,
         average_time_in_labels=stats_time_in_labels,
         num_issues_opened=num_issues_open,
         num_issues_closed=num_issues_closed,
@@ -345,9 +352,9 @@ def main():  # pragma: no cover
         shutil.move("issue_metrics_0.md", "issue_metrics.md")
         print(
             "Issue metrics markdown file is too large for GitHub issue body and has been \
-split into multiple files. ie. issue_metrics.md, issue_metrics_1.md, etc. \
-The full file is saved as issue_metrics_full.md\n\
-See https://github.com/github/issue-metrics/blob/main/docs/dealing-with-large-issue-metrics.md"
+            split into multiple files. ie. issue_metrics.md, issue_metrics_1.md, etc. \
+            The full file is saved as issue_metrics_full.md\n\
+            See https://github.com/github/issue-metrics/blob/main/docs/dealing-with-large-issue-metrics.md"
         )
```
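
`get_stats_time_in_draft` is consumed exactly like the other stats helpers; the json_writer diff below reads `avg`, `med`, and `90p` keys from its result. A rough stand-in that produces the same shape from a list of `IssueWithMetrics` (the real implementation lives in the new `time_in_draft.py`, which is not shown in this excerpt):

```python
from datetime import timedelta
from statistics import median


def sketch_stats_time_in_draft(issues_with_metrics):
    """Approximation only: aggregate the non-None time_in_draft values into
    the avg/med/90p dict shape that write_to_json expects."""
    values = sorted(
        item.time_in_draft
        for item in issues_with_metrics
        if item.time_in_draft is not None
    )
    if not values:
        return None
    average = sum(values, timedelta()) / len(values)
    p90 = values[min(len(values) - 1, round(0.9 * (len(values) - 1)))]
    return {"avg": average, "med": median(values), "90p": p90}
```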

Diff for: json_writer.py (+53 −24)

```diff
@@ -2,12 +2,15 @@
 
 Functions:
     write_to_json(
-        issues_with_metrics: List[IssueWithMetrics],
-        average_time_to_first_response: timedelta,
-        average_time_to_close: timedelta,
-        average_time_to_answer: timedelta,
-        num_issues_opened: int,
-        num_issues_closed: int,
+        issues_with_metrics: Union[List[IssueWithMetrics], None],
+        stats_time_to_first_response: Union[dict[str, timedelta], None],
+        stats_time_to_close: Union[dict[str, timedelta], None],
+        stats_time_to_answer: Union[dict[str, timedelta], None],
+        stats_time_in_draft: Union[dict[str, timedelta], None],
+        stats_time_in_labels: Union[dict[str, dict[str, timedelta]], None],
+        num_issues_opened: Union[int, None],
+        num_issues_closed: Union[int, None],
+        num_mentor_count: Union[int, None],
         search_query: str,
         output_file: str,
     ) -> str:
@@ -28,6 +31,7 @@ def write_to_json(
     stats_time_to_first_response: Union[dict[str, timedelta], None],
     stats_time_to_close: Union[dict[str, timedelta], None],
     stats_time_to_answer: Union[dict[str, timedelta], None],
+    stats_time_in_draft: Union[dict[str, timedelta], None],
     stats_time_in_labels: Union[dict[str, dict[str, timedelta]], None],
     num_issues_opened: Union[int, None],
    num_issues_closed: Union[int, None],
@@ -40,37 +44,48 @@ def write_to_json(
 
     json structure is like following
     {
-        "average_time_to_first_response": "2 days, 12:00:00",
-        "average_time_to_close": "5 days, 0:00:00",
-        "average_time_to_answer": "1 day, 0:00:00",
+        "average_time_to_first_response": "None",
+        "average_time_to_close": "None",
+        "average_time_to_answer": "None",
+        "average_time_in_draft": "None",
+        "average_time_in_labels": {},
+        "median_time_to_first_response": "None",
+        "median_time_to_close": "None",
+        "median_time_to_answer": "None",
+        "median_time_in_draft": "None",
+        "median_time_in_labels": {},
+        "90_percentile_time_to_first_response": "None",
+        "90_percentile_time_to_close": "None",
+        "90_percentile_time_to_answer": "None",
+        "90_percentile_time_in_draft": "None",
+        "90_percentile_time_in_labels": {},
         "num_items_opened": 2,
-        "num_items_closed": 1,
+        "num_items_closed": 0,
         "num_mentor_count": 5,
         "total_item_count": 2,
         "issues": [
             {
                 "title": "Issue 1",
                 "html_url": "https://github.com/owner/repo/issues/1",
-                "author": "author",
-                "time_to_first_response": "3 days, 0:00:00",
-                "time_to_close": "6 days, 0:00:00",
+                "author": "alice",
+                "time_to_first_response": "None",
+                "time_to_close": "None",
                 "time_to_answer": "None",
-                "label_metrics": {
-                    "bug": "1 day, 16:24:12"
-                }
+                "time_in_draft": "None",
+                "label_metrics": {}
             },
             {
                 "title": "Issue 2",
                 "html_url": "https://github.com/owner/repo/issues/2",
-                "author": "author",
-                "time_to_first_response": "2 days, 0:00:00",
-                "time_to_close": "4 days, 0:00:00",
-                "time_to_answer": "1 day, 0:00:00",
-                "label_metrics": {
-                }
-            },
+                "author": "bob",
+                "time_to_first_response": "None",
+                "time_to_close": "None",
+                "time_to_answer": "None",
+                "time_in_draft": "None",
+                "label_metrics": {}
+            }
         ],
-        "search_query": "is:issue is:open repo:owner/repo"
+        "search_query": "is:issue repo:owner/repo"
     }
 
     """
@@ -106,6 +121,16 @@ def write_to_json(
         med_time_to_answer = stats_time_to_answer["med"]
         p90_time_to_answer = stats_time_to_answer["90p"]
 
+    # time in draft
+    average_time_in_draft = None
+    med_time_in_draft = None
+    p90_time_in_draft = None
+    if stats_time_in_draft is not None:
+        average_time_in_draft = stats_time_in_draft["avg"]
+        med_time_in_draft = stats_time_in_draft["med"]
+        p90_time_in_draft = stats_time_in_draft["90p"]
+
+    # time in labels
     average_time_in_labels = {}
     med_time_in_labels = {}
     p90_time_in_labels = {}
@@ -122,14 +147,17 @@ def write_to_json(
         "average_time_to_first_response": str(average_time_to_first_response),
         "average_time_to_close": str(average_time_to_close),
         "average_time_to_answer": str(average_time_to_answer),
+        "average_time_in_draft": str(average_time_in_draft),
         "average_time_in_labels": average_time_in_labels,
         "median_time_to_first_response": str(med_time_to_first_response),
         "median_time_to_close": str(med_time_to_close),
         "median_time_to_answer": str(med_time_to_answer),
+        "median_time_in_draft": str(med_time_in_draft),
         "median_time_in_labels": med_time_in_labels,
         "90_percentile_time_to_first_response": str(p90_time_to_first_response),
         "90_percentile_time_to_close": str(p90_time_to_close),
         "90_percentile_time_to_answer": str(p90_time_to_answer),
+        "90_percentile_time_in_draft": str(p90_time_in_draft),
         "90_percentile_time_in_labels": p90_time_in_labels,
         "num_items_opened": num_issues_opened,
         "num_items_closed": num_issues_closed,
@@ -152,6 +180,7 @@ def write_to_json(
                 "time_to_first_response": str(issue.time_to_first_response),
                 "time_to_close": str(issue.time_to_close),
                 "time_to_answer": str(issue.time_to_answer),
+                "time_in_draft": str(issue.time_in_draft),
                 "label_metrics": formatted_label_metrics,
             }
         )
```
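
With the new keys in place, anything that consumes the JSON report can pick up the draft figures directly. A small sketch of reading them back (the `issue_metrics.json` filename is an assumption about where the report was written; the key names come from the diff above):

```python
import json

# Assumed output path for the JSON report written by write_to_json.
with open("issue_metrics.json", encoding="utf-8") as report_file:
    metrics = json.load(report_file)

print("average time in draft:", metrics["average_time_in_draft"])
for item in metrics["issues"]:
    print(item["html_url"], "time in draft:", item["time_in_draft"])
```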
