markdown_writer.py
"""A module for writing GitHub issue metrics to a markdown file.

This module provides a function for writing a list of GitHub issues with metrics
to a markdown file. The metrics include the average time to first response, the
average time to close, and the average time to answer the discussions. The module
also provides a helper for determining which report columns are not hidden.

Functions:
    write_to_markdown(
        issues_with_metrics: Union[List[IssueWithMetrics], None],
        average_time_to_first_response: Union[dict[str, timedelta], None],
        average_time_to_close: Union[dict[str, timedelta], None],
        average_time_to_answer: Union[dict[str, timedelta], None],
        average_time_in_draft: Union[dict[str, timedelta], None],
        average_time_in_labels: Union[dict, None],
        num_issues_opened: Union[int, None],
        num_issues_closed: Union[int, None],
        num_mentor_count: Union[int, None],
        labels: List[str],
        search_query: str,
        hide_label_metrics: bool,
        hide_items_closed_count: bool,
        non_mentioning_links: bool,
        report_title: str,
        output_file: str,
        ghe: str,
    ) -> None:
        Write the issues with metrics to a markdown file.

    get_non_hidden_columns(labels: List[str]) -> List[str]:
        Get the columns that are not hidden.

"""
from datetime import timedelta
from typing import List, Union

from classes import IssueWithMetrics
from config import get_env_vars


def get_non_hidden_columns(labels) -> List[str]:
    """
    Get a list of the columns that are not hidden.

    Args:
        labels (List[str]): A list of the labels that are used in the issues.

    Returns:
        List[str]: A list of the columns that are not hidden.
    """
    columns = ["Title", "URL"]
    env_vars = get_env_vars()
    # Find the number of columns and which are to be hidden
    hide_author = env_vars.hide_author
    if not hide_author:
        columns.append("Author")
    hide_time_to_first_response = env_vars.hide_time_to_first_response
    if not hide_time_to_first_response:
        columns.append("Time to first response")
    hide_time_to_close = env_vars.hide_time_to_close
    if not hide_time_to_close:
        columns.append("Time to close")
    hide_time_to_answer = env_vars.hide_time_to_answer
    if not hide_time_to_answer:
        columns.append("Time to answer")
    enable_time_in_draft = env_vars.draft_pr_tracking
    if enable_time_in_draft:
        columns.append("Time in draft")
    hide_label_metrics = env_vars.hide_label_metrics
    if not hide_label_metrics and labels:
        for label in labels:
            columns.append(f"Time spent in {label}")

    return columns
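
# Illustrative example of get_non_hidden_columns (a sketch, assuming none of the
# HIDE_* options are enabled and draft PR tracking is off):
#
#     get_non_hidden_columns(["bug"])
#     # -> ["Title", "URL", "Author", "Time to first response",
#     #     "Time to close", "Time to answer", "Time spent in bug"]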


def write_to_markdown(
    issues_with_metrics: Union[List[IssueWithMetrics], None],
    average_time_to_first_response: Union[dict[str, timedelta], None],
    average_time_to_close: Union[dict[str, timedelta], None],
    average_time_to_answer: Union[dict[str, timedelta], None],
    average_time_in_draft: Union[dict[str, timedelta], None],
    average_time_in_labels: Union[dict, None],
    num_issues_opened: Union[int, None],
    num_issues_closed: Union[int, None],
    num_mentor_count: Union[int, None],
    labels=None,
    search_query=None,
    hide_label_metrics=False,
    hide_items_closed_count=False,
    non_mentioning_links=False,
    report_title="",
    output_file="",
    ghe="",
) -> None:
    """Write the issues with metrics to a markdown file.

    Args:
        issues_with_metrics (List[IssueWithMetrics]): A list of GitHub issues with metrics.
        average_time_to_first_response (dict): The average, median, and 90th percentile
            time to first response for the issues.
        average_time_to_close (dict): The average, median, and 90th percentile
            time to close for the issues.
        average_time_to_answer (dict): The average, median, and 90th percentile
            time to answer the discussions.
        average_time_in_draft (dict): The average, median, and 90th percentile
            time spent in draft for the issues.
        average_time_in_labels (dict): A dictionary containing the average time spent
            in each label.
        num_issues_opened (int): The number of items that remain open.
        num_issues_closed (int): The number of issues that were closed.
        num_mentor_count (int): The number of very active commenters.
        labels (List[str]): A list of the labels that are used in the issues.
        search_query (str): The search query used to find the issues.
        hide_label_metrics (bool): Whether the user has chosen to hide label
            metrics in the output.
        hide_items_closed_count (bool): Whether the user has chosen to hide
            the number of items closed.
        non_mentioning_links (bool): Whether links are written so that they do not
            cause a notification in the destination repository.
        report_title (str): The title of the report.
        output_file (str): The name of the file to write the report to. If not
            provided, a file named "issue_metrics.md" is created.
        ghe (str): The GitHub Enterprise endpoint.

    Returns:
        None.
    """
    columns = get_non_hidden_columns(labels)

    output_file_name = output_file if output_file else "issue_metrics.md"
    with open(output_file_name, "w", encoding="utf-8") as file:
        file.write(f"# {report_title}\n\n")

        # If all the metrics are None, then there are no issues
        if not issues_with_metrics or len(issues_with_metrics) == 0:
            file.write("no issues found for the given search criteria\n\n")
            file.write(
                "\n_This report was generated with the \
[Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
            )
            if search_query:
                file.write(
                    f"Search query used to find these items: `{search_query}`\n"
                )
            return

        # Write first table with overall metrics
        write_overall_metrics_tables(
            issues_with_metrics,
            average_time_to_first_response,
            average_time_to_close,
            average_time_to_answer,
            average_time_in_draft,
            average_time_in_labels,
            num_issues_opened,
            num_issues_closed,
            num_mentor_count,
            labels,
            columns,
            file,
            hide_label_metrics,
            hide_items_closed_count,
        )

        # Write second table with individual issue/pr/discussion metrics
        # First write the header
        file.write("|")
        for column in columns:
            file.write(f" {column} |")
        file.write("\n")

        # Then write the column dividers
        file.write("|")
        for _ in columns:
            file.write(" --- |")
        file.write("\n")
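        # With the default configuration (no HIDE_* options set, no labels and no
        # draft tracking), the two rows written above render roughly as:
        #   | Title | URL | Author | Time to first response | Time to close | Time to answer |
        #   | --- | --- | --- | --- | --- | --- |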

        # Then write the issues/pr/discussions row by row
        for issue in issues_with_metrics:
            # Replace the vertical bar with its HTML entity so it does not break
            # the markdown table
            issue.title = issue.title.replace("|", "&#124;")

            # Strip leading and trailing whitespace from the title
            issue.title = issue.title.strip()

            endpoint = ghe.removeprefix("https://") if ghe else "github.com"
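
            # When non-mentioning links are requested, the host is rewritten below
            # to "www." (e.g. https://www.github.com/...): the link still resolves
            # to the same page but does not create a reference notification in the
            # linked repository.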
            if non_mentioning_links:
                file.write(
                    f"| {issue.title} | "
                    f"{issue.html_url}".replace(
                        f"https://{endpoint}", f"https://www.{endpoint}"
                    )
                    + " |"
                )
            else:
                file.write(f"| {issue.title} | {issue.html_url} |")
            if "Author" in columns:
                file.write(f" [{issue.author}](https://{endpoint}/{issue.author}) |")
            if "Time to first response" in columns:
                file.write(f" {issue.time_to_first_response} |")
            if "Time to close" in columns:
                file.write(f" {issue.time_to_close} |")
            if "Time to answer" in columns:
                file.write(f" {issue.time_to_answer} |")
            if "Time in draft" in columns:
                file.write(f" {issue.time_in_draft} |")
            if labels and issue.label_metrics:
                for label in labels:
                    if f"Time spent in {label}" in columns:
                        file.write(f" {issue.label_metrics[label]} |")
            file.write("\n")

        file.write(
            "\n_This report was generated with the \
[Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
        )
        if search_query:
            file.write(f"Search query used to find these items: `{search_query}`\n")

    print(f"Wrote issue metrics to {output_file_name}")
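
# Illustrative usage of write_to_markdown (a sketch, not taken from the project
# itself; `issues` and `stats_first_response` are assumed to exist, the latter
# being a dict with "avg", "med" and "90p" keys):
#
#     write_to_markdown(
#         issues_with_metrics=issues,
#         average_time_to_first_response=stats_first_response,
#         average_time_to_close=None,
#         average_time_to_answer=None,
#         average_time_in_draft=None,
#         average_time_in_labels=None,
#         num_issues_opened=2,
#         num_issues_closed=1,
#         num_mentor_count=5,
#         labels=["bug"],
#         search_query="is:issue is:open label:bug",
#         report_title="Issue Metrics",
#         output_file="issue_metrics.md",
#     )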


def write_overall_metrics_tables(
    issues_with_metrics,
    stats_time_to_first_response,
    stats_time_to_close,
    stats_time_to_answer,
    average_time_in_draft,
    stats_time_in_labels,
    num_issues_opened,
    num_issues_closed,
    num_mentor_count,
    labels,
    columns,
    file,
    hide_label_metrics,
    hide_items_closed_count=False,
):
    """Write the overall metrics tables to the markdown file."""
    if any(
        column in columns
        for column in [
            "Time to first response",
            "Time to close",
            "Time to answer",
            "Time in draft",
        ]
    ) or (hide_label_metrics is False and len(labels) > 0):
        file.write("| Metric | Average | Median | 90th percentile |\n")
        file.write("| --- | --- | --- | ---: |\n")
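        # Each stats_* argument (and average_time_in_draft) is expected to be a
        # dict with "avg", "med" and "90p" keys; each present metric is rendered
        # below as one row of this table.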
        if "Time to first response" in columns:
            if stats_time_to_first_response is not None:
                file.write(
                    f"| Time to first response "
                    f"| {stats_time_to_first_response['avg']} "
                    f"| {stats_time_to_first_response['med']} "
                    f"| {stats_time_to_first_response['90p']} |\n"
                )
            else:
                file.write("| Time to first response | None | None | None |\n")
        if "Time to close" in columns:
            if stats_time_to_close is not None:
                file.write(
                    f"| Time to close "
                    f"| {stats_time_to_close['avg']} "
                    f"| {stats_time_to_close['med']} "
                    f"| {stats_time_to_close['90p']} |\n"
                )
            else:
                file.write("| Time to close | None | None | None |\n")
        if "Time to answer" in columns:
            if stats_time_to_answer is not None:
                file.write(
                    f"| Time to answer "
                    f"| {stats_time_to_answer['avg']} "
                    f"| {stats_time_to_answer['med']} "
                    f"| {stats_time_to_answer['90p']} |\n"
                )
            else:
                file.write("| Time to answer | None | None | None |\n")
        if "Time in draft" in columns:
            if average_time_in_draft is not None:
                file.write(
                    f"| Time in draft "
                    f"| {average_time_in_draft['avg']} "
                    f"| {average_time_in_draft['med']} "
                    f"| {average_time_in_draft['90p']} |\n"
                )
            else:
                file.write("| Time in draft | None | None | None |\n")
        if labels and stats_time_in_labels:
            for label in labels:
                if (
                    f"Time spent in {label}" in columns
                    and label in stats_time_in_labels["avg"]
                ):
                    file.write(
                        f"| Time spent in {label} "
                        f"| {stats_time_in_labels['avg'][label]} "
                        f"| {stats_time_in_labels['med'][label]} "
                        f"| {stats_time_in_labels['90p'][label]} |\n"
                    )
        file.write("\n")

    # Write count stats to a separate table
    file.write("| Metric | Count |\n")
    file.write("| --- | ---: |\n")
    file.write(f"| Number of items that remain open | {num_issues_opened} |\n")
    if not hide_items_closed_count:
        file.write(f"| Number of items closed | {num_issues_closed} |\n")
    file.write(f"| Number of most active mentors | {num_mentor_count} |\n")
    file.write(f"| Total number of items created | {len(issues_with_metrics)} |\n\n")
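
# Illustrative sketch (not generated output) of the overall metrics section
# written above, with placeholder values:
#
#   | Metric | Average | Median | 90th percentile |
#   | --- | --- | --- | ---: |
#   | Time to first response | <avg> | <med> | <90p> |
#   | Time to close | <avg> | <med> | <90p> |
#
#   | Metric | Count |
#   | --- | ---: |
#   | Number of items that remain open | <count> |
#   | Number of items closed | <count> |
#   | Number of most active mentors | <count> |
#   | Total number of items created | <count> |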