|
30 | 30 | average_time_to_answer: timedelta |
31 | 31 | ) -> List[str]: |
32 | 32 | Get the columns that are not hidden. |
| 33 | + sort_issues( |
| 34 | + issues: List[IssueWithMetrics], |
| 35 | + sort_by: str | None, |
| 36 | + sort_order: str |
| 37 | + ) -> List[IssueWithMetrics]: |
| 38 | + Sort issues by the specified field. |
| 39 | + group_issues( |
| 40 | + issues: List[IssueWithMetrics], |
| 41 | + group_by: str | None |
| 42 | + ) -> dict[str, List[IssueWithMetrics]]: |
| 43 | + Group issues by the specified field. |
33 | 44 | """ |
34 | 45 |
|
35 | 46 | from datetime import timedelta |
36 | | -from typing import List, Union |
| 47 | +from typing import Dict, List, Union |
37 | 48 |
|
38 | 49 | from classes import IssueWithMetrics |
39 | 50 | from config import get_env_vars |
@@ -98,6 +109,94 @@ def get_non_hidden_columns(labels) -> List[str]: |
98 | 109 | return columns |
99 | 110 |
|
100 | 111 |
|
def sort_issues(
    issues: List[IssueWithMetrics], sort_by: str | None, sort_order: str
) -> List[IssueWithMetrics]:
    """Sort issues by the specified field, keeping missing values last.

    Args:
        issues (List[IssueWithMetrics]): List of issues to sort.
        sort_by (str | None): Field to sort by (e.g., 'time_to_close',
            'time_to_first_response').
        sort_order (str): 'desc' for descending; any other value (including
            'asc') sorts ascending.

    Returns:
        List[IssueWithMetrics]: Sorted list of issues. The input list is
        returned unchanged when it is empty or when sort_by is falsy or not
        one of the supported fields.
    """
    # Only these attributes are sortable; anything else makes this a no-op.
    # Membership also rejects a falsy sort_by (None / empty string).
    sortable_fields = {
        "time_to_close",
        "time_to_first_response",
        "time_to_answer",
        "time_in_draft",
        "created_at",
    }
    if not issues or sort_by not in sortable_fields:
        return issues

    descending = sort_order == "desc"

    # Partition so issues lacking the field always trail the sorted ones,
    # regardless of sort direction.
    with_value = [i for i in issues if getattr(i, sort_by, None) is not None]
    without_value = [i for i in issues if getattr(i, sort_by, None) is None]

    # list.sort is stable, matching the original relative ordering of ties.
    with_value.sort(key=lambda item: getattr(item, sort_by), reverse=descending)

    return with_value + without_value
| 160 | + |
| 161 | + |
def group_issues(
    issues: List[IssueWithMetrics], group_by: str | None
) -> Dict[str, List[IssueWithMetrics]]:
    """Group issues by the specified field.

    Args:
        issues (List[IssueWithMetrics]): List of issues to group.
        group_by (str | None): Field to group by ('author' or 'assignee').

    Returns:
        Dict[str, List[IssueWithMetrics]]: Mapping of group name to its
        issues. When grouping is disabled (falsy group_by), the list is
        empty, or the field is unsupported, everything lands under a single
        "" key.
    """
    # Tuple membership rejects None/"" as well as unsupported fields.
    if not issues or group_by not in ("author", "assignee"):
        return {"": issues}

    grouped: Dict[str, List[IssueWithMetrics]] = {}
    for issue in issues:
        if group_by == "author":
            key = issue.author or "Unknown"
        elif group_by == "assignee":
            # The first assignee represents the issue; fall back when
            # nobody is assigned.
            key = issue.assignees[0] if issue.assignees else "Unassigned"
        else:
            # Unreachable given the guard above; kept as a safe default.
            key = "Unknown"
        grouped.setdefault(key, []).append(issue)

    return grouped
| 198 | + |
| 199 | + |
101 | 200 | def write_to_markdown( |
102 | 201 | issues_with_metrics: Union[List[IssueWithMetrics], None], |
103 | 202 | average_time_to_first_response: Union[dict[str, timedelta], None], |
@@ -188,70 +287,82 @@ def write_to_markdown( |
188 | 287 |
|
189 | 288 | # Write second table with individual issue/pr/discussion metrics |
190 | 289 | # Skip this table if hide_items_list is True |
191 | | - if not env_vars.hide_items_list: |
192 | | - # First write the header |
193 | | - file.write("|") |
194 | | - for column in columns: |
195 | | - file.write(f" {column} |") |
196 | | - file.write("\n") |
197 | | - |
198 | | - # Then write the column dividers |
199 | | - file.write("|") |
200 | | - for _ in columns: |
201 | | - file.write(" --- |") |
202 | | - file.write("\n") |
203 | | - |
204 | | - # Then write the issues/pr/discussions row by row |
205 | | - for issue in issues_with_metrics: |
206 | | - # Replace the vertical bar with the HTML entity |
207 | | - issue.title = issue.title.replace("|", "|") |
208 | | - # Replace any whitespace |
209 | | - issue.title = issue.title.strip() |
210 | | - |
211 | | - endpoint = ghe.removeprefix("https://") if ghe else "github.com" |
212 | | - if non_mentioning_links: |
213 | | - file.write( |
214 | | - f"| {issue.title} | " |
215 | | - f"{issue.html_url}".replace( |
216 | | - f"https://{endpoint}", f"https://www.{endpoint}" |
| 290 | + if not env_vars.hide_items_list: # pylint: disable=too-many-nested-blocks |
| 291 | + # Apply sorting and grouping |
| 292 | + sorted_issues = sort_issues( |
| 293 | + issues_with_metrics, env_vars.sort_by, env_vars.sort_order |
| 294 | + ) |
| 295 | + grouped_issues_dict = group_issues(sorted_issues, env_vars.group_by) |
| 296 | + |
| 297 | + # If grouping, write separate sections for each group |
| 298 | + for group_name, group_issues_list in grouped_issues_dict.items(): |
| 299 | + # Write group header if grouping is enabled |
| 300 | + if env_vars.group_by and group_name: |
| 301 | + file.write(f"\n### {group_name}\n\n") |
| 302 | + |
| 303 | + # First write the header |
| 304 | + file.write("|") |
| 305 | + for column in columns: |
| 306 | + file.write(f" {column} |") |
| 307 | + file.write("\n") |
| 308 | + |
| 309 | + # Then write the column dividers |
| 310 | + file.write("|") |
| 311 | + for _ in columns: |
| 312 | + file.write(" --- |") |
| 313 | + file.write("\n") |
| 314 | + |
| 315 | + # Then write the issues/pr/discussions row by row |
| 316 | + for issue in group_issues_list: |
| 317 | + # Replace the vertical bar with the HTML entity |
| 318 | + issue.title = issue.title.replace("|", "|") |
| 319 | + # Replace any whitespace |
| 320 | + issue.title = issue.title.strip() |
| 321 | + |
| 322 | + endpoint = ghe.removeprefix("https://") if ghe else "github.com" |
| 323 | + if non_mentioning_links: |
| 324 | + file.write( |
| 325 | + f"| {issue.title} | " |
| 326 | + f"{issue.html_url}".replace( |
| 327 | + f"https://{endpoint}", f"https://www.{endpoint}" |
| 328 | + ) |
| 329 | + + " |" |
217 | 330 | ) |
218 | | - + " |" |
219 | | - ) |
220 | | - else: |
221 | | - file.write(f"| {issue.title} | {issue.html_url} |") |
222 | | - if "Assignee" in columns: |
223 | | - if issue.assignees: |
224 | | - assignee_links = [ |
225 | | - f"[{assignee}](https://{endpoint}/{assignee})" |
226 | | - for assignee in issue.assignees |
227 | | - ] |
228 | | - file.write(f" {', '.join(assignee_links)} |") |
229 | 331 | else: |
230 | | - file.write(" None |") |
231 | | - if "Author" in columns: |
232 | | - file.write( |
233 | | - f" [{issue.author}](https://{endpoint}/{issue.author}) |" |
234 | | - ) |
235 | | - if "Time to first response" in columns: |
236 | | - file.write(f" {issue.time_to_first_response} |") |
237 | | - if "Time to close" in columns: |
238 | | - file.write(f" {issue.time_to_close} |") |
239 | | - if "Time to answer" in columns: |
240 | | - file.write(f" {issue.time_to_answer} |") |
241 | | - if "Time in draft" in columns: |
242 | | - file.write(f" {issue.time_in_draft} |") |
243 | | - if labels and issue.label_metrics: |
244 | | - for label in labels: |
245 | | - if f"Time spent in {label}" in columns: |
246 | | - file.write(f" {issue.label_metrics[label]} |") |
247 | | - if "Created At" in columns: |
248 | | - file.write(f" {issue.created_at} |") |
249 | | - if "Status" in columns: |
250 | | - file.write(f" {issue.status} |") |
251 | | - if "PR Comments" in columns: |
252 | | - file.write(f" {issue.pr_comment_count or 'N/A'} |") |
| 332 | + file.write(f"| {issue.title} | {issue.html_url} |") |
| 333 | + if "Assignee" in columns: |
| 334 | + if issue.assignees: |
| 335 | + assignee_links = [ |
| 336 | + f"[{assignee}](https://{endpoint}/{assignee})" |
| 337 | + for assignee in issue.assignees |
| 338 | + ] |
| 339 | + file.write(f" {', '.join(assignee_links)} |") |
| 340 | + else: |
| 341 | + file.write(" None |") |
| 342 | + if "Author" in columns: |
| 343 | + file.write( |
| 344 | + f" [{issue.author}](https://{endpoint}/{issue.author}) |" |
| 345 | + ) |
| 346 | + if "Time to first response" in columns: |
| 347 | + file.write(f" {issue.time_to_first_response} |") |
| 348 | + if "Time to close" in columns: |
| 349 | + file.write(f" {issue.time_to_close} |") |
| 350 | + if "Time to answer" in columns: |
| 351 | + file.write(f" {issue.time_to_answer} |") |
| 352 | + if "Time in draft" in columns: |
| 353 | + file.write(f" {issue.time_in_draft} |") |
| 354 | + if labels and issue.label_metrics: |
| 355 | + for label in labels: |
| 356 | + if f"Time spent in {label}" in columns: |
| 357 | + file.write(f" {issue.label_metrics[label]} |") |
| 358 | + if "Created At" in columns: |
| 359 | + file.write(f" {issue.created_at} |") |
| 360 | + if "Status" in columns: |
| 361 | + file.write(f" {issue.status} |") |
| 362 | + if "PR Comments" in columns: |
| 363 | + file.write(f" {issue.pr_comment_count or 'N/A'} |") |
| 364 | + file.write("\n") |
253 | 365 | file.write("\n") |
254 | | - file.write("\n") |
255 | 366 | file.write("_This report was generated with the \ |
256 | 367 | [Issue Metrics Action](https://github.com/github-community-projects/issue-metrics)_\n") |
257 | 368 | if search_query: |
|
0 commit comments