"""
Automated Bug Triage Tool
This script scans markdown bug reports, classifies them by severity based on

Check failure on line 3 in bug_triage.py

View workflow job for this annotation

GitHub Actions / ruff

ruff (W291)

bug_triage.py:3:77: W291 Trailing whitespace help: Remove trailing whitespace
keywords, and generates a summarized triage report in Markdown format.
"""

import glob
import os
from datetime import datetime
from pathlib import Path
from typing import Dict, List

# Configuration: Adjust these paths based on your local environment
BASE_DIR = Path(__file__).parent
# Glob pattern matching every markdown bug report to triage.
BUG_PATH = str(BASE_DIR / "production" / "qa" / "bugs" / "*.md")
# Directory where the generated triage report is written.
OUTPUT_PATH = str(BASE_DIR / "production" / "qa")

def classify_severity(content: str) -> str:
    """
    Classify bug severity from keywords found in *content*.

    Returns "S1" (critical) through "S4" (minor). Rules are checked in
    descending order of severity, so the most severe match wins.
    >>> classify_severity("The application had a fatal crash on startup.")
    'S1'
    >>> classify_severity("The UI is a bit slow today.")
    'S3'
    """
    text = content.lower()

    # Ordered rule table: (severity level, trigger keywords).
    rules = (
        ("S1", ("crash", "data loss", "cannot start", "fatal")),
        ("S2", ("broken", "not working", "fail")),
        ("S3", ("slow", "incorrect", "glitch")),
    )
    for level, keywords in rules:
        if any(keyword in text for keyword in keywords):
            return level

    # Nothing matched: treat as a minor issue.
    return "S4"


def classify_priority(severity: str) -> str:
    """
    Map a technical severity level to a business priority level.

    "S1"-"S3" map to "P1"-"P3"; anything else falls through to "P4".
    >>> classify_priority("S1")
    'P1'
    >>> classify_priority("S4")
    'P4'
    """
    if severity in ("S1", "S2", "S3"):
        # Severity and priority share the same numeric suffix.
        return "P" + severity[1]
    return "P4"
def read_bugs(pattern: str | None = None) -> list[dict[str, str]]:
    """
    Read markdown bug reports and extract triage metadata.

    Scans every file matched by *pattern* (defaults to the module-level
    BUG_PATH glob) and returns one record per readable file with keys
    "id", "file", "severity", "priority" and "summary". Files that
    cannot be read are skipped with a warning rather than aborting the
    whole scan.
    """
    files = glob.glob(BUG_PATH if pattern is None else pattern)
    bugs = []

    # Sorting files ensures consistent BUG-ID assignment across runs.
    for i, file_path in enumerate(sorted(files)):
        try:
            # Text mode "r" is the default; only the read can raise OSError,
            # so keep the try body minimal.
            with open(file_path, encoding="utf-8") as f:
                content = f.read()
        except OSError as e:
            # Best-effort: report and continue with the remaining files.
            print(f"⚠️ Could not read file {file_path}: {e}")
            continue

        severity = classify_severity(content)
        bugs.append({
            "id": f"BUG-{i+1:03}",
            "file": file_path,
            "severity": severity,
            "priority": classify_priority(severity),
            # Extract first line as summary, capped at 80 chars.
            "summary": content.strip().split("\n")[0][:80],
        })

    return bugs


def generate_report(bugs: List[Dict]) -> None:

Check failure on line 84 in bug_triage.py

View workflow job for this annotation

GitHub Actions / ruff

ruff (UP006)

bug_triage.py:84:27: UP006 Use `list` instead of `List` for type annotation help: Replace with `list`
"""
Groups bugs by priority and writes a summarized Markdown report.
"""
date = datetime.now().strftime("%Y-%m-%d")

# Ensure the output directory exists
if not os.path.exists(OUTPUT_PATH):
os.makedirs(OUTPUT_PATH)

output_file = os.path.join(OUTPUT_PATH, f"bug-triage-{date}.md")

# Filter bugs into priority buckets
p1 = [b for b in bugs if b["priority"] == "P1"]
p2 = [b for b in bugs if b["priority"] == "P2"]
p3 = [b for b in bugs if b["priority"] == "P3"]
p4 = [b for b in bugs if b["priority"] == "P4"]

report_content = [
"# Bug Triage Report",
f"**Date**: {date} ",
f"**Open bugs processed**: {len(bugs)}",
"\n---\n",
"## Triage Summary\n",
"| Priority | Count |",
"|----------|-------|",
f"| P1 | {len(p1)} |",
f"| P2 | {len(p2)} |",
f"| P3 | {len(p3)} |",
f"| P4 | {len(p4)} |",
"\n---\n",
"## P1 Bugs (Critical)"
]

for b in p1:
report_content.append(f"- {b['id']} | {b['severity']} | {b['summary']}")

report_content.append("\n## P2 Bugs (High)")
for b in p2:
report_content.append(f"- {b['id']} | {b['severity']} | {b['summary']}")

report_content.append("\n## Backlog (P3/P4)")
for b in p3 + p4:
report_content.append(f"- {b['id']} | {b['severity']} | {b['summary']}")

with open(output_file, "w", encoding="utf-8") as f:
f.write("\n".join(report_content))

print(f"✅ Report successfully generated at: {output_file}")


if __name__ == "__main__":
    # Entry point: scan reports, then either summarize or explain the miss.
    extracted_bugs = read_bugs()

    if extracted_bugs:
        generate_report(extracted_bugs)
    else:
        print(f"❌ No bug files found in: {BUG_PATH}")
        print("Tip: Ensure the directory exists and contains .md files.")