Compare commits

...

3 Commits

Author SHA1 Message Date
Nikhil Sonti
1b4aaa75b6 fix(ci): show warning icon for zero-test suites instead of failure 2026-04-15 21:35:36 -07:00
Nikhil Sonti
25320458b9 fix(ci): use payload SHA for staleness check, handle missing artifacts
- Replace context.sha (merge commit SHA) with
  context.payload.pull_request.head.sha so the staleness guard
  compares the correct values and the comment actually gets posted
- Add continue-on-error to download-artifact so cancelled runs
  gracefully fall through to the "no test results" message
2026-04-15 21:05:40 -07:00
Nikhil Sonti
6f9da20fe1 fix(ci): add PR comment with test summary and block on failure
Add a `comment` job to the test workflow that parses JUnit XML artifacts
and posts a sticky PR comment showing pass/fail counts per suite, with
failed test names listed in a collapsible section and a link to the run.

Guards against fork PRs (read-only token) and stale overlapping runs
(skips comment if PR head has moved past our SHA).
2026-04-15 20:54:47 -07:00

View File

@@ -136,3 +136,124 @@ jobs:
echo "See the uploaded \`junit-${{ matrix.suite }}\` artifact for details." >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
# Posts a sticky PR comment summarizing test results. Runs via always()
# so a failing `test` job still gets its summary reported.
comment:
  name: PR test summary
  needs: test
  # Skip fork PRs: the default token is read-only there, so the comment
  # API call would fail (hence the head.repo.full_name guard).
  if: >-
    always()
    && github.event_name == 'pull_request'
    && github.event.pull_request.head.repo.full_name == github.repository
  runs-on: ubuntu-latest
  permissions:
    pull-requests: write  # create/update the PR comment
    actions: read         # download artifacts from this run
  steps:
    - name: Download JUnit artifacts
      uses: actions/download-artifact@v4
      # A cancelled/failed test job may have uploaded nothing; fall
      # through to the "no test results" message instead of failing here.
      continue-on-error: true
      with:
        path: junit
        pattern: junit-*
- name: Build comment body
  run: |
    python3 <<'PY'
    import glob, os, xml.etree.ElementTree as ET

    # Summarize the downloaded JUnit XML artifacts into a Markdown body
    # written to comment.md (consumed by the "Upsert sticky PR comment" step).
    run_url = f"{os.environ['GITHUB_SERVER_URL']}/{os.environ['GITHUB_REPOSITORY']}/actions/runs/{os.environ['GITHUB_RUN_ID']}"
    # Hidden marker the upsert step searches for to find the sticky comment.
    marker = "<!-- browseros-agent-tests-summary -->"
    suites = []
    failed_cases = []
    total_tests = total_failed = total_skipped = 0
    # Recursive glob: download-artifact@v4 preserves the uploaded directory
    # structure inside each artifact, so reports may live in subdirectories
    # below junit/junit-<suite>/. "**" also matches zero directories, so
    # flat layouts (junit/junit-<suite>/report.xml) keep working.
    for xml_path in sorted(glob.glob("junit/junit-*/**/*.xml", recursive=True)):
        # Suite name = the artifact directory (first path component under
        # junit/), NOT the XML file's immediate parent, which may be a
        # nested subdirectory from the upload.
        rel = os.path.relpath(xml_path, "junit")
        suite_name = rel.split(os.sep)[0].removeprefix("junit-")
        try:
            root = ET.parse(xml_path).getroot()
        except ET.ParseError:
            # Surface an unparseable report as a single failing test rather
            # than silently dropping the suite.
            suites.append({"name": suite_name, "passed": 0, "failed": 1, "skipped": 0, "total": 1})
            total_tests += 1
            total_failed += 1
            failed_cases.append((suite_name, "(could not parse junit XML)"))
            continue
        # A report is either a bare <testsuite> or a <testsuites> wrapper.
        testsuites = root.findall("testsuite") if root.tag == "testsuites" else [root]
        s_tests = s_fail = s_err = s_skip = 0
        for ts in testsuites:
            # Count attributes may be absent; treat missing as 0.
            s_tests += int(ts.get("tests") or 0)
            s_fail += int(ts.get("failures") or 0)
            s_err += int(ts.get("errors") or 0)
            s_skip += int(ts.get("skipped") or 0)
            for tc in ts.iter("testcase"):
                if tc.find("failure") is not None or tc.find("error") is not None:
                    cls = tc.get("classname") or ""
                    name = tc.get("name") or "(unnamed)"
                    label = f"{cls} > {name}" if cls else name
                    failed_cases.append((suite_name, label))
        s_failed = s_fail + s_err  # errors count as failures in the summary
        s_passed = max(s_tests - s_failed - s_skip, 0)
        suites.append({"name": suite_name, "passed": s_passed, "failed": s_failed, "skipped": s_skip, "total": s_tests})
        total_tests += s_tests
        total_failed += s_failed
        total_skipped += s_skip
    total_passed = max(total_tests - total_failed - total_skipped, 0)
    if total_tests == 0:
        # No artifacts (e.g. cancelled run): warn instead of claiming failure.
        header = "## :warning: No test results were produced"
    elif total_failed == 0:
        header = f"## :white_check_mark: Tests passed — {total_passed}/{total_tests}"
    else:
        header = f"## :x: Tests failed — {total_failed}/{total_tests} failed"
    lines = [marker, header, ""]
    if suites:
        lines.append("| Suite | Passed | Failed | Skipped |")
        lines.append("|-------|--------|--------|---------|")
        for s in suites:
            # Zero-test suites get a warning icon rather than a failure mark.
            icon = ":white_check_mark:" if s["failed"] == 0 and s["total"] > 0 else ":warning:" if s["total"] == 0 else ":x:"
            lines.append(f"| {icon} `{s['name']}` | {s['passed']}/{s['total']} | {s['failed']} | {s['skipped']} |")
    if failed_cases:
        lines += ["", "<details open>", "<summary><b>Failed tests</b></summary>", ""]
        # Cap the list so a mass failure doesn't blow up the comment size.
        for suite_name, label in failed_cases[:50]:
            lines.append(f"- **{suite_name}** — `{label}`")
        if len(failed_cases) > 50:
            lines.append(f"- …and {len(failed_cases) - 50} more")
        lines += ["", "</details>"]
    lines += ["", f"[View workflow run]({run_url})"]
    with open("comment.md", "w") as f:
        f.write("\n".join(lines) + "\n")
    PY
- name: Upsert sticky PR comment
uses: actions/github-script@v7
with:
script: |
// Update-or-create a single "sticky" summary comment on the PR,
// reading the body produced by the "Build comment body" step.
const fs = require('fs');
const body = fs.readFileSync('comment.md', 'utf8');
// Must match the marker the build step embeds at the top of comment.md.
const marker = '<!-- browseros-agent-tests-summary -->';
const { owner, repo } = context.repo;
const issue_number = context.payload.pull_request.number;
// Use the payload's head SHA, not context.sha (which is the merge
// commit SHA), so the staleness comparison below is apples-to-apples.
const triggerSha = context.payload.pull_request.head.sha;
const { data: pr } = await github.rest.pulls.get({ owner, repo, pull_number: issue_number });
// Staleness guard: if the PR head moved while this run was in flight,
// a newer run owns the comment — skip rather than post stale results.
if (pr.head.sha !== triggerSha) {
core.info(`PR head has moved (${pr.head.sha} vs ${triggerSha}) — skipping stale comment.`);
return;
}
// Paginate: the marker comment may sit beyond the first page.
const comments = await github.paginate(github.rest.issues.listComments, {
owner, repo, issue_number, per_page: 100,
});
const existing = comments.find(c => c.body && c.body.includes(marker));
// Update in place when the sticky comment exists; otherwise create it.
if (existing) {
await github.rest.issues.updateComment({ owner, repo, comment_id: existing.id, body });
} else {
await github.rest.issues.createComment({ owner, repo, issue_number, body });
}