From 251a3a7cb1ea88b30dbcd3931740ff7793fd7414 Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:15:29 -0400 Subject: [PATCH 1/8] =?UTF-8?q?tools(github):=20poll-pr-gate.ts=20v0=20?= =?UTF-8?q?=E2=80=94=20promote=20prose-jq=20to=20executable=20(5-AI=20conv?= =?UTF-8?q?ergent)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes part 1 of task #355. 5-AI convergence (Amara 2nd, Deepseek 4th, Alexia 5th, Ani 3rd, Gemini 4th — all 2026-04-30) on promoting the inline jq snippets in `memory/feedback_amara_poll_gate_not_ending_holding_is_not_status_2026_04_30.md` into a tested executable. Amara's blade: *"if the loop uses it every tick, it deserves tests."* This is **v0** — skeleton + minimal happy-path query. Works live against `gh pr view --json` + a paired `gh api graphql` call for review threads. Fixture mode for offline testing. Output shape per Amara's spec: ```json { "number": 917, "state": "OPEN", "gate": "CLEAN" | "BLOCKED" | "DIRTY" | "UNSTABLE" | "UNKNOWN", "checks": { "ok": N, "inProgress": N, "pending": N, "failed": N }, "unresolvedThreads": N, "autoMerge": "armed" | "none", "mergeCommit": "" | null, "nextAction": "wait-ci" | "resolve-threads" | "rebase" | "verify-merge" | "none" } ``` Required-check semantics (per Amara 2nd's GitHub-docs verification): - Merge-satisfying: `SUCCESS`, `NEUTRAL`, `SKIPPED` - Blocking: `FAILURE`, `CANCELLED`, `TIMED_OUT`, `STARTUP_FAILURE`, `ACTION_REQUIRED`, `STALE` - Pending: `QUEUED`, `PENDING`, `IN_PROGRESS` Verified against: - Live PR #915 (just merged) → state=MERGED, gate=CLEAN, nextAction=verify-merge - Live PR #919 (just merged) → state=MERGED, gate=CLEAN, nextAction=verify-merge - Fixture clean-armed-auto-merge → gate=BLOCKED, nextAction=none (auto-merge does the babysitting) - Fixture blocked-by-threads → gate=BLOCKED, unresolvedThreads=3, nextAction=resolve-threads Two fixtures land with v0; matrix coverage (CheckRun SUCCESS/SKIPPED/ NEUTRAL/STALE × 
StatusContext × pending × mixed × missing-conclusion) follows in subsequent slices. Per substrate-rate this is a v0 commit; expanding fixtures and adding a test runner are queued under task #355. The memory file should stop being the implementation. It now points to this file. Subsequent PR will add a top-of-memory pointer. Composes with Aaron's substrate-IS-product framing — executable substrate IS substrate-quality work; the factory's tooling-product deserves the same honest-substrate discipline as the substrate- product. Slice 22 of the TS+Bun migration trajectory (B-0086). Co-Authored-By: Claude Opus 4.7 --- tools/github/fixtures/blocked-by-threads.json | 20 ++ .../fixtures/clean-armed-auto-merge.json | 14 + tools/github/poll-pr-gate.ts | 295 ++++++++++++++++++ 3 files changed, 329 insertions(+) create mode 100644 tools/github/fixtures/blocked-by-threads.json create mode 100644 tools/github/fixtures/clean-armed-auto-merge.json create mode 100755 tools/github/poll-pr-gate.ts diff --git a/tools/github/fixtures/blocked-by-threads.json b/tools/github/fixtures/blocked-by-threads.json new file mode 100644 index 00000000..543bcf18 --- /dev/null +++ b/tools/github/fixtures/blocked-by-threads.json @@ -0,0 +1,20 @@ +{ + "number": 915, + "state": "OPEN", + "mergeStateStatus": "BLOCKED", + "autoMergeRequest": null, + "mergeCommit": null, + "statusCheckRollup": [ + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "build" }, + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "lint" }, + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "memory-index-integrity" } + ], + "reviewThreads": { + "nodes": [ + { "isResolved": false }, + { "isResolved": false }, + { "isResolved": false }, + { "isResolved": true } + ] + } +} diff --git a/tools/github/fixtures/clean-armed-auto-merge.json b/tools/github/fixtures/clean-armed-auto-merge.json new file mode 100644 index 00000000..dd5c5ed8 --- /dev/null +++ b/tools/github/fixtures/clean-armed-auto-merge.json @@ -0,0 
+1,14 @@ +{ + "number": 917, + "state": "OPEN", + "mergeStateStatus": "BLOCKED", + "autoMergeRequest": { "enabledAt": "2026-04-30T14:08:53Z" }, + "mergeCommit": null, + "statusCheckRollup": [ + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "build" }, + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "lint" }, + { "status": "COMPLETED", "conclusion": "NEUTRAL", "name": "skipped-rule" }, + { "status": "COMPLETED", "conclusion": "SKIPPED", "name": "Analyze (csharp)" } + ], + "reviewThreads": { "nodes": [{ "isResolved": true }, { "isResolved": true }] } +} diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts new file mode 100755 index 00000000..0a26a1e7 --- /dev/null +++ b/tools/github/poll-pr-gate.ts @@ -0,0 +1,295 @@ +#!/usr/bin/env bun +// poll-pr-gate.ts — query GitHub PR gate state for the autonomous loop. +// +// TypeScript+Bun port replacing the inline `gh pr view --json` + jq +// snippets that the poll-the-gate memory file describes +// (memory/feedback_amara_poll_gate_not_ending_holding_is_not_status_2026_04_30.md). +// +// Origin: 5-AI convergence (Amara 2nd, Deepseek 4th, Alexia 5th, Ani 3rd, +// Gemini 4th — all 2026-04-30) on promoting prose-jq to executable. +// Amara's blade: "if the loop uses it every tick, it deserves tests." +// +// This is **v0**: skeleton + minimal happy-path query. Fixtures and +// matrix tests follow in subsequent slices. The memory file should +// stop being the implementation; it should point to this file. 
+// +// Usage: +// bun tools/github/poll-pr-gate.ts +// bun tools/github/poll-pr-gate.ts --owner Lucent-Financial-Group --repo Zeta +// bun tools/github/poll-pr-gate.ts --fixture tools/github/fixtures/blocked-with-threads.json +// +// Output: one JSON object on stdout, shape: +// { +// "number": 917, +// "state": "OPEN" | "MERGED" | "CLOSED", +// "gate": "CLEAN" | "BLOCKED" | "DIRTY" | "UNSTABLE" | "UNKNOWN", +// "checks": { "ok": 23, "inProgress": 0, "pending": 0, "failed": 0 }, +// "unresolvedThreads": 0, +// "autoMerge": "armed" | "none", +// "mergeCommit": "0ec21ebe..." | null, +// "nextAction": "wait-ci" | "resolve-threads" | "rebase" | "verify-merge" | "none" +// } +// +// Exit codes: +// 0 — query succeeded, JSON emitted +// 1 — invocation / dependency error +// 2 — gh CLI returned non-zero +// +// Required-check semantics (per Amara 2nd's GitHub-docs verification): +// SUCCESS / NEUTRAL / SKIPPED are merge-satisfying; FAILURE / CANCELLED +// / TIMED_OUT / STARTUP_FAILURE / ACTION_REQUIRED / STALE block. 
+ +import { spawnSync } from "node:child_process"; +import { readFileSync } from "node:fs"; + +type GateState = "CLEAN" | "BLOCKED" | "DIRTY" | "UNSTABLE" | "UNKNOWN"; +type NextAction = + | "wait-ci" + | "resolve-threads" + | "rebase" + | "verify-merge" + | "none"; + +interface CheckRollupItem { + status?: string; + conclusion?: string; + name?: string; +} + +interface ReviewThreadNode { + isResolved: boolean; +} + +interface PullRequestData { + number: number; + state: string; + mergeStateStatus: string; + autoMergeRequest: { enabledAt?: string } | null; + mergeCommit: { oid: string } | null; + statusCheckRollup: CheckRollupItem[]; + reviewThreads: { nodes: ReviewThreadNode[] }; +} + +interface GateReport { + number: number; + state: string; + gate: GateState; + checks: { + ok: number; + inProgress: number; + pending: number; + failed: number; + }; + unresolvedThreads: number; + autoMerge: "armed" | "none"; + mergeCommit: string | null; + nextAction: NextAction; +} + +const OK_CONCLUSIONS = new Set(["SUCCESS", "NEUTRAL", "SKIPPED"]); +const BLOCKING_CONCLUSIONS = new Set([ + "FAILURE", + "CANCELLED", + "TIMED_OUT", + "STARTUP_FAILURE", + "ACTION_REQUIRED", + "STALE", +]); +const PENDING_STATUSES = new Set(["QUEUED", "PENDING"]); + +function classifyChecks(rollup: CheckRollupItem[]): GateReport["checks"] { + let ok = 0; + let inProgress = 0; + let pending = 0; + let failed = 0; + for (const c of rollup) { + if (c.status === "IN_PROGRESS") { + inProgress++; + continue; + } + if (c.status && PENDING_STATUSES.has(c.status)) { + pending++; + continue; + } + if (c.conclusion && OK_CONCLUSIONS.has(c.conclusion)) { + ok++; + continue; + } + if (c.conclusion && BLOCKING_CONCLUSIONS.has(c.conclusion)) { + failed++; + } + } + return { ok, inProgress, pending, failed }; +} + +function classifyGate( + mergeStateStatus: string, + state: string, + checks: GateReport["checks"], + unresolvedThreads: number, +): GateState { + if (state === "MERGED") return "CLEAN"; + if (state === 
"CLOSED") return "CLEAN"; + if (mergeStateStatus === "DIRTY") return "DIRTY"; + if (mergeStateStatus === "UNSTABLE") return "UNSTABLE"; + if (checks.failed > 0) return "BLOCKED"; + if (mergeStateStatus === "BLOCKED") return "BLOCKED"; + if (mergeStateStatus === "CLEAN" && unresolvedThreads === 0) return "CLEAN"; + return "UNKNOWN"; +} + +function nextAction(report: Omit): NextAction { + if (report.state === "MERGED") return "verify-merge"; + if (report.gate === "DIRTY") return "rebase"; + if (report.checks.failed > 0) return "resolve-threads"; + if (report.unresolvedThreads > 0) return "resolve-threads"; + if (report.checks.inProgress > 0 || report.checks.pending > 0) { + return "wait-ci"; + } + return "none"; +} + +function buildReport(pr: PullRequestData): GateReport { + const checks = classifyChecks(pr.statusCheckRollup ?? []); + const unresolvedThreads = (pr.reviewThreads?.nodes ?? []).filter( + (t) => !t.isResolved, + ).length; + const gate = classifyGate( + pr.mergeStateStatus, + pr.state, + checks, + unresolvedThreads, + ); + const partial: Omit = { + number: pr.number, + state: pr.state, + gate, + checks, + unresolvedThreads, + autoMerge: pr.autoMergeRequest ? "armed" : "none", + mergeCommit: pr.mergeCommit?.oid ?? null, + }; + return { ...partial, nextAction: nextAction(partial) }; +} + +function fetchPR( + owner: string, + repo: string, + number: number, +): PullRequestData { + // Use `gh pr view --json` which flattens StatusCheckRollup into a uniform + // array (CheckRun + StatusContext both surfaced as items with status/ + // conclusion/name fields). Pair with a separate `gh api graphql` call for + // reviewThreads since `gh pr view --json reviewThreads` is not supported. 
+ const prResult = spawnSync( + "gh", + [ + "pr", + "view", + String(number), + "--repo", + `${owner}/${repo}`, + "--json", + "number,state,mergeStateStatus,autoMergeRequest,mergeCommit,statusCheckRollup", + ], + { encoding: "utf8" }, + ); + if (prResult.status !== 0) { + process.stderr.write(`gh pr view failed: ${prResult.stderr}\n`); + process.exit(2); + } + const pr = JSON.parse(prResult.stdout); + + const threadsResult = spawnSync( + "gh", + [ + "api", + "graphql", + "-f", + `query=query($o:String!,$r:String!,$n:Int!){repository(owner:$o,name:$r){pullRequest(number:$n){reviewThreads(first:50){nodes{isResolved}}}}}`, + "-F", + `o=${owner}`, + "-F", + `r=${repo}`, + "-F", + `n=${number}`, + ], + { encoding: "utf8" }, + ); + if (threadsResult.status !== 0) { + process.stderr.write(`gh api graphql (threads) failed: ${threadsResult.stderr}\n`); + process.exit(2); + } + const parsed = JSON.parse(threadsResult.stdout); + const reviewThreads = + parsed.data?.repository?.pullRequest?.reviewThreads ?? { nodes: [] }; + + // gh pr view returns StatusContext items with .state instead of + // .status/.conclusion; normalise to the CheckRun shape. + const rollup = (pr.statusCheckRollup ?? []).map( + (c: Record) => { + if (typeof c.state === "string" && c.status === undefined) { + const state = c.state as string; + return { + name: (c.context as string | undefined) ?? (c.name as string | undefined), + status: state === "PENDING" ? "PENDING" : "COMPLETED", + conclusion: state === "PENDING" ? 
undefined : state, + }; + } + return c; + }, + ); + return { ...pr, statusCheckRollup: rollup, reviewThreads }; +} + +function loadFixture(path: string): PullRequestData { + return JSON.parse(readFileSync(path, "utf8")) as PullRequestData; +} + +function parseArgs(argv: string[]): { + fixture?: string; + owner: string; + repo: string; + number?: number; +} { + let fixture: string | undefined; + let owner = "Lucent-Financial-Group"; + let repo = "Zeta"; + let number: number | undefined; + for (let i = 0; i < argv.length; i++) { + const arg = argv[i]; + if (arg === "--fixture") fixture = argv[++i]; + else if (arg === "--owner") owner = argv[++i]; + else if (arg === "--repo") repo = argv[++i]; + else if (/^\d+$/.test(arg)) number = Number.parseInt(arg, 10); + else if (arg === "--help" || arg === "-h") { + process.stdout.write( + "Usage: poll-pr-gate.ts [--owner X] [--repo Y]\n" + + " poll-pr-gate.ts --fixture path/to/fixture.json\n", + ); + process.exit(0); + } else { + process.stderr.write(`unknown arg: ${arg}\n`); + process.exit(1); + } + } + return { fixture, owner, repo, number }; +} + +function main(): void { + const argv = process.argv.slice(2); + const args = parseArgs(argv); + let pr: PullRequestData; + if (args.fixture) { + pr = loadFixture(args.fixture); + } else if (args.number !== undefined) { + pr = fetchPR(args.owner, args.repo, args.number); + } else { + process.stderr.write("must provide PR number or --fixture\n"); + process.exit(1); + } + const report = buildReport(pr); + process.stdout.write(`${JSON.stringify(report, null, 2)}\n`); +} + +main(); From a7b8e26e82842c7d985526b95d1492eec10366bc Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:17:56 -0400 Subject: [PATCH 2/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20parseArgs=20?= =?UTF-8?q?=E2=80=94=20exactOptionalPropertyTypes=20compliance?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit CI lint(tsc tools) caught 4 type errors in 
parseArgs caused by the repo's strict tsconfig (`exactOptionalPropertyTypes: true` + `noUncheckedIndexedAccess: true`): - `argv[++i]` returns `string | undefined` under noUncheckedIndexedAccess - The return-object literal with `{ fixture: string | undefined, ... }` doesn't satisfy `{ fixture?: string }` under exactOptionalPropertyTypes Fix: build the return object incrementally, only assigning the optional fields when their value is actually defined. Hoist the shape into a named `ParsedArgs` interface for clarity. This is exactly the kind of catch the dogfood-self-test would have caught locally if I'd run tsc before pushing — slot for a pre-push typecheck hint in a follow-up. Local verification: $ bunx tsc --noEmit -p . | grep poll-pr-gate (no output) $ bun tools/github/poll-pr-gate.ts --fixture tools/github/fixtures/clean-armed-auto-merge.json (correct output) Co-Authored-By: Claude Opus 4.7 --- tools/github/poll-pr-gate.ts | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index 0a26a1e7..247c763b 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -246,23 +246,33 @@ function loadFixture(path: string): PullRequestData { return JSON.parse(readFileSync(path, "utf8")) as PullRequestData; } -function parseArgs(argv: string[]): { +interface ParsedArgs { fixture?: string; owner: string; repo: string; number?: number; -} { - let fixture: string | undefined; - let owner = "Lucent-Financial-Group"; - let repo = "Zeta"; - let number: number | undefined; +} + +function parseArgs(argv: string[]): ParsedArgs { + const out: ParsedArgs = { + owner: "Lucent-Financial-Group", + repo: "Zeta", + }; for (let i = 0; i < argv.length; i++) { const arg = argv[i]; - if (arg === "--fixture") fixture = argv[++i]; - else if (arg === "--owner") owner = argv[++i]; - else if (arg === "--repo") repo = argv[++i]; - else if (/^\d+$/.test(arg)) number = 
Number.parseInt(arg, 10); - else if (arg === "--help" || arg === "-h") { + if (arg === undefined) continue; + if (arg === "--fixture") { + const v = argv[++i]; + if (v !== undefined) out.fixture = v; + } else if (arg === "--owner") { + const v = argv[++i]; + if (v !== undefined) out.owner = v; + } else if (arg === "--repo") { + const v = argv[++i]; + if (v !== undefined) out.repo = v; + } else if (/^\d+$/.test(arg)) { + out.number = Number.parseInt(arg, 10); + } else if (arg === "--help" || arg === "-h") { process.stdout.write( "Usage: poll-pr-gate.ts [--owner X] [--repo Y]\n" + " poll-pr-gate.ts --fixture path/to/fixture.json\n", @@ -273,7 +283,7 @@ function parseArgs(argv: string[]): { process.exit(1); } } - return { fixture, owner, repo, number }; + return out; } function main(): void { From 27e63d6243da77657ba9f9f6b34267aadb72fe89 Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:22:30 -0400 Subject: [PATCH 3/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20p?= =?UTF-8?q?agination=20+=20StatusContext=20ERROR/EXPECTED=20+=20fix-failed?= =?UTF-8?q?-checks=20action=20(Codex=20P1=C3=972)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Two real defects from Codex P1 review on PR #921 v0: 1. **Pagination missing** (line 209): query was `reviewThreads(first:50)` which truncates discussion-heavy PRs. Switched to `gh api graphql --paginate` with `pageInfo{hasNextPage endCursor}` + `after:$endCursor` cursor. Aggregates nodes across all pages. 2. **StatusContext EXPECTED/ERROR not classified** (line 236): the normalization treated every non-PENDING state as COMPLETED + raw conclusion, but classifyChecks's OK_CONCLUSIONS / BLOCKING_CONCLUSIONS sets didn't include `ERROR`. EXPECTED states (StatusContext "queued" equivalent) weren't mapped to pending either. Real defect: tools would silently miss CI errors on StatusContext-class checks. 
Fix: - Added `ERROR` to BLOCKING_CONCLUSIONS - Added `EXPECTED` to PENDING_STATE_LITERALS (maps to status=PENDING) - Extracted normalization into `normalizeRollup()` so fixture-mode and live-mode classify identically (caught only because dogfooding against PR #921 itself revealed live had different shape than fixtures) 3. **Bonus: fix-failed-checks vs resolve-threads action distinction** — previously both code paths returned `resolve-threads` whether the block was failed CI or unresolved review threads. Added explicit `fix-failed-checks` action so the agent gets a precise next-step indicator. Added a third fixture `status-context-error.json` covering ERROR + EXPECTED states. Now classifies as: 1 ok / 1 pending / 1 failed → nextAction=fix-failed-checks. All three fixtures + live PR #921 verified. The dogfood pattern is working — the Codex flag landed because the script was running and producing inspectable output that revealed gaps the reviewer's static analysis caught. Co-Authored-By: Claude Opus 4.7 --- .../github/fixtures/status-context-error.json | 13 +++ tools/github/poll-pr-gate.ts | 90 ++++++++++++++----- 2 files changed, 79 insertions(+), 24 deletions(-) create mode 100644 tools/github/fixtures/status-context-error.json diff --git a/tools/github/fixtures/status-context-error.json b/tools/github/fixtures/status-context-error.json new file mode 100644 index 00000000..b358545a --- /dev/null +++ b/tools/github/fixtures/status-context-error.json @@ -0,0 +1,13 @@ +{ + "number": 999, + "state": "OPEN", + "mergeStateStatus": "BLOCKED", + "autoMergeRequest": null, + "mergeCommit": null, + "statusCheckRollup": [ + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "build" }, + { "state": "ERROR", "context": "external/integration-check", "description": "upstream failed" }, + { "state": "EXPECTED", "context": "deploy/preview", "description": "queued" } + ], + "reviewThreads": { "nodes": [] } +} diff --git a/tools/github/poll-pr-gate.ts 
b/tools/github/poll-pr-gate.ts index 247c763b..b0318fb3 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -27,7 +27,7 @@ // "unresolvedThreads": 0, // "autoMerge": "armed" | "none", // "mergeCommit": "0ec21ebe..." | null, -// "nextAction": "wait-ci" | "resolve-threads" | "rebase" | "verify-merge" | "none" +// "nextAction": "wait-ci" | "fix-failed-checks" | "resolve-threads" | "rebase" | "verify-merge" | "none" // } // // Exit codes: @@ -45,6 +45,7 @@ import { readFileSync } from "node:fs"; type GateState = "CLEAN" | "BLOCKED" | "DIRTY" | "UNSTABLE" | "UNKNOWN"; type NextAction = | "wait-ci" + | "fix-failed-checks" | "resolve-threads" | "rebase" | "verify-merge" @@ -94,8 +95,15 @@ const BLOCKING_CONCLUSIONS = new Set([ "STARTUP_FAILURE", "ACTION_REQUIRED", "STALE", + // StatusContext-class blocking states (per Codex P1): + "ERROR", +]); +const PENDING_STATUSES = new Set([ + "QUEUED", + "PENDING", + // StatusContext-class pending state (per Codex P1): + "EXPECTED", ]); -const PENDING_STATUSES = new Set(["QUEUED", "PENDING"]); function classifyChecks(rollup: CheckRollupItem[]): GateReport["checks"] { let ok = 0; @@ -141,7 +149,7 @@ function classifyGate( function nextAction(report: Omit): NextAction { if (report.state === "MERGED") return "verify-merge"; if (report.gate === "DIRTY") return "rebase"; - if (report.checks.failed > 0) return "resolve-threads"; + if (report.checks.failed > 0) return "fix-failed-checks"; if (report.unresolvedThreads > 0) return "resolve-threads"; if (report.checks.inProgress > 0 || report.checks.pending > 0) { return "wait-ci"; @@ -200,13 +208,17 @@ function fetchPR( } const pr = JSON.parse(prResult.stdout); + // Paginate review threads — discussion-heavy PRs can have >50. + // gh's --paginate flag follows pageInfo for any cursor field named + // `endCursor`; we expose the cursor in our query so it works. 
const threadsResult = spawnSync( "gh", [ "api", "graphql", + "--paginate", "-f", - `query=query($o:String!,$r:String!,$n:Int!){repository(owner:$o,name:$r){pullRequest(number:$n){reviewThreads(first:50){nodes{isResolved}}}}}`, + `query=query($o:String!,$r:String!,$n:Int!,$endCursor:String){repository(owner:$o,name:$r){pullRequest(number:$n){reviewThreads(first:100,after:$endCursor){pageInfo{hasNextPage endCursor}nodes{isResolved}}}}}`, "-F", `o=${owner}`, "-F", @@ -220,30 +232,60 @@ function fetchPR( process.stderr.write(`gh api graphql (threads) failed: ${threadsResult.stderr}\n`); process.exit(2); } - const parsed = JSON.parse(threadsResult.stdout); - const reviewThreads = - parsed.data?.repository?.pullRequest?.reviewThreads ?? { nodes: [] }; + // gh --paginate emits one JSON object per page on stdout, separated by + // newlines (NDJSON-style for gh-graphql output). Aggregate the nodes. + const allNodes: ReviewThreadNode[] = []; + for (const line of threadsResult.stdout.split("\n")) { + if (!line.trim()) continue; + const parsed = JSON.parse(line); + const nodes: ReviewThreadNode[] = + parsed.data?.repository?.pullRequest?.reviewThreads?.nodes ?? []; + allNodes.push(...nodes); + } + const reviewThreads = { nodes: allNodes }; - // gh pr view returns StatusContext items with .state instead of - // .status/.conclusion; normalise to the CheckRun shape. - const rollup = (pr.statusCheckRollup ?? []).map( - (c: Record) => { - if (typeof c.state === "string" && c.status === undefined) { - const state = c.state as string; - return { - name: (c.context as string | undefined) ?? (c.name as string | undefined), - status: state === "PENDING" ? "PENDING" : "COMPLETED", - conclusion: state === "PENDING" ? undefined : state, - }; - } - return c; - }, - ); - return { ...pr, statusCheckRollup: rollup, reviewThreads }; + return { + ...pr, + statusCheckRollup: normalizeRollup(pr.statusCheckRollup ?? 
[]), + reviewThreads, + }; +} + +// StatusContext items (gh pr view --json output for non-CheckRun checks) +// expose .state instead of .status/.conclusion. Normalise to the CheckRun +// shape so classifyChecks's OK_CONCLUSIONS / BLOCKING_CONCLUSIONS sets +// pick them up. StatusContext states per GitHub schema: SUCCESS | FAILURE +// | PENDING | ERROR | EXPECTED. PENDING and EXPECTED both map to +// status=PENDING (CI still running); the rest map to status=COMPLETED +// with state forwarded as conclusion (per Codex P1). +const PENDING_STATE_LITERALS = new Set(["PENDING", "EXPECTED"]); +function normalizeRollup(rollup: unknown[]): CheckRollupItem[] { + return rollup.map((raw) => { + const c = raw as Record; + if (typeof c.state === "string" && c.status === undefined) { + const state = c.state as string; + const isPendingState = PENDING_STATE_LITERALS.has(state); + const name = + (c.context as string | undefined) ?? (c.name as string | undefined); + const item: CheckRollupItem = { + status: isPendingState ? "PENDING" : "COMPLETED", + }; + if (name !== undefined) item.name = name; + if (!isPendingState) item.conclusion = state; + return item; + } + return c as CheckRollupItem; + }); } function loadFixture(path: string): PullRequestData { - return JSON.parse(readFileSync(path, "utf8")) as PullRequestData; + const raw = JSON.parse(readFileSync(path, "utf8")) as PullRequestData; + // Apply the same StatusContext-state normalization as fetchPR so fixture + // mode and live mode classify identically (Codex P1). + return { + ...raw, + statusCheckRollup: normalizeRollup(raw.statusCheckRollup ?? 
[]), + }; } interface ParsedArgs { From 8f9b33844f3732d5fca667df97037d91da93961f Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:27:36 -0400 Subject: [PATCH 4/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20B?= =?UTF-8?q?EHIND=20state=20+=20spawn/parse=20error=20distinction=20+=20fla?= =?UTF-8?q?g-value=20validation=20+=20doc=20typo=20(Copilot=20P0+P1=C3=974?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Five Copilot-flagged real defects from PR #921 review pass on the prior commit (a7b8e26): 1. **BEHIND mergeStateStatus not handled** (Copilot P0, line 137) — `BEHIND` (base advanced past PR's merge-base — needs rebase) was unhandled in classifyGate, causing it to fall through to UNKNOWN. Added: `BEHIND` and `DIRTY` both produce gate=DIRTY, nextAction=rebase. Verified via new `behind-needs-rebase.json` fixture. 2. **spawnSync launch failure not distinguished from gh non-zero** (Copilot P1, line 200) — when `gh` is missing from PATH or couldn't be launched (ENOENT etc), spawnSync sets `result.error` but `result.status` is null, which my prior `status !== 0` check would have treated as a non-zero exit (exit code 2). Fix: extracted `runGhOrExit()` helper that distinguishes `result.error` (exit 1, dependency error) from `result.status !== 0` (exit 2, gh-side error). Both branches now have distinct stderr context tags. 3. **JSON.parse can throw on non-JSON output** (Copilot P1, line 202) — `gh` could emit non-JSON on auth errors, truncation, etc. Fix: extracted `parseJsonOrExit()` helper that catches parse errors, emits the first 200 bytes of input, and exits with code 3 (distinct from 1=invocation, 2=gh-side). 4. **--fixture/--owner/--repo missing-value validation** (Copilot P1, line 264) — passing `--owner` with no following value silently consumed nothing or grabbed an unrelated flag. 
Added `requireValue()` helper that exits 1 with a clear message if the next arg is missing or starts with `--`. 5. **Doc typo blocked-with → blocked-by** (Copilot P1, line 19) — usage example referenced `blocked-with-threads.json` but the file is `blocked-by-threads.json`. Mechanical fix. Two stale Copilot threads (line 156 fix-failed-checks distinction + line 236 StatusContext EXPECTED/ERROR) were already addressed in the prior commit (27e63d6). Resolving as stale-fixed. The line 295 "export main()" convention nit is deferred — the `tools/*.ts` harness varies; this can be cleaned up in a follow-up sweep. Exit code semantics now: 0 — success 1 — invocation / argument / dependency-missing 2 — gh CLI returned non-zero (auth, rate-limit, PR not found) 3 — gh output couldn't be parsed (truncated, non-JSON) Verified: all four fixtures + live PR #921 classify correctly. Missing-value test: `bun ... --owner` exits 1 with "--owner requires a value" message. Co-Authored-By: Claude Opus 4.7 --- .../github/fixtures/behind-needs-rebase.json | 12 +++ tools/github/poll-pr-gate.ts | 91 +++++++++++++------ 2 files changed, 76 insertions(+), 27 deletions(-) create mode 100644 tools/github/fixtures/behind-needs-rebase.json diff --git a/tools/github/fixtures/behind-needs-rebase.json b/tools/github/fixtures/behind-needs-rebase.json new file mode 100644 index 00000000..51a08cf2 --- /dev/null +++ b/tools/github/fixtures/behind-needs-rebase.json @@ -0,0 +1,12 @@ +{ + "number": 998, + "state": "OPEN", + "mergeStateStatus": "BEHIND", + "autoMergeRequest": null, + "mergeCommit": null, + "statusCheckRollup": [ + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "build" }, + { "status": "COMPLETED", "conclusion": "SUCCESS", "name": "lint" } + ], + "reviewThreads": { "nodes": [] } +} diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index b0318fb3..b021dfb4 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -16,7 +16,7 @@ // Usage: // 
bun tools/github/poll-pr-gate.ts // bun tools/github/poll-pr-gate.ts --owner Lucent-Financial-Group --repo Zeta -// bun tools/github/poll-pr-gate.ts --fixture tools/github/fixtures/blocked-with-threads.json +// bun tools/github/poll-pr-gate.ts --fixture tools/github/fixtures/blocked-by-threads.json // // Output: one JSON object on stdout, shape: // { @@ -138,7 +138,10 @@ function classifyGate( ): GateState { if (state === "MERGED") return "CLEAN"; if (state === "CLOSED") return "CLEAN"; - if (mergeStateStatus === "DIRTY") return "DIRTY"; + // DIRTY = merge conflict; BEHIND = base advanced past PR's merge-base + // (rebase/update needed). Both surface as "rebase" next-action under + // the DIRTY gate state per Copilot P0 — semantically the same fix. + if (mergeStateStatus === "DIRTY" || mergeStateStatus === "BEHIND") return "DIRTY"; if (mergeStateStatus === "UNSTABLE") return "UNSTABLE"; if (checks.failed > 0) return "BLOCKED"; if (mergeStateStatus === "BLOCKED") return "BLOCKED"; @@ -180,6 +183,38 @@ function buildReport(pr: PullRequestData): GateReport { return { ...partial, nextAction: nextAction(partial) }; } +// Distinct exit codes (per Copilot P1): +// 0 — success +// 1 — invocation / argument / dependency-missing error +// 2 — gh CLI returned non-zero (auth, rate-limit, PR not found) +// 3 — gh CLI output couldn't be parsed (truncated, non-JSON) +function runGhOrExit(args: string[], context: string): string { + const result = spawnSync("gh", args, { encoding: "utf8" }); + if (result.error) { + // ENOENT etc — gh is not on PATH or couldn't be launched + process.stderr.write(`${context}: failed to launch gh: ${result.error.message}\n`); + process.exit(1); + } + if (result.status !== 0) { + process.stderr.write( + `${context}: gh exited ${result.status}: ${result.stderr || result.stdout}\n`, + ); + process.exit(2); + } + return result.stdout; +} + +function parseJsonOrExit(raw: string, context: string): T { + try { + return JSON.parse(raw) as T; + } catch (err) 
{ + const msg = err instanceof Error ? err.message : String(err); + process.stderr.write(`${context}: JSON parse error: ${msg}\n`); + process.stderr.write(`first 200 bytes of output: ${raw.slice(0, 200)}\n`); + process.exit(3); + } +} + function fetchPR( owner: string, repo: string, @@ -189,8 +224,7 @@ function fetchPR( // array (CheckRun + StatusContext both surfaced as items with status/ // conclusion/name fields). Pair with a separate `gh api graphql` call for // reviewThreads since `gh pr view --json reviewThreads` is not supported. - const prResult = spawnSync( - "gh", + const prStdout = runGhOrExit( [ "pr", "view", @@ -200,19 +234,14 @@ function fetchPR( "--json", "number,state,mergeStateStatus,autoMergeRequest,mergeCommit,statusCheckRollup", ], - { encoding: "utf8" }, + "fetchPR.gh-pr-view", ); - if (prResult.status !== 0) { - process.stderr.write(`gh pr view failed: ${prResult.stderr}\n`); - process.exit(2); - } - const pr = JSON.parse(prResult.stdout); + const pr = parseJsonOrExit>(prStdout, "fetchPR.gh-pr-view"); // Paginate review threads — discussion-heavy PRs can have >50. // gh's --paginate flag follows pageInfo for any cursor field named // `endCursor`; we expose the cursor in our query so it works. - const threadsResult = spawnSync( - "gh", + const threadsStdout = runGhOrExit( [ "api", "graphql", @@ -226,27 +255,31 @@ function fetchPR( "-F", `n=${number}`, ], - { encoding: "utf8" }, + "fetchPR.gh-graphql-threads", ); - if (threadsResult.status !== 0) { - process.stderr.write(`gh api graphql (threads) failed: ${threadsResult.stderr}\n`); - process.exit(2); - } // gh --paginate emits one JSON object per page on stdout, separated by // newlines (NDJSON-style for gh-graphql output). Aggregate the nodes. 
const allNodes: ReviewThreadNode[] = []; - for (const line of threadsResult.stdout.split("\n")) { + for (const line of threadsStdout.split("\n")) { if (!line.trim()) continue; - const parsed = JSON.parse(line); + const parsed = parseJsonOrExit<{ + data?: { + repository?: { + pullRequest?: { reviewThreads?: { nodes?: ReviewThreadNode[] } }; + }; + }; + }>(line, "fetchPR.gh-graphql-threads.page"); const nodes: ReviewThreadNode[] = parsed.data?.repository?.pullRequest?.reviewThreads?.nodes ?? []; allNodes.push(...nodes); } const reviewThreads = { nodes: allNodes }; + const prNarrowed = pr as unknown as PullRequestData; + const rollup = (prNarrowed.statusCheckRollup ?? []) as unknown[]; return { - ...pr, - statusCheckRollup: normalizeRollup(pr.statusCheckRollup ?? []), + ...prNarrowed, + statusCheckRollup: normalizeRollup(rollup), reviewThreads, }; } @@ -300,18 +333,22 @@ function parseArgs(argv: string[]): ParsedArgs { owner: "Lucent-Financial-Group", repo: "Zeta", }; + const requireValue = (flag: string, v: string | undefined): string => { + if (v === undefined || v.startsWith("--")) { + process.stderr.write(`${flag} requires a value\n`); + process.exit(1); + } + return v; + }; for (let i = 0; i < argv.length; i++) { const arg = argv[i]; if (arg === undefined) continue; if (arg === "--fixture") { - const v = argv[++i]; - if (v !== undefined) out.fixture = v; + out.fixture = requireValue("--fixture", argv[++i]); } else if (arg === "--owner") { - const v = argv[++i]; - if (v !== undefined) out.owner = v; + out.owner = requireValue("--owner", argv[++i]); } else if (arg === "--repo") { - const v = argv[++i]; - if (v !== undefined) out.repo = v; + out.repo = requireValue("--repo", argv[++i]); } else if (/^\d+$/.test(arg)) { out.number = Number.parseInt(arg, 10); } else if (arg === "--help" || arg === "-h") { From c481d24296a80070af88690396a1867754875ee7 Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:28:56 -0400 Subject: [PATCH 5/8] 
=?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20R?= =?UTF-8?q?EQUESTED/WAITING=20CheckRun=20states=20are=20pending=20(Codex?= =?UTF-8?q?=20P1)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Codex caught that classifyChecks's PENDING_STATUSES only included QUEUED/PENDING/EXPECTED. CheckRun also has non-terminal REQUESTED and WAITING states which would have been counted as neither in-progress nor pending — silently dropped from the report, producing false-positive `nextAction=none` when the PR is still blocked by CI progression. Added REQUESTED and WAITING to PENDING_STATUSES. Per the GitHub schema, CheckRun.status is a non-exhaustive set including IN_PROGRESS / QUEUED / COMPLETED / WAITING / REQUESTED / PENDING. The script's classification logic now covers all non-terminal states uniformly. Co-Authored-By: Claude Opus 4.7 --- tools/github/poll-pr-gate.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index b021dfb4..31d82f8d 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -103,6 +103,9 @@ const PENDING_STATUSES = new Set([ "PENDING", // StatusContext-class pending state (per Codex P1): "EXPECTED", + // CheckRun non-terminal states (per Codex P1, second pass): + "REQUESTED", + "WAITING", ]); function classifyChecks(rollup: CheckRollupItem[]): GateReport["checks"] { From cc3f4551ef629667de606b02e71a483b9ff7a92c Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:30:40 -0400 Subject: [PATCH 6/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20m?= =?UTF-8?q?atch=20tools/*.ts=20main()=20pattern=20(Copilot=20P2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Repo convention is `export function main(): number` + `if (import.meta.main) { process.exit(main(...)) }` (used in tools/peer-call/gemini.ts, tools/alignment/audit_*.ts, tools/backlog/generate-index.ts, 
etc.). My v0 used `function main(): void` + `main()` unconditionally, which prevents the script from being imported as a module (unconditionally executes side effects on import). The repo's test harness pattern relies on the import-without-side-effects shape. Refactored to match: main() now returns exit code (0/1/2/3), the import.meta.main guard ensures side effects only run when invoked directly. Last remaining Copilot P2 thread on this PR addressed. Co-Authored-By: Claude Opus 4.7 --- tools/github/poll-pr-gate.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index 31d82f8d..576bce5c 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -368,8 +368,7 @@ function parseArgs(argv: string[]): ParsedArgs { return out; } -function main(): void { - const argv = process.argv.slice(2); +export function main(argv: string[]): number { const args = parseArgs(argv); let pr: PullRequestData; if (args.fixture) { @@ -378,10 +377,13 @@ function main(): void { pr = fetchPR(args.owner, args.repo, args.number); } else { process.stderr.write("must provide PR number or --fixture\n"); - process.exit(1); + return 1; } const report = buildReport(pr); process.stdout.write(`${JSON.stringify(report, null, 2)}\n`); + return 0; } -main(); +if (import.meta.main) { + process.exit(main(process.argv.slice(2))); +} From 739de2c7390c2fd355acbb00f1a3efd3f6dc8988 Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:37:46 -0400 Subject: [PATCH 7/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20f?= =?UTF-8?q?ixture=20name=20match=20+=20loadFixture=20error=20handling=20+?= =?UTF-8?q?=20positive=20PR-number=20+=20maxBuffer=20+=20CLOSED-state=20te?= =?UTF-8?q?rminal=20+=20exit-code=20doc=20(Copilot=20P1=C3=974=20+=20Codex?= =?UTF-8?q?=20P2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Six real defects from Copilot P1×4 
+ Codex P2 in the latest review wave: 1. **fixture mergeStateStatus mismatched name** (Copilot) — fixture `clean-armed-auto-merge.json` had mergeStateStatus=BLOCKED with the name promising "clean." With classifyGate now treating CLEAN correctly, set the fixture's mergeStateStatus to CLEAN. Now classifies as gate=CLEAN, next=none — matches the name's intent. 2. **loadFixture no error handling** (Copilot) — JSON.parse + readFileSync would throw an unhandled exception for missing / invalid fixtures (stack trace, no controlled exit). Wrapped in try/catch with controlled exit 1 + clear stderr message. Verified: passing a nonexistent fixture path produces "failed to load fixture <path>: ENOENT...". 3. **PR number 0 accepted** (Copilot) — `/^\d+$/` matched "0" as a valid PR number, but GitHub PR numbers are >0. Added parsed-value check that rejects <= 0 with exit 1 and clear message. Verified: `bun ... 0` produces "PR number must be a positive integer". 4. **spawnSync maxBuffer not set** (Copilot) — default 1 MiB buffer could truncate `gh api graphql --paginate` output on discussion-heavy PRs, cascading into JSON parse failures. Added SPAWN_MAX_BUFFER = 32 MiB constant; passed to spawnSync. 5. **CLOSED state not treated as terminal** (Codex P2) — nextAction only treated MERGED as terminal, so a PR in state=CLOSED could still be reported as fix-failed-checks/resolve-threads/wait-ci based on stale check/thread data. Added CLOSED → next=none short-circuit to avoid chasing blockers on intentionally-closed PRs. 6. **Exit codes doc inconsistency** (Copilot) — header listed 0/1/2 only; code introduces 3 for parseJsonOrExit. Aligned the header documentation to mention all four exit codes (0=success, 1=invocation/dependency, 2=gh-side, 3=parse failure). Two stale Copilot threads from the earlier rounds (yQiO export-main pattern + the "fix-failed-checks not in PR description" thread) addressed by my prior commit (cc3f455) — convention-conformance done. Resolving as stale-fixed. 
Three style/convention threads (yQfm eslint suppression, yQh0 persona names in comments) deferred — Otto-279 history-class attribution carve-out covers persona-name comments in tooling files; eslint-suppression convention is a project-wide pattern audit candidate, not this-PR-specific. Co-Authored-By: Claude Opus 4.7 --- .../fixtures/clean-armed-auto-merge.json | 2 +- tools/github/poll-pr-gate.ts | 36 ++++++++++++++++--- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/tools/github/fixtures/clean-armed-auto-merge.json b/tools/github/fixtures/clean-armed-auto-merge.json index dd5c5ed8..86974381 100644 --- a/tools/github/fixtures/clean-armed-auto-merge.json +++ b/tools/github/fixtures/clean-armed-auto-merge.json @@ -1,7 +1,7 @@ { "number": 917, "state": "OPEN", - "mergeStateStatus": "BLOCKED", + "mergeStateStatus": "CLEAN", "autoMergeRequest": { "enabledAt": "2026-04-30T14:08:53Z" }, "mergeCommit": null, "statusCheckRollup": [ diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index 576bce5c..c0567867 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -32,8 +32,10 @@ // // Exit codes: // 0 — query succeeded, JSON emitted -// 1 — invocation / dependency error -// 2 — gh CLI returned non-zero +// 1 — invocation / argument / dependency error (bad args, gh missing, +// fixture missing, PR number <= 0) +// 2 — gh CLI returned non-zero (auth, rate-limit, PR not found) +// 3 — gh CLI output couldn't be parsed (truncated, non-JSON) // // Required-check semantics (per Amara 2nd's GitHub-docs verification): // SUCCESS / NEUTRAL / SKIPPED are merge-satisfying; FAILURE / CANCELLED @@ -154,6 +156,10 @@ function classifyGate( function nextAction(report: Omit): NextAction { if (report.state === "MERGED") return "verify-merge"; + // CLOSED-without-merge is terminal too — no actionable next step + // for a PR that can no longer merge (per Codex P2). 
Avoid chasing + // stale CI/thread blockers on intentionally-closed PRs. + if (report.state === "CLOSED") return "none"; if (report.gate === "DIRTY") return "rebase"; if (report.checks.failed > 0) return "fix-failed-checks"; if (report.unresolvedThreads > 0) return "resolve-threads"; @@ -191,8 +197,16 @@ function buildReport(pr: PullRequestData): GateReport { // 1 — invocation / argument / dependency-missing error // 2 — gh CLI returned non-zero (auth, rate-limit, PR not found) // 3 — gh CLI output couldn't be parsed (truncated, non-JSON) +// Generous buffer for `gh api graphql --paginate` on discussion-heavy PRs. +// Default Node maxBuffer is 1 MiB which can truncate paginated output and +// cascade into JSON parse failures (per Copilot P1). +const SPAWN_MAX_BUFFER = 32 * 1024 * 1024; // 32 MiB + function runGhOrExit(args: string[], context: string): string { - const result = spawnSync("gh", args, { encoding: "utf8" }); + const result = spawnSync("gh", args, { + encoding: "utf8", + maxBuffer: SPAWN_MAX_BUFFER, + }); if (result.error) { // ENOENT etc — gh is not on PATH or couldn't be launched process.stderr.write(`${context}: failed to launch gh: ${result.error.message}\n`); @@ -315,7 +329,14 @@ function normalizeRollup(rollup: unknown[]): CheckRollupItem[] { } function loadFixture(path: string): PullRequestData { - const raw = JSON.parse(readFileSync(path, "utf8")) as PullRequestData; + let raw: PullRequestData; + try { + raw = JSON.parse(readFileSync(path, "utf8")) as PullRequestData; + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + process.stderr.write(`failed to load fixture ${path}: ${msg}\n`); + process.exit(1); + } // Apply the same StatusContext-state normalization as fetchPR so fixture // mode and live mode classify identically (Codex P1). 
return { @@ -353,7 +374,12 @@ function parseArgs(argv: string[]): ParsedArgs { } else if (arg === "--repo") { out.repo = requireValue("--repo", argv[++i]); } else if (/^\d+$/.test(arg)) { - out.number = Number.parseInt(arg, 10); + const parsed = Number.parseInt(arg, 10); + if (parsed <= 0) { + process.stderr.write("PR number must be a positive integer\n"); + process.exit(1); + } + out.number = parsed; } else if (arg === "--help" || arg === "-h") { process.stdout.write( "Usage: poll-pr-gate.ts [--owner X] [--repo Y]\n" + From ecb4585e6dc840aa8ce4364dc9bf8b11fb46f6af Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 11:40:27 -0400 Subject: [PATCH 8/8] =?UTF-8?q?fix(github):=20poll-pr-gate=20=E2=80=94=20e?= =?UTF-8?q?slint-disable=20+=20Otto-279=20role-refs=20(Copilot)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Two final Copilot threads on PR #921 addressed: 1. **eslint-disable for spawnSync gh** (Copilot) — convention across tools/ (audit-packages.ts, pr-preservation/archive-pr.ts, peer-call/*, lint/runner-version-freshness.ts) is to suppress sonarjs/no-os-command-from-path with an inline rationale comment. Added the standard suppression to runGhOrExit's spawnSync call. 2. **Otto-279 role-refs in current-state code** (Copilot) — the header comment listed persona first-names ("Amara", "Deepseek", "Alexia", "Ani", "Gemini"). Per Otto-279's name-attribution carve-out, persona names belong on closed-list history surfaces (memory/, docs/ROUND-HISTORY.md, docs/DECISIONS/, docs/research/, commit messages) — not on current-state code. Replaced with role-ref "5-AI peer-reviewer convergence" + pointer to the verbatim attribution in the research doc. Same load-bearing provenance (the convergence claim), correct scope discipline. 
The third remaining thread (PR description's nextAction list missing fix-failed-checks) is a doc-only edit to the PR body, addressed separately via PR description update; resolving with that note. Co-Authored-By: Claude Opus 4.7 --- tools/github/poll-pr-gate.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tools/github/poll-pr-gate.ts b/tools/github/poll-pr-gate.ts index c0567867..e88350b8 100755 --- a/tools/github/poll-pr-gate.ts +++ b/tools/github/poll-pr-gate.ts @@ -5,9 +5,10 @@ // snippets that the poll-the-gate memory file describes // (memory/feedback_amara_poll_gate_not_ending_holding_is_not_status_2026_04_30.md). // -// Origin: 5-AI convergence (Amara 2nd, Deepseek 4th, Alexia 5th, Ani 3rd, -// Gemini 4th — all 2026-04-30) on promoting prose-jq to executable. -// Amara's blade: "if the loop uses it every tick, it deserves tests." +// Origin: 5-AI peer-reviewer convergence on 2026-04-30 (full attribution +// in `docs/research/2026-04-30-multi-ai-feedback-packets-this-session.md`) +// on promoting prose-jq to executable. Carved blade from that packet: +// "if the loop uses it every tick, it deserves tests." // // This is **v0**: skeleton + minimal happy-path query. Fixtures and // matrix tests follow in subsequent slices. The memory file should @@ -37,7 +38,7 @@ // 2 — gh CLI returned non-zero (auth, rate-limit, PR not found) // 3 — gh CLI output couldn't be parsed (truncated, non-JSON) // -// Required-check semantics (per Amara 2nd's GitHub-docs verification): +// Required-check semantics (per peer-AI GitHub-docs verification): // SUCCESS / NEUTRAL / SKIPPED are merge-satisfying; FAILURE / CANCELLED // / TIMED_OUT / STARTUP_FAILURE / ACTION_REQUIRED / STALE block. 
@@ -203,6 +204,9 @@ function buildReport(pr: PullRequestData): GateReport { const SPAWN_MAX_BUFFER = 32 * 1024 * 1024; // 32 MiB function runGhOrExit(args: string[], context: string): string { + // eslint-disable-next-line sonarjs/no-os-command-from-path -- gh is a + // standard CI/dev dependency invoked by name; convention used across + // tools/peer-call/, tools/pr-preservation/, tools/audit-packages.ts. const result = spawnSync("gh", args, { encoding: "utf8", maxBuffer: SPAWN_MAX_BUFFER,