From e11ddcf7de5bbd7a66eceeae7875f5f66992f82b Mon Sep 17 00:00:00 2001 From: Aaron Stainback Date: Thu, 30 Apr 2026 01:40:58 -0400 Subject: [PATCH] =?UTF-8?q?ts(B-0086):=20port=201=20budget=20script=20(.sh?= =?UTF-8?q?=E2=86=92.ts)=20=E2=80=94=20slice=2014=20of=20TS/Bun=20migratio?= =?UTF-8?q?n?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ts(slice-14, wip 1/N): port budget/snapshot-burn (.sh→.ts) First script of slice 14. Captures point-in-time LFG cost/burn snapshot via gh API + appends one JSON line to `docs/budget-history/snapshots.jsonl`. Composes with project-runway + daily-cost-report (Aaron's #287 visibility deadline). Byte-equivalent on argument-validation paths (--note, --help). Live --dry-run verified against GitHub API: produces snapshot with all the same fields as bash original. Mechanical changes: - bash `gh api ... | jq` pipelines → ghJson helper wrapping spawnSync - bash `mapfile` workaround (while-read for macOS bash 3.2 compat) → straightforward TS for-loop - bash heredoc `jq -n` snapshot composition → typed Snapshot interface + JSON.stringify - bash JSONL append (`printf '%s\n' >> "$out"`) → appendFileSync - Per-repo aggregation extracted into aggregateTimings + summarizePulls helpers under cognitive-complexity threshold - Optional fields elided via spread+conditional for exactOptionalPropertyTypes compliance Lint-clean: bun --bun tsc --noEmit + eslint strictTypeChecked + sonarjs all pass. The new lint (tsc tools) gate from #890 will validate this in CI. * trajectory(ts-bun): slice 14 audit substrate + RESUME tracker - slice-audits.md: append slice-14 audit (1 port — budget/snapshot-burn). Also flip slice-13 from PR-pending to merged-with-PR-number. - RESUME.md: bump slice-12-merged → slice-13-merged (#892, commit e9dc894). Milestone 34 → 35 (34 ported + 1 in-flight in slice-14). Bucket B 9 → 8. Bucket D ported list grew to 34 entries. 
--- .../typescript-bun-migration/RESUME.md | 14 +- .../typescript-bun-migration/slice-audits.md | 22 +- tools/budget/snapshot-burn.ts | 360 ++++++++++++++++++ 3 files changed, 388 insertions(+), 8 deletions(-) create mode 100644 tools/budget/snapshot-burn.ts diff --git a/docs/trajectories/typescript-bun-migration/RESUME.md b/docs/trajectories/typescript-bun-migration/RESUME.md index b9f863d0d..77150f143 100644 --- a/docs/trajectories/typescript-bun-migration/RESUME.md +++ b/docs/trajectories/typescript-bun-migration/RESUME.md @@ -1,9 +1,9 @@ # Trajectory — TypeScript / Bun migration -**Status**: Active (Lane B slice 12 merged — [#885](https://github.com/Lucent-Financial-Group/Zeta/pull/885), commit `cfb5964`) -**Milestone**: 33 ported + 1 in-flight = 34 total (2 from #849 + 3 from #866 + 3 from #868 + 3 from #870 + 2 from #872 + 3 from #874 + 3 from #876 + 3 from #878 + 3 from #880 + 3 from #882 + 2 from #883 + 2 from #884 + 1 from #885 = 33 merged; +1 in-flight in slice-13). Slice-13 opens **git-cluster** (git/push-with-retry). 9 Bucket B files remain. +**Status**: Active (Lane B slice 13 merged — [#892](https://github.com/Lucent-Financial-Group/Zeta/pull/892), commit `e9dc894`) +**Milestone**: 34 ported + 1 in-flight = 35 total (2 from #849 + 3 from #866 + 3 from #868 + 3 from #870 + 2 from #872 + 3 from #874 + 3 from #876 + 3 from #878 + 3 from #880 + 3 from #882 + 2 from #883 + 2 from #884 + 1 from #885 + 1 from #892 = 34 merged; +1 in-flight in slice-14). Slice-14 opens **budget-cluster** (budget/snapshot-burn). 8 Bucket B files remain. **Current blocker**: None. -**Next concrete action**: Pick a coherent next slice from Bucket B (9 files remaining). Per Gate B: read-only scope first, then re-verify the layered baseline currency before first mutating action. +**Next concrete action**: Pick a coherent next slice from Bucket B (8 files remaining). Per Gate B: read-only scope first, then re-verify the layered baseline currency before first mutating action. 
**Last updated**: 2026-04-30 ## Why this trajectory exists @@ -64,16 +64,15 @@ tools/profile.sh Rationale: TS/Bun is itself one of the things `install.sh` installs. These scripts cannot depend on Bun. -### Bucket B — Should become TypeScript (9 files remaining) +### Bucket B — Should become TypeScript (8 files remaining) -Post-install scripts that operate on the repo (lints, audits, hygiene checks, peer-call wrappers, budget reports, git ops). Same shape as the scripts ported in #849, #866, #868, #870, #872, #874, #876, #878, #880, #882, #883, #884, #885. The originally-listed audit/lint scripts have progressively ported (1 in slice-13 in flight — git/push-with-retry); the bash originals remain in-tree as the equivalence reference and will retire once the TS ports have soaked. +Post-install scripts that operate on the repo (lints, audits, hygiene checks, peer-call wrappers, budget reports, git ops). Same shape as the scripts ported in #849, #866, #868, #870, #872, #874, #876, #878, #880, #882, #883, #884, #885, #892. The originally-listed audit/lint scripts have progressively been ported (1 in slice-14 in flight — budget/snapshot-burn); the bash originals remain in-tree as the equivalence reference and will retire once the TS ports have soaked. 
```text tools/budget/daily-cost-report.sh tools/budget/project-runway.sh -tools/budget/snapshot-burn.sh +tools/budget/snapshot-burn.sh # in flight (slice 14) tools/git/batch-resolve-pr-threads.sh -tools/git/push-with-retry.sh # in flight (slice 13) tools/peer-call/codex.sh tools/peer-call/gemini.sh tools/peer-call/grok.sh @@ -133,6 +132,7 @@ tools/hygiene/append-tick-history-row.sh # ported in #883 tools/skill-catalog/backfill_dv2_frontmatter.sh # ported in #884 tools/audit-packages.sh # ported in #884 tools/backlog/generate-index.sh # ported in #885 +tools/git/push-with-retry.sh # ported in #892 ``` ## Recommended next slice diff --git a/docs/trajectories/typescript-bun-migration/slice-audits.md b/docs/trajectories/typescript-bun-migration/slice-audits.md index 656995e1d..9b9e7649c 100644 --- a/docs/trajectories/typescript-bun-migration/slice-audits.md +++ b/docs/trajectories/typescript-bun-migration/slice-audits.md @@ -411,7 +411,27 @@ Per-port pattern checklist: Slice 6 passes audit. No new patterns recorded — all reused from prior slices. -## Slice 13 — 1 port (git/push-with-retry — git-cluster opens) (PR pending — `lane-b/ts-bun-slice-13-push-with-retry-2026-04-30`) +## Slice 14 — 1 port (budget/snapshot-burn — budget-cluster opens) (PR pending — `lane-b/ts-bun-slice-14-snapshot-burn-2026-04-30`) + +**Slice files**: + +- `tools/budget/snapshot-burn.{sh→ts}` (point-in-time LFG cost/burn snapshot capture; appends one JSON line to `docs/budget-history/snapshots.jsonl`) + +**Comparison points**: identical to slice 13. Within Gate B 30-day window. tsc gate active in CI per PR #890. + +### Code-pattern audit (per-port) + +- **`snapshot-burn.ts`** (174 → 360 lines): bash `gh api ... | jq` pipelines → `ghJson` helper that wraps `spawnSync("gh", ["api", path])` + `JSON.parse`; defensive `ghJsonOrEmpty` for fault-tolerant capture. Bash `mapfile` workaround (`while read; do … done < <(...)` for macOS bash 3.2 compat) → straightforward TS for-loop. 
Bash heredoc-driven `jq -n` snapshot composition → typed `Snapshot` interface + `JSON.stringify`. Bash JSONL append (`printf '%s\n' >> "$out"`) → `appendFileSync(out, line + "\n")`. Per-repo aggregation extracted into `aggregateTimings` + `summarizePulls` helpers under cognitive-complexity threshold. Optional fields elided via spread+conditional (`...(row.name === undefined ? {} : { name: row.name })`) for `exactOptionalPropertyTypes` compliance. + +### Equivalence audit + +- **`snapshot-burn`**: byte-equivalent on argument-validation paths (`--note` without value → exit 2 with same message; `--help` → 0). Live `--dry-run` exercised against the GitHub API: produces a JSON snapshot with all the same fields as the bash original (ts, factory_git_sha, org, note, copilot_billing, repos[].agg, repos[].pr, repos[].last_20_runs, scope_coverage). Fault-tolerant warning behavior preserved (counts API failures + emits same warning summary line). Network-dependent path verified by spot-check. + +### Outcome + +Slice 14 passes audit. **Budget-cluster opens** (first of 3 budget scripts). Bucket B 9 → 8. The bash original remains in-tree as equivalence reference + production fallback until the TS port has soaked through several daily-cost-report runs. + +## Slice 13 — 1 port (git/push-with-retry — git-cluster opens) (PR #892, merged 2026-04-30, commit `e9dc894`) **Slice files**: diff --git a/tools/budget/snapshot-burn.ts b/tools/budget/snapshot-burn.ts new file mode 100644 index 000000000..275440f5c --- /dev/null +++ b/tools/budget/snapshot-burn.ts @@ -0,0 +1,360 @@ +#!/usr/bin/env bun +// snapshot-burn.ts — capture a point-in-time LFG cost/burn snapshot +// and append it to docs/budget-history/snapshots.jsonl as a single +// JSON line. Append-only; git is the time-series storage. +// +// TypeScript+Bun port of snapshot-burn.sh, slice 14 of the TS+Bun +// migration. See docs/best-practices/repo-scripting.md. 
+// +// Why this exists: the human maintainer 2026-04-22 scoped the +// three-repo-split Stage 1 gate as evidence-based budget tracking. +// The live cost graphs on github.com are for humans and disappear +// the moment we stop looking; the factory needs persisted evidence +// to project mid-swap credit-exhaustion risk. See +// docs/budget-history/README.md for the methodology + projection +// approach. +// +// Usage: +// bun tools/budget/snapshot-burn.ts # append a snapshot +// bun tools/budget/snapshot-burn.ts --dry-run # print only, no append +// bun tools/budget/snapshot-burn.ts --note "TEXT" # attach a human note +// +// Scopes required (current gh token has these): read:org, repo, workflow. +// admin:org scope would unlock /settings/billing/{actions,packages, +// shared-storage} too — without admin:org we capture run timing +// instead of pure billing totals. +// +// Exit codes: +// 0 success +// 1 if any required gh/git step fails +// 2 on CLI-argument errors + +import { spawnSync } from "node:child_process"; +import { appendFileSync } from "node:fs"; +import { fileURLToPath } from "node:url"; +import { dirname, resolve } from "node:path"; + +const SPAWN_MAX_BUFFER = 64 * 1024 * 1024; + +const ORG = "Lucent-Financial-Group"; +const REPOS: readonly string[] = [`${ORG}/Zeta`]; + +interface ParsedArgs { + readonly dryRun: boolean; + readonly note: string; +} + +interface ArgError { + readonly error: string; + readonly exitCode: 2; +} + +function parseArgs(argv: readonly string[]): ParsedArgs | ArgError { + let dryRun = false; + let note = ""; + for (let i = 0; i < argv.length; i++) { + const arg = argv[i] ?? 
""; + if (arg === "--dry-run") { + dryRun = true; + } else if (arg === "--note") { + const next = argv[i + 1]; + if (next === undefined) { + return { error: "error: --note requires TEXT argument", exitCode: 2 }; + } + note = next; + i++; + } else if (arg === "-h" || arg === "--help") { + emitHelp(); + return { dryRun: false, note: "__HELP__" }; + } else { + return { + error: `error: unknown argument '${arg}'`, + exitCode: 2, + }; + } + } + return { dryRun, note }; +} + +function emitHelp(): void { + process.stdout.write( + `snapshot-burn.ts — capture a point-in-time LFG cost/burn snapshot\n` + + `\n` + + `Usage:\n` + + ` bun tools/budget/snapshot-burn.ts # append a snapshot\n` + + ` bun tools/budget/snapshot-burn.ts --dry-run # print only, no append\n` + + ` bun tools/budget/snapshot-burn.ts --note "TEXT" # attach a human note\n`, + ); +} + +function commandAvailable(cmd: string): boolean { + const result = spawnSync(cmd, ["--version"], { stdio: "ignore" }); + return result.status === 0; +} + +function ghJson(path: string): unknown { + // `gh api` shell-out: gh handles auth + pagination defaults; user + // passes only the API path, no shell-interpolated args. + // eslint-disable-next-line sonarjs/no-os-command-from-path + const result = spawnSync("gh", ["api", path], { + encoding: "utf8", + maxBuffer: SPAWN_MAX_BUFFER, + }); + if (result.status !== 0) { + throw new Error( + `gh api ${path} failed: ${result.stderr.length > 0 ? 
result.stderr : "unknown error"}`, + ); + } + return JSON.parse(result.stdout) as unknown; +} + +function ghJsonOrEmpty(path: string, fallback: unknown): { + data: unknown; + warning: boolean; +} { + try { + return { data: ghJson(path), warning: false }; + } catch { + process.stderr.write(`warning: gh api ${path} failed; using fallback\n`); + return { data: fallback, warning: true }; + } +} + +function repoRoot(): string { + const here = dirname(fileURLToPath(import.meta.url)); + return resolve(here, "..", ".."); +} + +function gitHeadSha(): string { + // eslint-disable-next-line sonarjs/no-os-command-from-path + const result = spawnSync("git", ["-C", repoRoot(), "rev-parse", "HEAD"], { + encoding: "utf8", + maxBuffer: SPAWN_MAX_BUFFER, + }); + return result.status === 0 ? result.stdout.trim() : "unknown"; +} + +function nowIsoUtc(): string { + return new Date().toISOString().replace(/\.\d{3}Z$/, "Z"); +} + +interface RunRow { + readonly id: number; + readonly name?: string; + readonly conclusion?: string | null; + readonly run_started_at?: string; + readonly updated_at?: string; +} + +interface RunsListResponse { + readonly workflow_runs?: readonly RunRow[]; +} + +interface TimingResponse { + readonly run_duration_ms?: number; + readonly billable?: { + readonly UBUNTU?: { readonly total_ms?: number }; + readonly MACOS?: { readonly total_ms?: number }; + readonly WINDOWS?: { readonly total_ms?: number }; + }; +} + +interface TimingEntry { + readonly id: number; + readonly timing: TimingResponse; +} + +interface AggregatedTiming { + readonly total_runs: number; + readonly total_duration_ms: number; + readonly billable_ubuntu_ms: number; + readonly billable_macos_ms: number; + readonly billable_windows_ms: number; +} + +function aggregateTimings(timings: readonly TimingEntry[]): AggregatedTiming { + const sum = (xs: readonly number[]): number => + xs.reduce((a: number, b: number) => a + b, 0); + return { + total_runs: timings.length, + total_duration_ms: 
sum(timings.map((t) => t.timing.run_duration_ms ?? 0)), + billable_ubuntu_ms: sum( + timings.map((t) => t.timing.billable?.UBUNTU?.total_ms ?? 0), + ), + billable_macos_ms: sum( + timings.map((t) => t.timing.billable?.MACOS?.total_ms ?? 0), + ), + billable_windows_ms: sum( + timings.map((t) => t.timing.billable?.WINDOWS?.total_ms ?? 0), + ), + }; +} + +interface PullRow { + readonly merged_at?: string | null; +} + +interface PrStats { + readonly recent_merged: number; + readonly last_merged_at: string | null; +} + +function summarizePulls(pulls: readonly PullRow[]): PrStats { + const merged = pulls.filter((p) => p.merged_at !== null && p.merged_at !== undefined); + const last = merged[0]; + return { + recent_merged: merged.length, + last_merged_at: last?.merged_at ?? null, + }; +} + +interface RepoEntry { + readonly repo: string; + readonly agg: AggregatedTiming; + readonly pr: PrStats; + readonly last_20_runs: readonly RunRow[]; +} + +interface CaptureResult { + readonly entries: readonly RepoEntry[]; + readonly warnings: number; +} + +function captureRepoStats(repos: readonly string[]): CaptureResult { + let warnings = 0; + const entries: RepoEntry[] = []; + for (const r of repos) { + const runsRes = ghJsonOrEmpty(`/repos/${r}/actions/runs?per_page=20`, { + workflow_runs: [], + }); + if (runsRes.warning) warnings++; + const runs = runsRes.data as RunsListResponse; + const runRows = runs.workflow_runs ?? 
[]; + const timings: TimingEntry[] = []; + for (const row of runRows) { + const tRes = ghJsonOrEmpty(`/repos/${r}/actions/runs/${String(row.id)}/timing`, {}); + if (tRes.warning) warnings++; + timings.push({ id: row.id, timing: tRes.data as TimingResponse }); + } + const agg = aggregateTimings(timings); + + const prRes = ghJsonOrEmpty(`/repos/${r}/pulls?state=closed&per_page=10`, []); + if (prRes.warning) warnings++; + const prRows = prRes.data as readonly PullRow[]; + const pr = summarizePulls(prRows); + + const last20: RunRow[] = runRows.map((row) => ({ + id: row.id, + ...(row.name === undefined ? {} : { name: row.name }), + ...(row.conclusion === undefined ? {} : { conclusion: row.conclusion }), + ...(row.run_started_at === undefined ? {} : { run_started_at: row.run_started_at }), + ...(row.updated_at === undefined ? {} : { updated_at: row.updated_at }), + })); + entries.push({ repo: r, agg, pr, last_20_runs: last20 }); + } + return { entries, warnings }; +} + +interface Snapshot { + readonly ts: string; + readonly factory_git_sha: string; + readonly org: string; + readonly note: string | null; + readonly copilot_billing: unknown; + readonly repos: readonly RepoEntry[]; + readonly scope_coverage: { + readonly has_read_org: boolean; + readonly has_admin_org: boolean; + readonly covered: readonly string[]; + readonly missing_requires_admin_org: readonly string[]; + }; +} + +function buildSnapshot(args: { + readonly note: string; + readonly copilot: unknown; + readonly repos: readonly RepoEntry[]; +}): Snapshot { + return { + ts: nowIsoUtc(), + factory_git_sha: gitHeadSha(), + org: ORG, + note: args.note.length > 0 ? 
args.note : null, + copilot_billing: args.copilot, + repos: args.repos, + scope_coverage: { + has_read_org: true, + has_admin_org: false, + covered: ["copilot-seats", "actions-runs-per-run-timing"], + missing_requires_admin_org: [ + "actions-billing", + "packages-billing", + "shared-storage-billing", + ], + }, + }; +} + +export function main(argv: readonly string[]): number { + const parsed = parseArgs(argv); + if ("error" in parsed) { + process.stderr.write(`${parsed.error}\n`); + return parsed.exitCode; + } + if (parsed.note === "__HELP__") return 0; + + for (const cmd of ["gh", "git"] as const) { + if (!commandAvailable(cmd)) { + process.stderr.write(`error: '${cmd}' required but not on PATH\n`); + return 1; + } + } + + const copilot = ghJsonOrEmpty(`/orgs/${ORG}/copilot/billing`, {}); + const captured = captureRepoStats(REPOS); + const totalWarnings = captured.warnings + (copilot.warning ? 1 : 0); + + if (totalWarnings > 0) { + process.stderr.write( + `warning: ${String(totalWarnings)} GitHub API call(s) failed; snapshot is partial — review stderr above\n`, + ); + } + + const snapshot = buildSnapshot({ + note: parsed.note, + copilot: copilot.data, + repos: captured.entries, + }); + + const line = JSON.stringify(snapshot); + if (line.length === 0 || line === "null") { + process.stderr.write( + "error: snapshot compaction produced empty/null output — refusing to append\n", + ); + return 1; + } + + if (parsed.dryRun) { + process.stdout.write(`${JSON.stringify(snapshot, null, 2)}\n`); + return 0; + } + + const out = resolve(repoRoot(), "docs", "budget-history", "snapshots.jsonl"); + appendFileSync(out, `${line}\n`); + process.stdout.write(`appended snapshot to ${out}\n`); + const summary = { + ts: snapshot.ts, + org: snapshot.org, + repos: snapshot.repos.map((r) => ({ + repo: r.repo, + last_20_total_ms: r.agg.total_duration_ms, + recent_merged: r.pr.recent_merged, + })), + }; + process.stdout.write(`${JSON.stringify(summary, null, 2)}\n`); + return 0; +} + +if 
(import.meta.main) { + process.exit(main(process.argv.slice(2))); +}