diff --git a/docs/trajectories/typescript-bun-migration/RESUME.md b/docs/trajectories/typescript-bun-migration/RESUME.md
index 84e73ffe8..8ce4a4c89 100644
--- a/docs/trajectories/typescript-bun-migration/RESUME.md
+++ b/docs/trajectories/typescript-bun-migration/RESUME.md
@@ -1,9 +1,9 @@
 # Trajectory — TypeScript / Bun migration
 
-**Status**: Active (Lane B slice 9 merged — [#882](https://github.com/Lucent-Financial-Group/Zeta/pull/882), commit `02266a7`)
-**Milestone**: 31 hygiene/lint/audit scripts ported (2 from #849 + 3 from #866 + 3 from #868 + 3 from #870 + 2 from #872 + 3 from #874 + 3 from #876 + 3 from #878 + 3 from #880 + 3 from #882 + 2 in-flight in slice-10). **Cluster H complete** + agency-signature-pair complete; slice-10 opens **counterweight-cluster + write-side-tools** (counterweight-audit + append-tick-history-row). 14 Bucket B files remain.
+**Status**: Active (Lane B slice 10 merged — [#883](https://github.com/Lucent-Financial-Group/Zeta/pull/883), commit `271bc38`)
+**Milestone**: 30 ported + 2 in-flight = 32 total (2 from #849 + 3 from #866 + 3 from #868 + 3 from #870 + 2 from #872 + 3 from #874 + 3 from #876 + 3 from #878 + 3 from #880 + 3 from #882 + 2 from #883 = 30 merged; 2 in-flight in slice-11). Slice-11 opens **skill-catalog cluster + NuGet audit** (backfill_dv2_frontmatter + audit-packages). 12 Bucket B files remain.
 **Current blocker**: None.
-**Next concrete action**: Pick a coherent next slice from Bucket B (14 files remaining). Per Gate B: read-only scope first, then re-verify the layered baseline currency before first mutating action.
+**Next concrete action**: Pick a coherent next slice from Bucket B (12 files remaining). Per Gate B: read-only scope first, then re-verify that the layered baseline is current before the first mutating action.
 **Last updated**: 2026-04-30
 
 ## Why this trajectory exists
diff --git a/docs/trajectories/typescript-bun-migration/slice-audits.md b/docs/trajectories/typescript-bun-migration/slice-audits.md
index 2c361c835..c9ffead54 100644
--- a/docs/trajectories/typescript-bun-migration/slice-audits.md
+++ b/docs/trajectories/typescript-bun-migration/slice-audits.md
@@ -411,6 +411,29 @@ Per-port pattern checklist:
 
 Slice 6 passes audit. No new patterns recorded — all reused from prior slices.
 
+## Slice 11 — 2 ports (skill-catalog cluster + NuGet audit) (PR pending — `lane-b/ts-bun-slice-11-dv2-frontmatter-backfill-2026-04-30`)
+
+**Slice files**:
+
+- `tools/skill-catalog/backfill_dv2_frontmatter.{sh→ts}` (DV-2.0 frontmatter mechanical backfill)
+- `tools/audit-packages.{sh→ts}` (NuGet feed audit per Directory.Packages.props entry)
+
+**Comparison points**: identical to slice 6/7/8/9/10. Within the Gate B 30-day window.
+
+### Code-pattern audit (per-port)
+
+- **`backfill_dv2_frontmatter.ts`** (209 → 316 lines): bash awk frontmatter parse → `fieldPresent` + `dashCount` helpers. Bash `compute_record_source` heuristic preserved. Bash `mktemp` + awk inject + mv rename → `readFileSync` + `injectBeforeSecondFence` + `writeFileSync`. Bash `INJECT_BLOB` env-passing pattern → in-memory string array. `--all` find-glob → `readdirSync` filter.
+- **`audit-packages.ts`** (51 → 143 lines): bash grep+sed extraction → `PACKAGE_RE.exec` loop. Bash awk pipe-table parse → `cols.split('|').map(trim)` + column-2 match check (preserves "last matching row" semantics; see the sketch after this list). Three statuses preserved: `✓ up-to-date` / `? couldn't query` / `⚠ bump available`.
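+
+A minimal sketch of that last-matching-row parse, using hypothetical `dotnet package search` rows (the real table layout may vary, e.g. one row per configured feed):
+
+```ts
+// Only column 2 is matched against the package id; the LAST matching
+// row wins, mirroring the bash awk scan.
+const rows = [
+  "| Package ID      | Version |",
+  "| Newtonsoft.Json | 13.0.2  |",
+  "| Newtonsoft.Json | 13.0.3  |",
+];
+let last = "";
+for (const row of rows) {
+  const cols = row.split("|").map((c) => c.trim());
+  if ((cols[1] ?? "") === "Newtonsoft.Json") last = cols[2] ?? "";
+}
+console.log(last); // "13.0.3"
+```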
+
+### Equivalence audit
+
+- **`backfill_dv2_frontmatter`**: byte-equivalent output in `--dry-run` mode. Write-side path verified by snapshot test.
+- **`audit-packages`**: network-dependent; offline mode produces `?` for all packages in both the bash and TS versions (verified locally).
+
+### Outcome
+
+Slice 11 passes audit. Skill-catalog cluster opened + NuGet audit added. Bucket B 14 → 12.
+
 ## Slice 10 — 2 ports (counterweight-cluster + first write-side) (PR pending — `lane-b/ts-bun-slice-10-counterweight-audit-2026-04-30`)
 
 **Slice files**:
diff --git a/tools/audit-packages.ts b/tools/audit-packages.ts
new file mode 100644
index 000000000..6827809b1
--- /dev/null
+++ b/tools/audit-packages.ts
@@ -0,0 +1,153 @@
+#!/usr/bin/env bun
+// audit-packages.ts — checks every Directory.Packages.props entry
+// against the NuGet feed via `dotnet package search`.
+//
+// TypeScript+Bun port of audit-packages.sh, slice 11 of the TS+Bun
+// migration. See docs/best-practices/repo-scripting.md.
+//
+// Network-dependent: shells out to `dotnet package search
+// --exact-match` per package; non-deterministic without a NuGet
+// snapshot. Equivalence-tested via the no-network failure path
+// (each `latest` falls back to `?`).
+//
+// Usage:
+//   bun tools/audit-packages.ts
+//
+// Exit codes:
+//   0  all queryable packages on latest
+//   1  one or more packages have a bump available, or
+//      Directory.Packages.props could not be read or parsed
+
+import { readFileSync } from "node:fs";
+import { dirname, join, resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+import { spawnSync } from "node:child_process";
+
+type ExitCode = 0 | 1;
+
+const SPAWN_MAX_BUFFER = 64 * 1024 * 1024;
+
+const PACKAGE_RE = /PackageVersion Include="([^"]+)" Version="([^"]+)"/g;
+
+interface PackageEntry {
+  readonly id: string;
+  readonly pinned: string;
+}
+
+function repoRoot(): string {
+  // Match bash original: REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)".
+  // Bash resolves the script's path, walks up one (tools/.. → repo root),
+  // and cds. The TS port mirrors this via import.meta.url so the script
+  // works regardless of caller cwd, the same as the bash behavior.
+  const scriptPath = fileURLToPath(import.meta.url);
+  return resolve(dirname(scriptPath), "..");
+}
+
+function parsePackages(content: string): readonly PackageEntry[] {
+  const out: PackageEntry[] = [];
+  PACKAGE_RE.lastIndex = 0;
+  let m: RegExpExecArray | null = PACKAGE_RE.exec(content);
+  while (m !== null) {
+    out.push({ id: m[1] ?? "", pinned: m[2] ?? "" });
+    m = PACKAGE_RE.exec(content);
+  }
+  return out;
+}
+
+function queryLatest(pkgId: string): string {
+  const args = ["package", "search", pkgId, "--exact-match"];
+  // eslint-disable-next-line sonarjs/no-os-command-from-path
+  const result = spawnSync("dotnet", args, {
+    encoding: "utf8",
+    maxBuffer: SPAWN_MAX_BUFFER,
+  });
+  if (result.status !== 0) return "";
+  let last = "";
+  for (const line of result.stdout.split("\n")) {
+    const cols = line.split("|").map((c) => c.trim());
+    const col2 = cols[1] ?? "";
+    if (col2 !== pkgId) continue;
+    last = cols[2] ?? "";
+  }
+  return last;
+}
+
+function pad(s: string, width: number): string {
+  return s.length >= width ? s : s + " ".repeat(width - s.length);
+}
+
+interface Report {
+  readonly id: string;
+  readonly pinned: string;
+  readonly latest: string;
+  readonly marker: string;
+}
+
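+// classify maps (pinned, latest) to a status marker. Concretely:
+//   classify("13.0.3", "13.0.3") → "✓ up-to-date"      (failed: false)
+//   classify("13.0.3", "?")      → "? couldn't query"  (failed: false)
+//   classify("13.0.3", "13.0.4") → "⚠ bump available"  (failed: true)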
+function classify(
+  pinned: string,
+  latest: string,
+): { marker: string; failed: boolean } {
+  if (latest === pinned) return { marker: "✓ up-to-date", failed: false };
+  if (latest === "?") return { marker: "? couldn't query", failed: false };
+  return { marker: "⚠ bump available", failed: true };
+}
+
+export function main(): ExitCode {
+  const root = repoRoot();
+  const propsPath = join(root, "Directory.Packages.props");
+  let content: string;
+  try {
+    content = readFileSync(propsPath, "utf8");
+  } catch {
+    process.stderr.write(`error: cannot read ${propsPath}\n`);
+    return 1;
+  }
+  const packages = parsePackages(content);
+
+  // If parsing yields zero entries on a non-empty Directory.Packages.props,
+  // the regex has likely drifted from the file format — silent success
+  // would hide real audit failure (Codex P2). Fail with a clear message.
+  if (packages.length === 0) {
+    process.stderr.write(
+      `error: parsed 0 PackageVersion entries from ${propsPath} — regex may be stale relative to file format\n`,
+    );
+    return 1;
+  }
+
+  process.stdout.write("=== Dbsp package audit ===\n");
+  process.stdout.write(
+    `${pad("Package", 35)} ${pad("Pinned", 15)} ${pad("Latest", 15)} Status\n`,
+  );
+  process.stdout.write(
+    `${pad("-------", 35)} ${pad("------", 15)} ${pad("------", 15)} ------\n`,
+  );
+
+  let failed = false;
+  const reports: Report[] = [];
+  for (const pkg of packages) {
+    const latest = queryLatest(pkg.id);
+    const display = latest === "" ? "?" : latest;
+    const { marker, failed: thisFailed } = classify(pkg.pinned, display);
+    if (thisFailed) failed = true;
+    reports.push({ id: pkg.id, pinned: pkg.pinned, latest: display, marker });
+  }
+
+  for (const r of reports) {
+    process.stdout.write(
+      `${pad(r.id, 35)} ${pad(r.pinned, 15)} ${pad(r.latest, 15)} ${r.marker}\n`,
+    );
+  }
+
+  process.stdout.write("\n");
+  if (!failed) {
+    process.stdout.write("✓ All queryable packages on latest.\n");
+    return 0;
+  }
+  process.stdout.write(
+    "⚠ Bumps available — update Directory.Packages.props and re-run tests.\n",
+  );
+  return 1;
+}
+
+if (import.meta.main) {
+  process.exit(main());
+}
diff --git a/tools/skill-catalog/backfill_dv2_frontmatter.ts b/tools/skill-catalog/backfill_dv2_frontmatter.ts
new file mode 100644
index 000000000..81734b3e5
--- /dev/null
+++ b/tools/skill-catalog/backfill_dv2_frontmatter.ts
@@ -0,0 +1,353 @@
+#!/usr/bin/env bun
+// backfill_dv2_frontmatter.ts — mechanical DV-2.0 frontmatter
+// backfill for SKILL.md files.
+//
+// TypeScript+Bun port of backfill_dv2_frontmatter.sh, slice 11 of
+// the TS+Bun migration. See docs/best-practices/repo-scripting.md.
+//
+// Phase-1 deliverable of the DV-2.0 provenance rollout. Computes
+// missing fields (record_source / load_datetime / last_updated /
+// status / bp_rules_cited) from git history and injects them before
+// the closing frontmatter fence. Idempotent.
+//
+// Usage:
+//   bun tools/skill-catalog/backfill_dv2_frontmatter.ts [--dry-run] <file> ...
+//   bun tools/skill-catalog/backfill_dv2_frontmatter.ts [--dry-run] --all
+//
+// Exit codes:
+//   0  success
+//   1  usage error
+//   2  a file was malformed (no closing frontmatter fence)
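+//
+// Example (hypothetical path; dry-run prints what it would add):
+//   $ bun tools/skill-catalog/backfill_dv2_frontmatter.ts --dry-run .claude/skills/foo/SKILL.md
+//   --- .claude/skills/foo/SKILL.md (dry-run, would inject 2 field(s)):
+//     status: active
+//     bp_rules_cited: []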
+
+import { readdirSync, readFileSync, writeFileSync, renameSync } from "node:fs";
+import type { Dirent } from "node:fs";
+import { join } from "node:path";
+import { spawnSync } from "node:child_process";
+
+type ExitCode = 0 | 1 | 2;
+
+const SPAWN_MAX_BUFFER = 64 * 1024 * 1024;
+
+const ROUND_RE = /[Rr]ound\s*(\d+)/;
+const BP_RE = /BP-\d+/g;
+
+interface ParsedArgs {
+  readonly dryRun: boolean;
+  readonly all: boolean;
+  readonly files: readonly string[];
+}
+
+interface ParseResult {
+  readonly args: ParsedArgs | null;
+  readonly errorMessage: string;
+  readonly help: boolean;
+}
+
+function parseArgs(argv: readonly string[]): ParseResult {
+  let dryRun = false;
+  let all = false;
+  const files: string[] = [];
+  for (const arg of argv) {
+    if (arg === "--dry-run") dryRun = true;
+    else if (arg === "--all") all = true;
+    else if (arg === "-h" || arg === "--help") {
+      return { args: null, errorMessage: "", help: true };
+    } else if (arg.startsWith("-")) {
+      return { args: null, errorMessage: `unknown flag: ${arg}`, help: false };
+    } else {
+      files.push(arg);
+    }
+  }
+  return { args: { dryRun, all, files }, errorMessage: "", help: false };
+}
+
+function emitHelp(): void {
+  process.stdout.write(
+    "Usage: bun tools/skill-catalog/backfill_dv2_frontmatter.ts [--dry-run] <file> ... | --all\n",
+  );
+}
+
+function gitOutput(args: readonly string[]): string {
+  // eslint-disable-next-line sonarjs/no-os-command-from-path
+  const result = spawnSync("git", args, {
+    encoding: "utf8",
+    maxBuffer: SPAWN_MAX_BUFFER,
+  });
+  if (result.status !== 0) return "";
+  return result.stdout;
+}
+
+function repoRoot(): string {
+  const out = gitOutput(["rev-parse", "--show-toplevel"]).trim();
+  return out === "" ? process.cwd() : out;
+}
+
+function findAllSkillFiles(): readonly string[] {
+  const out: string[] = [];
+  const skillsDir = ".claude/skills";
+  let entries: readonly Dirent[];
+  try {
+    entries = readdirSync(skillsDir, { withFileTypes: true });
+  } catch {
+    return [];
+  }
+  // List candidate paths; processOne uses readFileSync directly with
+  // try/catch (no statSync gate), so missing/non-file paths surface as
+  // a warn outcome rather than racing the file system.
+  for (const e of entries) {
+    if (!e.isDirectory()) continue;
+    out.push(join(skillsDir, e.name, "SKILL.md"));
+  }
+  return out.sort((a, b) => a.localeCompare(b));
+}
+
+function fieldPresent(content: string, field: string): boolean {
+  const lines = content.split("\n");
+  let count = 0;
+  for (const line of lines) {
+    if (line === "---") {
+      count++;
+      if (count === 2) return false;
+      continue;
+    }
+    if (count === 1 && line.startsWith(`${field}:`)) return true;
+  }
+  return false;
+}
+
+function dashCount(content: string): number {
+  let count = 0;
+  for (const line of content.split("\n")) {
+    if (line === "---") count++;
+  }
+  return count;
+}
+
+function firstLine(text: string): string {
+  const idx = text.indexOf("\n");
+  return idx < 0 ? text : text.slice(0, idx);
+}
+
+function isoDateOnly(text: string): string {
+  const trimmed = text.trim();
+  const sp = trimmed.indexOf(" ");
+  return sp < 0 ? trimmed : trimmed.slice(0, sp);
+}
+
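+// Provenance heuristic, preserved from the bash original. Examples with
+// a hypothetical git history:
+//   first-commit subject "Add skill (round 3)"  → "skill-creator, round 3"
+//   first-commit author  "Jane Doe on 2026-01-02 09:00:00 +0000"
+//                        → "git: Jane Doe on 2026-01-02"
+// The %an split below assumes two-word author names; a one-word author
+// yields a slightly garbled string, exactly as the bash version did.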
""}`; + const authorLine = firstLine( + gitOutput(["log", "--reverse", "--format=%an on %ai", "--", file]), + ); + if (authorLine === "") return "git: unknown"; + const parts = authorLine.split(/\s+/); + if (parts.length < 4) return `git: ${authorLine}`; + return `git: ${parts[0] ?? ""} ${parts[1] ?? ""} on ${parts[3] ?? ""}`; +} + +function computeLoadDatetime(file: string): string { + const line = firstLine( + gitOutput(["log", "--reverse", "--format=%ai", "--", file]), + ); + return isoDateOnly(line); +} + +function computeLastUpdated(file: string): string { + const line = firstLine(gitOutput(["log", "-1", "--format=%ai", "--", file])); + return isoDateOnly(line); +} + +function computeBpRules(file: string): string { + let content: string; + try { + content = readFileSync(file, "utf8"); + } catch { + return "[]"; + } + const matches = content.match(BP_RE); + if (matches === null || matches.length === 0) return "[]"; + const unique = [...new Set(matches)].sort((a, b) => a.localeCompare(b)); + return `[${unique.join(", ")}]`; +} + +interface ProcessOutcome { + readonly status: "ok" | "wrote" | "warn" | "error"; + readonly message: string; + readonly exitCode: ExitCode; +} + +function buildInjections( + file: string, + content: string, + today: string, +): readonly string[] { + const inject: string[] = []; + if (!fieldPresent(content, "record_source")) { + inject.push(`record_source: "${computeRecordSource(file)}"`); + } + if (!fieldPresent(content, "load_datetime")) { + inject.push(`load_datetime: "${computeLoadDatetime(file)}"`); + } + if (!fieldPresent(content, "last_updated")) { + const value = computeLastUpdated(file) || today; + inject.push(`last_updated: "${value}"`); + } + if (!fieldPresent(content, "status")) inject.push("status: active"); + if (!fieldPresent(content, "bp_rules_cited")) { + inject.push(`bp_rules_cited: ${computeBpRules(file)}`); + } + return inject; +} + +function injectBeforeSecondFence( + content: string, + blob: readonly string[], +): string { + const lines = content.split("\n"); + const out: string[] = []; + let count = 0; + for (const line of lines) { + if (line === "---") { + count++; + if (count === 2) { + for (const b of blob) out.push(b); + out.push(line); + continue; + } + } + out.push(line); + } + return out.join("\n"); +} + +function processOne( + file: string, + dryRun: boolean, + today: string, +): ProcessOutcome { + // Single readFileSync — avoids TOCTOU race the bash original had via + // separate `[ ! -f "$file" ]` test before content read. Errors from + // ENOENT, EACCES, EISDIR, etc. all surface as readFileSync exceptions + // and we map them to the same warn/error outcomes here. + let content: string; + try { + content = readFileSync(file, "utf8"); + } catch (err) { + const code = + err !== null && typeof err === "object" && "code" in err + ? 
+function injectBeforeSecondFence(
+  content: string,
+  blob: readonly string[],
+): string {
+  const lines = content.split("\n");
+  const out: string[] = [];
+  let count = 0;
+  for (const line of lines) {
+    if (line === "---") {
+      count++;
+      if (count === 2) {
+        for (const b of blob) out.push(b);
+        out.push(line);
+        continue;
+      }
+    }
+    out.push(line);
+  }
+  return out.join("\n");
+}
+
+function processOne(
+  file: string,
+  dryRun: boolean,
+  today: string,
+): ProcessOutcome {
+  // Single readFileSync — avoids the TOCTOU race the bash original had
+  // via a separate `[ ! -f "$file" ]` test before the content read.
+  // ENOENT, EACCES, EISDIR, etc. all surface as readFileSync exceptions
+  // and are mapped to warn (skippable) or error (fatal) outcomes here.
+  let content: string;
+  try {
+    content = readFileSync(file, "utf8");
+  } catch (err) {
+    const code =
+      err !== null && typeof err === "object" && "code" in err
+        ? String(err.code)
+        : "";
+    if (code === "ENOENT" || code === "EISDIR") {
+      return {
+        status: "warn",
+        message: `warn: skipping non-file: ${file}`,
+        exitCode: 0,
+      };
+    }
+    return { status: "error", message: `error: cannot read ${file}`, exitCode: 2 };
+  }
+  if (dashCount(content) < 2) {
+    return {
+      status: "error",
+      message: `error: ${file} has no closing frontmatter fence`,
+      exitCode: 2,
+    };
+  }
+  const inject = buildInjections(file, content, today);
+  if (inject.length === 0) {
+    return { status: "ok", message: `ok ${file} (already compliant)`, exitCode: 0 };
+  }
+  if (dryRun) {
+    const lines = [
+      `--- ${file} (dry-run, would inject ${String(inject.length)} field(s)):`,
+      ...inject.map((l) => `  ${l}`),
+    ];
+    return { status: "ok", message: lines.join("\n"), exitCode: 0 };
+  }
+  const newContent = injectBeforeSecondFence(content, inject);
+  // Atomic rewrite: write to a sibling tmp file + rename. Mirrors the
+  // bash original's `mktemp` + `mv` pattern. Crash/kill/disk-full mid-
+  // write leaves the original file intact rather than truncated. The
+  // tmp lives next to the target so rename is a same-filesystem
+  // operation (atomic on POSIX).
+  const tmpPath = `${file}.tmp.${String(process.pid)}.${String(Date.now())}`;
+  try {
+    writeFileSync(tmpPath, newContent);
+    renameSync(tmpPath, file);
+  } catch (err) {
+    // Preserve-original-on-failure: do NOT unlink the original; leave
+    // the tmp file behind for manual recovery rather than risk losing
+    // data in a delete-before-retry pattern. Modern Node/Bun
+    // renameSync overwrites atomically on all platforms in the common
+    // case; failure is rare (Windows file lock, perms, disk-full) and
+    // a stray tmp file is recoverable. Deleting the target before a
+    // retry that might also fail is the unsafe path Codex flagged.
+    const message = err instanceof Error ? err.message : "unknown error";
+    return {
+      status: "error",
+      message: `error: cannot rewrite ${file}: ${message} (tmp preserved at ${tmpPath})`,
+      exitCode: 2,
+    };
+  }
+  return {
+    status: "wrote",
+    message: `wrote ${file} (${String(inject.length)} field(s) added)`,
+    exitCode: 0,
+  };
+}
+
+function todayUtc(): string {
+  return new Date().toISOString().slice(0, 10);
+}
+
+function emitOutcome(outcome: ProcessOutcome): void {
+  if (outcome.status === "warn" || outcome.status === "error") {
+    process.stderr.write(`${outcome.message}\n`);
+  } else {
+    process.stdout.write(`${outcome.message}\n`);
+  }
+}
+
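+// main is exported, with the import.meta.main guard below, so callers
+// such as the slice's snapshot test can invoke it in-process with a
+// synthetic argv (e.g. ["--dry-run", "path/to/SKILL.md"]) instead of
+// spawning a Bun subprocess.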
+export function main(argv: readonly string[]): ExitCode {
+  const parsed = parseArgs(argv);
+  if (parsed.help) {
+    emitHelp();
+    return 0;
+  }
+  if (parsed.args === null) {
+    process.stderr.write(`error: ${parsed.errorMessage}\n`);
+    return 1;
+  }
+  const args = parsed.args;
+  if (args.all && args.files.length > 0) {
+    process.stderr.write("error: --all is mutually exclusive with explicit paths\n");
+    return 1;
+  }
+  // chdir to repo root before scanning so --all works regardless of
+  // caller cwd. The bash original used `find .claude/skills -maxdepth 2`,
+  // which had the same cwd-dependence issue; the TS port fixes this
+  // at the entry point.
+  if (args.all) process.chdir(repoRoot());
+  const files = args.all ? findAllSkillFiles() : args.files;
+  if (files.length === 0) {
+    process.stderr.write(
+      "usage: bun tools/skill-catalog/backfill_dv2_frontmatter.ts [--dry-run] <file> ... | --all\n",
+    );
+    return 1;
+  }
+  const today = todayUtc();
+  let rc: ExitCode = 0;
+  for (const f of files) {
+    const outcome = processOne(f, args.dryRun, today);
+    emitOutcome(outcome);
+    if (outcome.exitCode !== 0) rc = outcome.exitCode;
+  }
+  return rc;
+}
+
+if (import.meta.main) {
+  process.exit(main(process.argv.slice(2)));
+}