Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions .github/actions/setup-api-client/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,22 @@ runs:
if [ -d "node_modules/@octokit/rest" ]; then
echo "✅ @octokit/rest already installed"
else
# Snapshot vendored package metadata before npm install.
# npm may overwrite transitive deps (e.g. minimatch) that are
# committed as vendored packages with intentional version pins.
VENDORED_SNAPSHOT=""
if [ -f "node_modules/minimatch/package.json" ]; then
VENDORED_SNAPSHOT=$(mktemp -d)
for pkg_dir in node_modules/*/; do
if [ -f "${pkg_dir}package.json" ]; then
pkg_name=$(basename "$pkg_dir")
mkdir -p "${VENDORED_SNAPSHOT}/${pkg_name}"
cp "${pkg_dir}package.json" "${VENDORED_SNAPSHOT}/${pkg_name}/package.json"
fi
done
echo "📸 Snapshotted vendored package metadata"
fi

# Install with pinned versions for consistency
# Capture stderr for debugging if the command fails
npm_output=$(mktemp)
Expand All @@ -122,6 +138,20 @@ runs:
@octokit/plugin-paginate-rest@9.1.5 \
@octokit/auth-app@6.0.3
fi

# Restore vendored package metadata that npm may have overwritten
if [ -n "${VENDORED_SNAPSHOT:-}" ] && [ -d "${VENDORED_SNAPSHOT}" ]; then
for pkg_backup in "${VENDORED_SNAPSHOT}"/*/; do
pkg_name=$(basename "$pkg_backup")
if [ -f "node_modules/${pkg_name}/package.json" ] && \
[ -f "${pkg_backup}package.json" ]; then
cp "${pkg_backup}package.json" "node_modules/${pkg_name}/package.json"
fi
done
rm -rf "${VENDORED_SNAPSHOT}"
echo "📸 Restored vendored package metadata"
fi

echo "✅ @octokit dependencies installed"
fi

Expand Down
62 changes: 62 additions & 0 deletions .github/scripts/__tests__/detect-changes.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,3 +82,65 @@ test('detectChanges fetches files via callback', async () => {
assert.equal(result.outputs.run_core, 'true');
assert.equal(result.outputs.workflow_changed, 'false');
});

// Verifies the conservative fallback path: when the paginate iterator throws a
// 403 ("Resource not accessible by integration"), detectChanges must assume
// code changes (run_core=true, workflow_changed=true) rather than fail, report
// reason=rate_limited, and emit exactly one warning via core.warning.
test('detectChanges falls back to conservative defaults when listFiles is inaccessible', async () => {
const warnings = [];
const result = await detectChanges({
// Stub of @actions/core: capture warnings, ignore outputs.
core: {
warning(message) {
warnings.push(String(message));
},
setOutput() {},
},
context: {
eventName: 'pull_request',
repo: { owner: 'octo', repo: 'demo' },
payload: { pull_request: { number: 42 } },
},
github: {
rest: {
pulls: {
listFiles: async () => ({ data: [] }),
},
},
// paginate.iterator throws synchronously with status 403 to simulate a
// GitHub App token lacking permission to list PR files.
paginate: {
iterator: () => {
const error = new Error('Resource not accessible by integration');
error.status = 403;
throw error;
},
},
},
});

assert.equal(result.outputs.doc_only, 'false');
assert.equal(result.outputs.run_core, 'true');
assert.equal(result.outputs.reason, 'rate_limited');
assert.equal(result.outputs.docker_changed, 'false');
// Conservative default: assume workflows changed when file list is unknown.
assert.equal(result.outputs.workflow_changed, 'true');
assert.equal(warnings.length, 1);
assert.match(warnings[0], /Unable to determine changed files via API/);
});

// Verifies the compatibility path for Octokit clients that expose paginate as
// a plain async function (no paginate.iterator): detectChanges should still
// collect filenames and classify a docs-only change set correctly.
test('detectChanges supports clients without paginate.iterator', async () => {
const result = await detectChanges({
context: {
eventName: 'pull_request',
repo: { owner: 'octo', repo: 'demo' },
payload: { pull_request: { number: 1 } },
},
github: {
rest: {
pulls: {
listFiles: async () => ({ data: [] }),
},
},
// Function-style paginate returning the flattened file list directly.
paginate: async () => [{ filename: 'docs/README.md' }],
},
});

// A single docs/ file means docs_only: skip core jobs, no workflow change.
assert.equal(result.outputs.doc_only, 'true');
assert.equal(result.outputs.run_core, 'false');
assert.equal(result.outputs.reason, 'docs_only');
assert.equal(result.outputs.workflow_changed, 'false');
});
57 changes: 48 additions & 9 deletions .github/scripts/detect-changes.js
Original file line number Diff line number Diff line change
Expand Up @@ -168,32 +168,71 @@ function isRateLimitError(error) {
return message.includes('rate limit') || message.includes('ratelimit');
}

// Decide whether a pulls.listFiles failure is recoverable. Rate limits,
// auth/permission problems (401/403), missing resources (404), and
// validation errors (422) all trigger the conservative fallback instead of
// failing the run; anything else is re-thrown by the caller.
function isNonFatalListFilesError(error) {
  if (!error) {
    return false;
  }
  if (isRateLimitError(error)) {
    return true;
  }
  const httpStatus = error.status || error?.response?.status;
  const recoverableStatuses = [401, 403, 404, 422];
  if (recoverableStatuses.includes(httpStatus)) {
    return true;
  }
  // Some clients surface the condition only in the message text, so match
  // known permission/validation phrases case-insensitively as a last resort.
  const text = String(error.message || error?.response?.data?.message || '').toLowerCase();
  const recoverableMarkers = [
    'resource not accessible by integration',
    'insufficient permission',
    'requires higher permissions',
    'not found',
    'unprocessable',
    'validation failed',
  ];
  return recoverableMarkers.some((marker) => text.includes(marker));
}

async function listChangedFiles({ github, context }) {
const pull = context?.payload?.pull_request;
const number = pull?.number;
if (!github || !context || !number) {
return [];
}
try {
const iterator = github.paginate.iterator(github.rest.pulls.listFiles, {
const files = [];
const params = {
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: number,
per_page: 100,
});
const files = [];
for await (const page of iterator) {
if (Array.isArray(page.data)) {
for (const item of page.data) {
};
if (typeof github?.paginate?.iterator === 'function') {
const iterator = github.paginate.iterator(github.rest.pulls.listFiles, params);
for await (const page of iterator) {
if (Array.isArray(page.data)) {
for (const item of page.data) {
if (item && typeof item.filename === 'string') {
files.push(item.filename);
}
}
}
}
return files;
}

if (typeof github?.paginate === 'function') {
const items = await github.paginate(github.rest.pulls.listFiles, params);
if (Array.isArray(items)) {
for (const item of items) {
if (item && typeof item.filename === 'string') {
files.push(item.filename);
}
}
}
return files;
}
return files;

throw new Error('GitHub paginate API is unavailable');
} catch (error) {
if (isRateLimitError(error)) {
if (isNonFatalListFilesError(error)) {
return null;
}
throw error;
Expand Down Expand Up @@ -261,7 +300,7 @@ async function detectChanges({ github, context, core, files, fetchFiles } = {})
workflow_changed: 'true',
};
const warn = core?.warning ? core.warning.bind(core) : console.warn.bind(console);
warn('Rate limit reached while determining changed files; assuming code changes (but not docker).');
warn('Unable to determine changed files via API; assuming code changes (but not docker).');
if (core) {
for (const [key, value] of Object.entries(outputs)) {
core.setOutput(key, value);
Expand Down
25 changes: 22 additions & 3 deletions .github/workflows/agents-72-codex-belt-worker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -903,9 +903,12 @@ jobs:

gh_output = os.environ.get('GITHUB_OUTPUT')
if gh_output:
task = start_info['task'] or {}
task_title = (task.get('title') or '').replace('\r', ' ').replace('\n', ' ')
with open(gh_output, 'a', encoding='utf-8') as handle:
handle.write(f"task_id={start_info['task']['id'] if start_info['task'] else ''}\n")
handle.write(f"task_status={start_info['task']['current_status'] if start_info['task'] else ''}\n")
handle.write(f"task_id={task.get('id', '')}\n")
handle.write(f"task_title={task_title}\n")
handle.write(f"task_status={task.get('current_status', '')}\n")
handle.write(f"ledger_changed={'true' if changed else 'false'}\n")
handle.write(f"ledger_created={'true' if start_info['created'] else 'false'}\n")
handle.write(f"ledger_base_aligned={'true' if base_aligned else 'false'}\n")
Expand Down Expand Up @@ -1187,12 +1190,28 @@ jobs:
const prNumber = Number('${{ steps.pr.outputs.number }}');
const branch = ('${{ steps.ctx.outputs.branch }}' || '').trim() || '(unknown branch)';
const dryRun = '${{ steps.mode.outputs.dry_run }}' === 'true';
const taskId = ('${{ steps.ledger_start.outputs.task_id }}' || '').trim();
const taskTitle = ('${{ steps.ledger_start.outputs.task_title }}' || '').trim();
const { owner, repo } = context.repo;
const marker = '<!-- codex-activation-marker -->';
const summary = dryRun
? `Codex Worker activated for branch \`${branch}\` (dry run preview).`
: `Codex Worker activated for branch \`${branch}\`.`;
const body = `${marker}\n${summary}\n\n@codex start\n\nAutomated belt worker prepared this PR. Please continue implementing the requested changes.`;
// Direct Codex to focus on the single next ledger task for higher
// first-commit success probability. Full issue context is in the
// PR body; this comment narrows the immediate scope.
let taskDirective = '';
if (taskId && taskTitle) {
taskDirective = [
'',
`**Focus on this task first:** \`${taskId}\` — ${taskTitle}`,
'',
'Implement **only** this task in your first commit.',
'Ensure the code compiles and existing tests pass before moving on.',
'The keepalive loop will assign subsequent tasks after this one is complete.',
].join('\n');
}
const body = `${marker}\n${summary}\n\n@codex start${taskDirective}`;

try {
const comments = await paginateWithRetry(
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/agents-autofix-loop.yml
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ jobs:
appendix: '',
stop_reason: '',
attempts: '0',
max_attempts: '3',
max_attempts: '2',
trigger_reason: 'unknown',
trigger_job: '',
trigger_step: '',
Expand Down Expand Up @@ -287,7 +287,7 @@ jobs:
// Reduce attempts for auto-escalated PRs (they weren't agent-initiated)
const isEscalated = labels.includes('autofix:escalated');
const maxAttempts = isEscalated
? Math.min(2, Number(outputs.max_attempts))
? 1
: Number(outputs.max_attempts);
const previousRuns = await paginateWithRetry(
github,
Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/agents-pr-meta-v4.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,11 @@ concurrency:
&& github.event.comment
&& github.event.comment.id
&& format('agents-pr-meta-comment-{0}', github.event.comment.id)
|| github.event_name == 'pull_request'
&& format('agents-pr-meta-pr-{0}', github.event.pull_request.number)
|| format('agents-pr-meta-run-{0}', github.run_id)
}}
cancel-in-progress: false
cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
comment_event_context:
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/reusable-18-autofix.yml
Original file line number Diff line number Diff line change
Expand Up @@ -794,6 +794,8 @@ jobs:
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add -A
# Unstage vendored node_modules that may have been modified by npm install
git reset HEAD -- .github/scripts/node_modules node_modules .workflows-lib/.github/scripts/node_modules 2>/dev/null || true
git commit -m "${AUTOFIX_COMMIT_PREFIX} formatting/lint"
echo "AUTOFIX_COMMIT_SHA=$(git rev-parse HEAD)" >> "$GITHUB_ENV"

Expand Down Expand Up @@ -865,6 +867,8 @@ jobs:
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add -A
# Unstage vendored node_modules that may have been modified by npm install
git reset HEAD -- .github/scripts/node_modules node_modules .workflows-lib/.github/scripts/node_modules 2>/dev/null || true
git commit -m "${AUTOFIX_COMMIT_PREFIX} formatting/lint (patch)" || true
git format-patch -1 --stdout > autofix.patch

Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/reusable-agents-issue-bridge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -580,7 +580,9 @@ jobs:
git checkout -B "$HEAD_BRANCH" "origin/${BASE_BRANCH}"
mkdir -p agents
printf "<!-- bootstrap for %s on issue #%s -->\n" "$AGENT" "$ISSUE_NUM" > "agents/${AGENT}-${ISSUE_NUM}.md"
git add -A || true
# Stage only the intended bootstrap file — 'git add -A' would capture
# vendored node_modules changes made by setup-api-client npm install.
git add "agents/${AGENT}-${ISSUE_NUM}.md" || true
if ! git diff --cached --quiet; then
git commit -m "chore(${AGENT}): bootstrap PR for issue #${ISSUE_NUM}"
else
Expand Down
4 changes: 2 additions & 2 deletions scripts/langchain/capability_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,8 +161,8 @@ def _is_multi_action_task(task: str) -> bool:
def _requires_admin_access(task: str) -> bool:
patterns = [
r"\bgithub\s+secrets?\b",
r"\b(?:manage|configure|set|create|update|delete|add|modify|rotate)\s+secrets?\b",
r"\bsecrets?\s+(?:management|configuration|rotation)\b",
r"\b(?:manage|configure|set|create|update|delete|add|modify|rotate)\b.{0,30}\bsecrets?\b",
r"\bsecrets?\b.{0,30}\b(?:management|configuration|rotation)\b",
r"\brepository\s+settings\b",
r"\brepo\s+settings\b",
r"\bbranch\s+protection\b",
Expand Down
8 changes: 4 additions & 4 deletions scripts/langchain/verdict_policy.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"fail": 3,
}

CONCERNS_NEEDS_HUMAN_THRESHOLD = 0.50
CONCERNS_NEEDS_HUMAN_THRESHOLD = 0.85


@dataclass(frozen=True)
Expand Down Expand Up @@ -193,11 +193,11 @@ def evaluate_verdict_policy(
needs_human_reason = ""
if split_verdict:
confidence_value = concerns_confidence or 0.0
if confidence_value < CONCERNS_NEEDS_HUMAN_THRESHOLD:
if confidence_value >= CONCERNS_NEEDS_HUMAN_THRESHOLD:
needs_human = True
needs_human_reason = (
"Provider verdicts split with low-confidence concerns; "
f"dissenting confidence {confidence_value:.2f} < "
"Provider verdicts split with high-confidence concerns; "
f"dissenting confidence {confidence_value:.2f} >= "
f"{CONCERNS_NEEDS_HUMAN_THRESHOLD:.2f}. "
"Requires human review before starting another automated follow-up."
)
Expand Down
15 changes: 14 additions & 1 deletion scripts/ledger_migrate_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,14 +174,27 @@ def main(argv: Iterable[str] | None = None) -> int:

mismatches: list[LedgerResult] = []
updated: list[LedgerResult] = []
skipped: list[tuple[Path, str]] = []
for ledger_path in ledgers:
result = migrate_ledger(ledger_path, default_branch, check=args.check)
try:
result = migrate_ledger(ledger_path, default_branch, check=args.check)
except (MigrationError, yaml.YAMLError) as exc:
# One corrupt ledger must not block processing of the remaining files.
reason = str(exc).replace("\n", " ").replace("\r", " ")
print(f"::warning::Skipping {ledger_path.name}: {reason}")
skipped.append((ledger_path, reason))
continue
if args.check:
if result.previous != default_branch:
mismatches.append(result)
elif result.changed:
updated.append(result)

if skipped:
print(f"Skipped {len(skipped)} corrupt ledger(s):")
for path, reason in skipped:
print(f" - {path.name}: {reason}")

if args.check:
if mismatches:
print("Found ledgers with stale base values:")
Expand Down
Loading
Loading