Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
98 changes: 92 additions & 6 deletions .github/workflows/autofix.yml
Original file line number Diff line number Diff line change
Expand Up @@ -117,11 +117,72 @@ jobs:
return true;
};

// Check whether the given workflow run has a failed job whose name matches
// one of the lint jobs autofix can handle (lint-format / lint-ruff).
// Returns true when at least one such failed job exists, false otherwise.
const hasRelevantFailureJobForRun = async ({ owner, repo, runId }) => {
  const collected = [];
  let expectedTotal = null;

  // Paginate through the run's jobs, 100 per page, hard-capped at 25 pages
  // (2500 jobs) to bound API usage on pathological runs.
  for (let pageNum = 1; pageNum <= 25; pageNum += 1) {
    const { data } = await withRetry((client) =>
      client.rest.actions.listJobsForWorkflowRun({
        owner,
        repo,
        run_id: runId,
        per_page: 100,
        page: pageNum,
      })
    );

    const pageJobs = data?.jobs || [];
    collected.push(...pageJobs);

    if (typeof data?.total_count === 'number') {
      expectedTotal = data.total_count;
    }

    // A short page means we reached the end; so does matching the reported
    // total. Either way, stop paginating.
    const isLastPage = pageJobs.length < 100;
    const haveAll = expectedTotal !== null && collected.length >= expectedTotal;
    if (isLastPage || haveAll) {
      break;
    }
  }

  // Surface (but tolerate) truncation caused by the 25-page cap.
  if (expectedTotal !== null && collected.length < expectedTotal) {
    core.warning(
      `Only retrieved ${collected.length}/${expectedTotal} jobs for run ${runId}`
    );
  }

  // Keep only failed jobs whose names indicate a fixable lint job.
  const relevantFailures = collected.filter((job) => {
    if ((job.conclusion || '').toLowerCase() !== 'failure') {
      return false;
    }
    const jobName = String(job.name || '').toLowerCase();
    return jobName.includes('lint-format') || jobName.includes('lint-ruff');
  });

  if (relevantFailures.length === 0) {
    core.info(
      'No lint-format/lint-ruff job failures found; skipping autofix.'
    );
    return false;
  }

  core.info(
    `Relevant failures: ${relevantFailures.map((job) => job.name).join(', ')}`
  );
  return true;
};

// --- workflow_run trigger (after Gate/CI completes) ---
if (context.eventName === 'workflow_run') {
const run = context.payload.workflow_run;
const workflowName = run?.name || 'workflow';
const triggerHeadSha = String(run?.head_sha || run?.head_commit?.id || '');
const runId = Number(run?.id || 0);

// Only proceed when the upstream workflow failed
if (run.conclusion !== 'failure') {
Expand Down Expand Up @@ -178,12 +239,37 @@ jobs:
return;
}

const refForChecks = triggerHeadSha || headSha;
let shouldAutofix = await hasRelevantFailureCheck({
owner: context.repo.owner,
repo: context.repo.repo,
ref: refForChecks,
});
let shouldAutofix = false;
if (runId) {
try {
shouldAutofix = await hasRelevantFailureJobForRun({
owner: context.repo.owner,
repo: context.repo.repo,
runId,
});
} catch (error) {
const message = String(error?.message || error || '');
const status = Number(error?.status || error?.response?.status || 0);
if (status === 403 && message.toLowerCase().includes('rate limit')) {
core.warning(
'Rate limited listing workflow jobs; falling back to check runs.'
);
} else {
core.warning(
`Failed to list workflow jobs; falling back to check runs: ${message}`
);
}
}
}

if (!shouldAutofix) {
const refForChecks = triggerHeadSha || headSha;
shouldAutofix = await hasRelevantFailureCheck({
owner: context.repo.owner,
repo: context.repo.repo,
ref: refForChecks,
});
}

if (!shouldAutofix && triggerHeadSha && triggerHeadSha !== headSha) {
core.info(
Expand Down
31 changes: 27 additions & 4 deletions scripts/langchain/followup_issue_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -987,22 +987,45 @@
issue_number: int | None,
) -> str:
"""Invoke LLM and return response text."""
from langchain_core.messages import HumanMessage
try:
from langchain_core.messages import HumanMessage
except ModuleNotFoundError:
HumanMessage = None # type: ignore[assignment]

Check failure on line 993 in scripts/langchain/followup_issue_generator.py

View workflow job for this annotation

GitHub Actions / Python CI / lint-ruff

Ruff (N806)

scripts/langchain/followup_issue_generator.py:993:9: N806 Variable `HumanMessage` in function should be lowercase

Check failure on line 993 in scripts/langchain/followup_issue_generator.py

View workflow job for this annotation

GitHub Actions / Python CI / lint-ruff

Ruff (N806)

scripts/langchain/followup_issue_generator.py:993:9: N806 Variable `HumanMessage` in function should be lowercase

config = _build_llm_config(
operation=operation,
pr_number=pr_number,
issue_number=issue_number,
)

if HumanMessage is not None:
messages: list[Any] = [HumanMessage(content=prompt)]
try:
response = client.invoke(messages, config=config)
except TypeError as exc:
LOGGER.warning(
"LLM invoke failed with config/metadata; using config/metadata fallback. Error: %s",
exc,
)
response = client.invoke(messages)
return getattr(response, "content", None) or str(response)

# langchain_core isn't available. Prefer non-message invoke signatures first.
try:
response = client.invoke([HumanMessage(content=prompt)], config=config)
response = client.invoke(prompt, config=config)
except TypeError as exc:
LOGGER.warning(
"LLM invoke failed with config/metadata; using config/metadata fallback. Error: %s",
exc,
)
response = client.invoke([HumanMessage(content=prompt)])
return response.content
try:
response = client.invoke(prompt)
except Exception as inner_exc:
raise RuntimeError(
"Unable to invoke client without langchain_core installed. "
"Install langchain-core or provide a client that accepts plain string prompts."
) from inner_exc
return getattr(response, "content", None) or str(response)


def _extract_json(text: str) -> dict[str, Any]:
Expand Down
3 changes: 1 addition & 2 deletions tests/test_validate_release_workflow_yaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,7 @@ def _write_workflow(path: Path, *, extra: str = "") -> None:
name: release-${{ env.RELEASE_VERSION }}
path: release/${{ env.RELEASE_VERSION }}/
retention-days: 7
"""
+ extra,
""" + extra,
encoding="utf-8",
)

Expand Down
Loading