diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index c8192f831..fad271a2e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,9 +1,12 @@ { "name": "workflows-env", "image": "mcr.microsoft.com/devcontainers/python:3.11", - "onCreateCommand": "sudo apt-get update && sudo apt-get install -y python3-venv curl tar && sudo mkdir -p /usr/local/bin && curl -sSL 'https://github.com/rhysd/actionlint/releases/download/v1.7.3/actionlint_1.7.3_linux_amd64.tar.gz' | sudo tar -xz -C /usr/local/bin actionlint && sudo chmod +x /usr/local/bin/actionlint", + "onCreateCommand": "sudo apt-get update && sudo apt-get install -y python3-venv curl tar && sudo mkdir -p /usr/local/bin && curl -sSL 'https://github.com/rhysd/actionlint/releases/download/v1.7.3/actionlint_1.7.3_linux_amd64.tar.gz' | sudo tar -xz -C /usr/local/bin actionlint && sudo chmod +x /usr/local/bin/actionlint && sudo rm -f /usr/local/py-utils/bin/black /usr/local/py-utils/bin/ruff /usr/local/py-utils/bin/isort /usr/local/py-utils/bin/mypy", "postCreateCommand": "pip install -e '.[dev]' && pre-commit install --install-hooks --hook-type pre-commit --hook-type pre-push", "postStartCommand": "pre-commit install --install-hooks --hook-type pre-commit --hook-type pre-push", + "containerEnv": { + "PATH": "/home/vscode/.local/bin:${containerEnv:PATH}" + }, "features": { "ghcr.io/devcontainers/features/github-cli:1": {}, "ghcr.io/devcontainers/features/node:1": { diff --git a/.github/workflows/health-72-template-sync.yml b/.github/workflows/health-72-template-sync.yml index 1bd6e4b9e..7bee68e12 100644 --- a/.github/workflows/health-72-template-sync.yml +++ b/.github/workflows/health-72-template-sync.yml @@ -16,12 +16,12 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 - + - name: Set up Python uses: actions/setup-python@v6 with: python-version: '3.11' - + - name: Validate template sync run: | if ! 
python scripts/validate_template_sync.py; then diff --git a/manager-database-pr327-fix.patch b/manager-database-pr327-fix.patch new file mode 100644 index 000000000..d81e1dd61 --- /dev/null +++ b/manager-database-pr327-fix.patch @@ -0,0 +1,230 @@ +diff --git a/adapters/base.py b/adapters/base.py +index 93e845b..f4a4a64 100644 +--- a/adapters/base.py ++++ b/adapters/base.py +@@ -10,9 +10,11 @@ from importlib import import_module + from typing import Any, Protocol + + try: +- import psycopg ++ import psycopg as _psycopg + except ImportError: # pragma: no cover - optional dependency +- psycopg = None ++ _psycopg = None # type: ignore[assignment] ++ ++psycopg = _psycopg + + + class AdapterProtocol(Protocol): +@@ -77,7 +79,8 @@ async def tracked_call(source: str, endpoint: str, *, db_path: str | None = None + status = getattr(resp, "status_code", 0) + size = len(getattr(resp, "content", b"")) + conn = connect_db(db_path) +- conn.execute("""CREATE TABLE IF NOT EXISTS api_usage ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS api_usage ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + source TEXT, +@@ -86,7 +89,8 @@ async def tracked_call(source: str, endpoint: str, *, db_path: str | None = None + bytes INT, + latency_ms INT, + cost_usd REAL +- )""") ++ )""" ++ ) + if isinstance(conn, sqlite3.Connection): + conn.execute( + "CREATE VIEW IF NOT EXISTS monthly_usage AS " +diff --git a/adapters/edgar.py b/adapters/edgar.py +index 4d4ba61..1d80aec 100644 +--- a/adapters/edgar.py ++++ b/adapters/edgar.py +@@ -45,6 +45,8 @@ async def _request_with_retry( + extra={"url": url, "attempt": attempt, "max_retries": max_retries}, + ) + await asyncio.sleep(wait) ++ # Unreachable but satisfies type checker ++ raise RuntimeError("Unreachable") # pragma: no cover + + + async def list_new_filings(cik: str, since: str) -> list[dict[str, str]]: +diff
--git a/api/chat.py b/api/chat.py +index e2be31d..0c81d91 100644 +--- a/api/chat.py ++++ b/api/chat.py +@@ -96,12 +96,7 @@ def chat( + q: str = Query( + ..., + description="User question", +- examples={ +- "basic": { +- "summary": "Holdings question", +- "value": "What is the latest holdings update?", +- } +- }, ++ examples=["What is the latest holdings update?"], + ) + ): + """Return a naive answer built from stored documents.""" +diff --git a/api/managers.py b/api/managers.py +index 2a19661..5128f82 100644 +--- a/api/managers.py ++++ b/api/managers.py +@@ -88,19 +88,23 @@ def _ensure_manager_table(conn) -> None: + """Create the managers table if it does not exist.""" + # Use dialect-specific schema to keep SQLite and Postgres aligned. + if isinstance(conn, sqlite3.Connection): +- conn.execute("""CREATE TABLE IF NOT EXISTS managers ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS managers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT NOT NULL, + department TEXT NOT NULL +- )""") ++ )""" ++ ) + else: +- conn.execute("""CREATE TABLE IF NOT EXISTS managers ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS managers ( + id bigserial PRIMARY KEY, + name text NOT NULL, + email text NOT NULL, + department text NOT NULL +- )""") ++ )""" ++ ) + + + def _insert_manager(conn, payload: ManagerCreate) -> int: +@@ -111,7 +115,7 @@ + (payload.name, payload.email, payload.department), + ) + conn.commit() +- return int(cursor.lastrowid) ++ return int(cursor.lastrowid) if cursor.lastrowid is not None else 0 + cursor = conn.execute( + "INSERT INTO managers(name, email, department) VALUES (%s, %s, %s) RETURNING id", + (payload.name, payload.email, payload.department), +diff --git a/embeddings.py b/embeddings.py +index e75e3f8..2b7f76e 100644 +--- a/embeddings.py ++++ b/embeddings.py +@@ -51,22 +51,26 @@ def store_document(text: str,
db_path: str | None = None) -> None: + if register_vector: + register_vector(conn) + conn.execute("CREATE EXTENSION IF NOT EXISTS vector") +- conn.execute("""CREATE TABLE IF NOT EXISTS documents ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS documents ( + id SERIAL PRIMARY KEY, + content TEXT, + embedding vector(384) +- )""") ++ )""" ++ ) + emb = Vector(embed_text(text)) if register_vector else embed_text(text) + conn.execute( + "INSERT INTO documents(content, embedding) VALUES (%s,%s)", + (text, emb), + ) + else: +- conn.execute("""CREATE TABLE IF NOT EXISTS documents ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS documents ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + content TEXT, + embedding TEXT +- )""") ++ )""" ++ ) + emb = json.dumps(embed_text(text)) + conn.execute( + "INSERT INTO documents(content, embedding) VALUES (?, ?)", +diff --git a/etl/daily_diff_flow.py b/etl/daily_diff_flow.py +index 2cca646..918a0b9 100644 +--- a/etl/daily_diff_flow.py ++++ b/etl/daily_diff_flow.py +@@ -20,12 +20,14 @@ def compute(cik: str, date: str, db_path: str) -> None: + try: + additions, exits = diff_holdings(cik, db_path) + conn = connect_db(db_path) +- conn.execute("""CREATE TABLE IF NOT EXISTS daily_diff ( ++ conn.execute( ++ """CREATE TABLE IF NOT EXISTS daily_diff ( + date TEXT, + cik TEXT, + cusip TEXT, + change TEXT +- )""") ++ )""" ++ ) + for cusip in additions: + conn.execute( + "INSERT INTO daily_diff VALUES (?,?,?,?)", +diff --git a/etl/edgar_flow.py b/etl/edgar_flow.py +index 6bd964d..6860cb0 100644 +--- a/etl/edgar_flow.py ++++ b/etl/edgar_flow.py +@@ -38,7 +38,8 @@ logger = logging.getLogger(__name__) + async def fetch_and_store(cik: str, since: str): + filings = await ADAPTER.list_new_filings(cik, since) + conn = connect_db(DB_PATH) +- conn.execute(""" ++ conn.execute( ++ """ + CREATE TABLE IF NOT EXISTS holdings ( + cik TEXT, + accession TEXT, +@@ -48,7 +49,8 @@ async def fetch_and_store(cik: str, since: str): + value INTEGER, + sshPrnamt INTEGER + ) 
+- """) ++ """ ++ ) + results = [] + for filing in filings: + raw = await ADAPTER.download(filing) +diff --git a/etl/logging_setup.py b/etl/logging_setup.py +index d05d4d6..b49c500 100644 +--- a/etl/logging_setup.py ++++ b/etl/logging_setup.py +@@ -10,9 +10,11 @@ from typing import Any + import boto3 + + try: # pragma: no cover - optional dependency for structured logs +- from pythonjsonlogger import jsonlogger ++ from pythonjsonlogger import jsonlogger as _jsonlogger + except ImportError: # pragma: no cover +- jsonlogger = None ++ _jsonlogger = None # type: ignore[assignment] ++ ++jsonlogger = _jsonlogger + + _LOGGING_CONFIGURED = False + +diff --git a/tests/test_open_issues.py b/tests/test_open_issues.py +index 2531bfc..f68beae 100644 +--- a/tests/test_open_issues.py ++++ b/tests/test_open_issues.py +@@ -4,13 +4,15 @@ from scripts.open_issues import parse_tasks + + + def test_parse_tasks(tmp_path): +- md = textwrap.dedent(""" ++ md = textwrap.dedent( ++ """ + ### 4.1 Stage 0 — Bootstrap + 1. Create docker-compose + 2. Create schema + ### 4.2 Stage 1 — Proof + * Implement adapter +- """) ++ """ ++ ) + file = tmp_path / "a.md" + file.write_text(md) + tasks = parse_tasks(str(file)) diff --git a/scripts/langchain/followup_issue_generator.py b/scripts/langchain/followup_issue_generator.py index 94ba13d73..97d864c48 100755 --- a/scripts/langchain/followup_issue_generator.py +++ b/scripts/langchain/followup_issue_generator.py @@ -531,19 +531,19 @@ def _resolve_section(label: str) -> str | None: def _parse_sections(body: str) -> dict[str, list[str]]: """Parse issue body into recognized sections. - Splits the body by top-level headings (# or ##) and maps content to known section keys. - Unrecognized top-level headings terminate the current section. - Subheadings (###, ####, etc.) within a section are preserved as content. + Splits the body by headings (#, ##, ###) and maps content to known section keys. + Unrecognized headings terminate the current section. 
+ Deeper subheadings (####, #####, etc.) within a section are preserved as content. """ sections: dict[str, list[str]] = {key: [] for key in SECTION_TITLES} current: str | None = None for line in body.splitlines(): - # Only match top-level section headings (# or ## but not ### or deeper) - # Subheadings (###, ####) are kept as content within the current section - heading_match = re.match(r"^\s*#{1,2}\s+(.*)$", line) + # Match section headings (#, ##, ###) - GitHub issue forms use ### for fields + # Deeper headings (####, #####) are kept as content within the current section + heading_match = re.match(r"^\s*#{1,3}\s+(.*)$", line) if heading_match: section_key = _resolve_section(heading_match.group(1)) - # Update current - set to None for unrecognized top-level headings + # Update current - set to None for unrecognized headings # This prevents content under "## Random Notes" etc. from being # appended to the previous recognized section current = section_key diff --git a/scripts/sync_tool_versions.py b/scripts/sync_tool_versions.py index a8fa84d08..482a35673 100644 --- a/scripts/sync_tool_versions.py +++ b/scripts/sync_tool_versions.py @@ -14,6 +14,7 @@ PIN_FILE = Path(".github/workflows/autofix-versions.env") PYPROJECT_FILE = Path("pyproject.toml") +TEMPLATE_FILE = Path("templates/consumer-repo/.github/workflows/autofix-versions.env") @dataclasses.dataclass(frozen=True) @@ -167,18 +168,34 @@ def main(argv: Iterable[str]) -> int: pyproject_content, TOOL_CONFIGS, env_values, apply_changes ) - if project_mismatches and not apply_changes: - for package, message in project_mismatches.items(): + # Check template file is in sync with source + template_mismatches: dict[str, str] = {} + if TEMPLATE_FILE.exists(): + template_content = TEMPLATE_FILE.read_text(encoding="utf-8") + source_content = PIN_FILE.read_text(encoding="utf-8") + if template_content != source_content: + template_mismatches["template"] = ( + "templates/consumer-repo autofix-versions.env differs from source" 
+ ) + if apply_changes: + TEMPLATE_FILE.write_text(source_content, encoding="utf-8") + + all_mismatches = {**project_mismatches, **template_mismatches} + if all_mismatches and not apply_changes: + for package, message in all_mismatches.items(): print(f"✗ {package}: {message}", file=sys.stderr) print( - "Use --apply to rewrite pyproject.toml with the pinned versions.", + "Use --apply to sync tool versions to pyproject.toml and template.", file=sys.stderr, ) return 1 - if apply_changes and pyproject_updated != pyproject_content: - PYPROJECT_FILE.write_text(pyproject_updated, encoding="utf-8") - print("✓ tool pins synced to pyproject.toml") + if apply_changes: + if pyproject_updated != pyproject_content: + PYPROJECT_FILE.write_text(pyproject_updated, encoding="utf-8") + print("✓ tool pins synced to pyproject.toml") + if template_mismatches: + print("✓ template autofix-versions.env synced from source") return 0 diff --git a/templates/consumer-repo/.github/workflows/autofix-versions.env b/templates/consumer-repo/.github/workflows/autofix-versions.env index fab8da96e..f71ad7f41 100644 --- a/templates/consumer-repo/.github/workflows/autofix-versions.env +++ b/templates/consumer-repo/.github/workflows/autofix-versions.env @@ -4,8 +4,8 @@ # NOTE: Only include dev tools here (linters, formatters, test runners). # Runtime dependencies (PyYAML, Pydantic, Hypothesis) should be managed via Dependabot # in each consumer repo's pyproject.toml directly, NOT synced from this file. 
-BLACK_VERSION=25.12.0 -RUFF_VERSION=0.14.11 +BLACK_VERSION=26.1.0 +RUFF_VERSION=0.14.13 ISORT_VERSION=7.0.0 DOCFORMATTER_VERSION=1.7.7 MYPY_VERSION=1.19.1 @@ -13,4 +13,3 @@ PYTEST_VERSION=9.0.2 PYTEST_COV_VERSION=7.0.0 PYTEST_XDIST_VERSION=3.8.0 COVERAGE_VERSION=7.13.1 - diff --git a/tests/scripts/test_sync_tool_versions.py b/tests/scripts/test_sync_tool_versions.py index 55735024e..551437361 100644 --- a/tests/scripts/test_sync_tool_versions.py +++ b/tests/scripts/test_sync_tool_versions.py @@ -7,6 +7,14 @@ from scripts import sync_tool_versions +@pytest.fixture(autouse=True) +def _disable_template_sync(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """Disable template sync in tests by pointing to non-existent path.""" + monkeypatch.setattr( + sync_tool_versions, "TEMPLATE_FILE", tmp_path / "nonexistent" / "template.env" + ) + + def _write_env_file(path: Path, versions: dict[str, str]) -> None: lines = [] for cfg in sync_tool_versions.TOOL_CONFIGS: @@ -252,3 +260,57 @@ def test_main_default_ok( def test_main_rejects_check_and_apply_together() -> None: with pytest.raises(SystemExit): sync_tool_versions.main(["--check", "--apply"]) + + +def test_template_sync_detects_mismatch( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + """Test that template file mismatch is detected.""" + env_path = tmp_path / "pins.env" + pyproject_path = tmp_path / "pyproject.toml" + template_path = tmp_path / "templates" / "autofix-versions.env" + template_path.parent.mkdir(parents=True) + + env_versions = {cfg.env_key: "12.0" for cfg in sync_tool_versions.TOOL_CONFIGS} + _write_env_file(env_path, env_versions) + pyproject_path.write_text(_make_pyproject_content(env_versions), encoding="utf-8") + # Template has different content + template_path.write_text("BLACK_VERSION=11.0\n", encoding="utf-8") + + monkeypatch.setattr(sync_tool_versions, "PIN_FILE", env_path) + monkeypatch.setattr(sync_tool_versions, "PYPROJECT_FILE", 
pyproject_path) + monkeypatch.setattr(sync_tool_versions, "TEMPLATE_FILE", template_path) + + exit_code = sync_tool_versions.main(["--check"]) + captured = capsys.readouterr() + + assert exit_code == 1 + assert "template" in captured.err + + +def test_template_sync_apply_updates_template( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + """Test that --apply syncs template from source.""" + env_path = tmp_path / "pins.env" + pyproject_path = tmp_path / "pyproject.toml" + template_path = tmp_path / "templates" / "autofix-versions.env" + template_path.parent.mkdir(parents=True) + + env_versions = {cfg.env_key: "13.0" for cfg in sync_tool_versions.TOOL_CONFIGS} + _write_env_file(env_path, env_versions) + pyproject_path.write_text(_make_pyproject_content(env_versions), encoding="utf-8") + # Template has different content + template_path.write_text("OLD_CONTENT\n", encoding="utf-8") + + monkeypatch.setattr(sync_tool_versions, "PIN_FILE", env_path) + monkeypatch.setattr(sync_tool_versions, "PYPROJECT_FILE", pyproject_path) + monkeypatch.setattr(sync_tool_versions, "TEMPLATE_FILE", template_path) + + exit_code = sync_tool_versions.main(["--apply"]) + captured = capsys.readouterr() + + assert exit_code == 0 + assert "template" in captured.out + # Template should now match source + assert template_path.read_text() == env_path.read_text()