Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
{
"name": "workflows-env",
"image": "mcr.microsoft.com/devcontainers/python:3.11",
"onCreateCommand": "sudo apt-get update && sudo apt-get install -y python3-venv curl tar && sudo mkdir -p /usr/local/bin && curl -sSL 'https://github.com/rhysd/actionlint/releases/download/v1.7.3/actionlint_1.7.3_linux_amd64.tar.gz' | sudo tar -xz -C /usr/local/bin actionlint && sudo chmod +x /usr/local/bin/actionlint",
"onCreateCommand": "sudo apt-get update && sudo apt-get install -y python3-venv curl tar && sudo mkdir -p /usr/local/bin && curl -sSL 'https://github.com/rhysd/actionlint/releases/download/v1.7.3/actionlint_1.7.3_linux_amd64.tar.gz' | sudo tar -xz -C /usr/local/bin actionlint && sudo chmod +x /usr/local/bin/actionlint && sudo rm -f /usr/local/py-utils/bin/black /usr/local/py-utils/bin/ruff /usr/local/py-utils/bin/isort /usr/local/py-utils/bin/mypy",
"postCreateCommand": "pip install -e '.[dev]' && pre-commit install --install-hooks --hook-type pre-commit --hook-type pre-push",
"postStartCommand": "pre-commit install --install-hooks --hook-type pre-commit --hook-type pre-push",
"containerEnv": {
"PATH": "/home/vscode/.local/bin:${containerEnv:PATH}"
},
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {},
"ghcr.io/devcontainers/features/node:1": {
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/health-72-template-sync.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6

- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.11'

- name: Validate template sync
run: |
if ! python scripts/validate_template_sync.py; then
Expand Down
236 changes: 236 additions & 0 deletions manager-database-pr327-fix.patch
Original file line number Diff line number Diff line change
@@ -0,0 +1,236 @@
diff --git a/adapters/base.py b/adapters/base.py
index 93e845b..f4a4a64 100644
--- a/adapters/base.py
+++ b/adapters/base.py
@@ -10,9 +10,11 @@ from importlib import import_module
from typing import Any, Protocol

try:
- import psycopg
+ import psycopg as _psycopg
except ImportError: # pragma: no cover - optional dependency
- psycopg = None
+ _psycopg = None # type: ignore[assignment]
+
+psycopg = _psycopg


class AdapterProtocol(Protocol):
@@ -77,7 +79,8 @@ async def tracked_call(source: str, endpoint: str, *, db_path: str | None = None
status = getattr(resp, "status_code", 0)
size = len(getattr(resp, "content", b""))
conn = connect_db(db_path)
- conn.execute("""CREATE TABLE IF NOT EXISTS api_usage (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS api_usage (
id INTEGER PRIMARY KEY AUTOINCREMENT,
ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
source TEXT,
@@ -86,7 +89,8 @@ async def tracked_call(source: str, endpoint: str, *, db_path: str | None = None
bytes INT,
latency_ms INT,
cost_usd REAL
- )""")
+ )"""
+ )
if isinstance(conn, sqlite3.Connection):
conn.execute(
"CREATE VIEW IF NOT EXISTS monthly_usage AS "
diff --git a/adapters/edgar.py b/adapters/edgar.py
index 4d4ba61..1d80aec 100644
--- a/adapters/edgar.py
+++ b/adapters/edgar.py
@@ -45,6 +45,8 @@ async def _request_with_retry(
extra={"url": url, "attempt": attempt, "max_retries": max_retries},
)
await asyncio.sleep(wait)
+ # Unreachable but satisfies type checker
+ raise RuntimeError("Unreachable") # pragma: no cover


async def list_new_filings(cik: str, since: str) -> list[dict[str, str]]:
diff --git a/api/chat.py b/api/chat.py
index e2be31d..0c81d91 100644
--- a/api/chat.py
+++ b/api/chat.py
@@ -96,12 +96,7 @@ def chat(
q: str = Query(
...,
description="User question",
- examples={
- "basic": {
- "summary": "Holdings question",
- "value": "What is the latest holdings update?",
- }
- },
+ examples=["What is the latest holdings update?"],
)
):
"""Return a naive answer built from stored documents."""
diff --git a/api/managers.py b/api/managers.py
index 2a19661..5128f82 100644
--- a/api/managers.py
+++ b/api/managers.py
@@ -88,19 +88,23 @@ def _ensure_manager_table(conn) -> None:
"""Create the managers table if it does not exist."""
# Use dialect-specific schema to keep SQLite and Postgres aligned.
if isinstance(conn, sqlite3.Connection):
- conn.execute("""CREATE TABLE IF NOT EXISTS managers (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS managers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
email TEXT NOT NULL,
department TEXT NOT NULL
- )""")
+ )"""
+ )
else:
- conn.execute("""CREATE TABLE IF NOT EXISTS managers (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS managers (
id bigserial PRIMARY KEY,
name text NOT NULL,
email text NOT NULL,
department text NOT NULL
- )""")
+ )"""
+ )


def _insert_manager(conn, payload: ManagerCreate) -> int:
@@ -111,7 +115,11 @@ def _insert_manager(conn, payload: ManagerCreate) -> int:
(payload.name, payload.email, payload.department),
)
conn.commit()
- return int(cursor.lastrowid)
+ return (
+ int(cursor.lastrowid)
+ if cursor.lastrowid is not None
+ else 0
+ )
cursor = conn.execute(
"INSERT INTO managers(name, email, department) VALUES (%s, %s, %s) RETURNING id",
(payload.name, payload.email, payload.department),
diff --git a/embeddings.py b/embeddings.py
index e75e3f8..2b7f76e 100644
--- a/embeddings.py
+++ b/embeddings.py
@@ -51,22 +51,26 @@ def store_document(text: str, db_path: str | None = None) -> None:
if register_vector:
register_vector(conn)
conn.execute("CREATE EXTENSION IF NOT EXISTS vector")
- conn.execute("""CREATE TABLE IF NOT EXISTS documents (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS documents (
id SERIAL PRIMARY KEY,
content TEXT,
embedding vector(384)
- )""")
+ )"""
+ )
emb = Vector(embed_text(text)) if register_vector else embed_text(text)
conn.execute(
"INSERT INTO documents(content, embedding) VALUES (%s,%s)",
(text, emb),
)
else:
- conn.execute("""CREATE TABLE IF NOT EXISTS documents (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS documents (
id INTEGER PRIMARY KEY AUTOINCREMENT,
content TEXT,
embedding TEXT
- )""")
+ )"""
+ )
emb = json.dumps(embed_text(text))
conn.execute(
"INSERT INTO documents(content, embedding) VALUES (?, ?)",
diff --git a/etl/daily_diff_flow.py b/etl/daily_diff_flow.py
index 2cca646..918a0b9 100644
--- a/etl/daily_diff_flow.py
+++ b/etl/daily_diff_flow.py
@@ -20,12 +20,14 @@ def compute(cik: str, date: str, db_path: str) -> None:
try:
additions, exits = diff_holdings(cik, db_path)
conn = connect_db(db_path)
- conn.execute("""CREATE TABLE IF NOT EXISTS daily_diff (
+ conn.execute(
+ """CREATE TABLE IF NOT EXISTS daily_diff (
date TEXT,
cik TEXT,
cusip TEXT,
change TEXT
- )""")
+ )"""
+ )
for cusip in additions:
conn.execute(
"INSERT INTO daily_diff VALUES (?,?,?,?)",
diff --git a/etl/edgar_flow.py b/etl/edgar_flow.py
index 6bd964d..6860cb0 100644
--- a/etl/edgar_flow.py
+++ b/etl/edgar_flow.py
@@ -38,7 +38,8 @@ logger = logging.getLogger(__name__)
async def fetch_and_store(cik: str, since: str):
filings = await ADAPTER.list_new_filings(cik, since)
conn = connect_db(DB_PATH)
- conn.execute("""
+ conn.execute(
+ """
CREATE TABLE IF NOT EXISTS holdings (
cik TEXT,
accession TEXT,
@@ -48,7 +49,8 @@ async def fetch_and_store(cik: str, since: str):
value INTEGER,
sshPrnamt INTEGER
)
- """)
+ """
+ )
results = []
for filing in filings:
raw = await ADAPTER.download(filing)
diff --git a/etl/logging_setup.py b/etl/logging_setup.py
index d05d4d6..b49c500 100644
--- a/etl/logging_setup.py
+++ b/etl/logging_setup.py
@@ -10,9 +10,11 @@ from typing import Any
import boto3

try: # pragma: no cover - optional dependency for structured logs
- from pythonjsonlogger import jsonlogger
+ from pythonjsonlogger import jsonlogger as _jsonlogger
except ImportError: # pragma: no cover
- jsonlogger = None
+ _jsonlogger = None # type: ignore[assignment]
+
+jsonlogger = _jsonlogger

_LOGGING_CONFIGURED = False

diff --git a/tests/test_open_issues.py b/tests/test_open_issues.py
index 2531bfc..f68beae 100644
--- a/tests/test_open_issues.py
+++ b/tests/test_open_issues.py
@@ -4,13 +4,15 @@ from scripts.open_issues import parse_tasks


def test_parse_tasks(tmp_path):
- md = textwrap.dedent("""
+ md = textwrap.dedent(
+ """
### 4.1 Stage 0 — Bootstrap
1. Create docker-compose
2. Create schema
### 4.2 Stage 1 — Proof
* Implement adapter
- """)
+ """
+ )
file = tmp_path / "a.md"
file.write_text(md)
tasks = parse_tasks(str(file))
14 changes: 7 additions & 7 deletions scripts/langchain/followup_issue_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -531,19 +531,19 @@ def _resolve_section(label: str) -> str | None:
def _parse_sections(body: str) -> dict[str, list[str]]:
"""Parse issue body into recognized sections.

Splits the body by top-level headings (# or ##) and maps content to known section keys.
Unrecognized top-level headings terminate the current section.
Subheadings (###, ####, etc.) within a section are preserved as content.
Splits the body by headings (#, ##, ###) and maps content to known section keys.
Unrecognized headings terminate the current section.
Deeper subheadings (####, #####, etc.) within a section are preserved as content.
"""
sections: dict[str, list[str]] = {key: [] for key in SECTION_TITLES}
current: str | None = None
for line in body.splitlines():
# Only match top-level section headings (# or ## but not ### or deeper)
# Subheadings (###, ####) are kept as content within the current section
heading_match = re.match(r"^\s*#{1,2}\s+(.*)$", line)
# Match section headings (#, ##, ###) - GitHub issue forms use ### for fields
# Deeper headings (####, #####) are kept as content within the current section
heading_match = re.match(r"^\s*#{1,3}\s+(.*)$", line)
if heading_match:
section_key = _resolve_section(heading_match.group(1))
# Update current - set to None for unrecognized top-level headings
# Update current - set to None for unrecognized headings
# This prevents content under "## Random Notes" etc. from being
# appended to the previous recognized section
current = section_key
Expand Down
29 changes: 23 additions & 6 deletions scripts/sync_tool_versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

PIN_FILE = Path(".github/workflows/autofix-versions.env")
PYPROJECT_FILE = Path("pyproject.toml")
TEMPLATE_FILE = Path("templates/consumer-repo/.github/workflows/autofix-versions.env")


@dataclasses.dataclass(frozen=True)
Expand Down Expand Up @@ -167,18 +168,34 @@ def main(argv: Iterable[str]) -> int:
pyproject_content, TOOL_CONFIGS, env_values, apply_changes
)

if project_mismatches and not apply_changes:
for package, message in project_mismatches.items():
# Check template file is in sync with source
template_mismatches: dict[str, str] = {}
if TEMPLATE_FILE.exists():
template_content = TEMPLATE_FILE.read_text(encoding="utf-8")
source_content = PIN_FILE.read_text(encoding="utf-8")
if template_content != source_content:
template_mismatches["template"] = (
"templates/consumer-repo autofix-versions.env differs from source"
)
if apply_changes:
TEMPLATE_FILE.write_text(source_content, encoding="utf-8")

all_mismatches = {**project_mismatches, **template_mismatches}
if all_mismatches and not apply_changes:
for package, message in all_mismatches.items():
print(f"✗ {package}: {message}", file=sys.stderr)
print(
"Use --apply to rewrite pyproject.toml with the pinned versions.",
"Use --apply to sync tool versions to pyproject.toml and template.",
file=sys.stderr,
)
return 1

if apply_changes and pyproject_updated != pyproject_content:
PYPROJECT_FILE.write_text(pyproject_updated, encoding="utf-8")
print("✓ tool pins synced to pyproject.toml")
if apply_changes:
if pyproject_updated != pyproject_content:
PYPROJECT_FILE.write_text(pyproject_updated, encoding="utf-8")
print("✓ tool pins synced to pyproject.toml")
if template_mismatches:
print("✓ template autofix-versions.env synced from source")

return 0

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,12 @@
# NOTE: Only include dev tools here (linters, formatters, test runners).
# Runtime dependencies (PyYAML, Pydantic, Hypothesis) should be managed via Dependabot
# in each consumer repo's pyproject.toml directly, NOT synced from this file.
BLACK_VERSION=25.12.0
RUFF_VERSION=0.14.11
BLACK_VERSION=26.1.0
RUFF_VERSION=0.14.13
ISORT_VERSION=7.0.0
DOCFORMATTER_VERSION=1.7.7
MYPY_VERSION=1.19.1
PYTEST_VERSION=9.0.2
PYTEST_COV_VERSION=7.0.0
PYTEST_XDIST_VERSION=3.8.0
COVERAGE_VERSION=7.13.1

Loading
Loading