diff --git a/.archon/workflows/defaults/archon-workflow-builder.yaml b/.archon/workflows/defaults/archon-workflow-builder.yaml index a311b8d970..d10b6a2566 100644 --- a/.archon/workflows/defaults/archon-workflow-builder.yaml +++ b/.archon/workflows/defaults/archon-workflow-builder.yaml @@ -201,23 +201,52 @@ nodes: echo "VALID" depends_on: [generate-yaml] + - id: resolve-workflow-name + bash: | + FILE="$ARTIFACTS_DIR/generated-workflow.yaml" + + if [ ! -f "$FILE" ]; then + echo "ERROR: generated-workflow.yaml not found at $FILE" + exit 1 + fi + + WORKFLOW_NAME="$(sed -n 's/^name:[[:space:]]*//p' "$FILE" | head -1 | tr -d '\r')" + + if [ -z "$WORKFLOW_NAME" ]; then + echo "ERROR: generated workflow is missing a non-empty top-level name" + exit 1 + fi + + if ! printf '%s' "$WORKFLOW_NAME" | grep -Eq '^[a-z0-9]+(-[a-z0-9]+)*$'; then + echo "ERROR: generated workflow name must be kebab-case, got: $WORKFLOW_NAME" + exit 1 + fi + + printf '%s\n' "$WORKFLOW_NAME" + depends_on: [validate-yaml] + - id: save-or-report prompt: | You are a workflow installer. Save the generated workflow and report to the user. ## Workflow Details - - **Name**: $extract-intent.output.workflow_name + - **Resolved Name**: $resolve-workflow-name.output + - **Extracted Intent Name**: $extract-intent.output.workflow_name - **Trigger Phrases**: $extract-intent.output.trigger_phrases ## Instructions 1. Read the generated workflow from `$ARTIFACTS_DIR/generated-workflow.yaml` - 2. Create the directory `.archon/workflows/` if it doesn't exist (use Bash: `mkdir -p .archon/workflows/`) - 3. Save the workflow to `.archon/workflows/$extract-intent.output.workflow_name.yaml` + 2. Treat `$resolve-workflow-name.output` as the source of truth for the filename and reported workflow name + 3. If the YAML file's `name:` does not match `$resolve-workflow-name.output`, stop and report the mismatch instead of saving + 4. If `$resolve-workflow-name.output` is empty, stop and report that the generated YAML is invalid + 5. Create the directory `$CANONICAL_REPO_PATH/.archon/workflows/` if it doesn't exist (use Bash: `mkdir -p "$CANONICAL_REPO_PATH/.archon/workflows"`). + Do NOT save into the current worktree when `$CANONICAL_REPO_PATH` points to the primary repository root. + 6. Save the workflow to `$CANONICAL_REPO_PATH/.archon/workflows/$resolve-workflow-name.output.yaml` Use the Write tool to write the file. - 4. Report to the user: + 7. 
Report to the user:
         - Workflow name and file location
         - Trigger phrases that will invoke it
-        - How to run it: `bun run cli workflow run $extract-intent.output.workflow_name "your input"`
-        - How to test it: `bun run cli validate workflows $extract-intent.output.workflow_name`
-    depends_on: [validate-yaml]
+        - How to run it: `bun run cli workflow run $resolve-workflow-name.output "your input"`
+        - How to test it: `bun run cli validate workflows $resolve-workflow-name.output`
+    depends_on: [resolve-workflow-name]
diff --git a/.codex/config.toml b/.codex/config.toml
new file mode 100644
index 0000000000..4c198969e7
--- /dev/null
+++ b/.codex/config.toml
@@ -0,0 +1,5 @@
+[shell_environment_policy]
+inherit = "core"
+
+[shell_environment_policy.set]
+CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS = "1"
diff --git a/.env.example b/.env.example
index 325e49a6fb..ed7b2c3bbc 100644
--- a/.env.example
+++ b/.env.example
@@ -1,9 +1,12 @@
-# Database (OPTIONAL)
-# Default: SQLite at ~/.archon/archon.db (no setup required)
-# Recommended: PostgreSQL for heavy parallel usage (20+ concurrent workflows)
-# docker compose --profile with-db up -d
-# Uncomment for PostgreSQL:
-# DATABASE_URL=postgresql://postgres:postgres@localhost:5432/remote_coding_agent
+# Database
+# Docker Compose default: local PostgreSQL container on service name `postgres`
+# Non-Docker / CLI default: SQLite at ~/.archon/archon.db when DATABASE_URL is omitted
+# Override DATABASE_URL only if you want to use an external PostgreSQL instance.
+# DATABASE_URL=postgresql://postgres:postgres@localhost:5432/archon
+ARCHON_PORT=3000
+POSTGRES_DB=archon
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=change-me

 # AI Assistants
 # Claude Auth Options:
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 0000000000..dcbbc32aa9
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,844 @@
+## Project Overview
+
+**Remote Agentic Coding Platform**: Control AI coding assistants (Claude Agent SDK, Codex SDK) remotely from Slack, Telegram, and GitHub. Built with **Bun + TypeScript + SQLite/PostgreSQL**; a single-developer tool for AI-assisted development practitioners. Architecture prioritizes simplicity, flexibility, and user control.
+ +## Core Principles + +**Single-Developer Tool** + +- No multi-tenant complexity + +**Platform Agnostic** + +- Unified conversation interface across Slack/Telegram/GitHub/cli/web +- Platform adapters implement `IPlatformAdapter` +- Stream/batch AI responses in real-time to all platforms + +**Type Safety (CRITICAL)** + +- Strict TypeScript configuration enforced +- All functions must have complete type annotations +- No `any` types without explicit justification +- Interfaces for all major abstractions + +**Zod Schema Conventions** + +- Schema naming: camelCase, descriptive suffix (e.g., `workflowRunSchema`, `errorSchema`) +- Type derivation: always use `z.infer` — never write parallel hand-crafted interfaces +- Import `z` from `@hono/zod-openapi` (not from `zod` directly) +- All new/modified API routes must use `registerOpenApiRoute(createRoute({...}), handler)` — the local wrapper handles the TypedResponse bypass +- Route schemas live in `packages/server/src/routes/schemas/` — one file per domain +- Engine schemas live in `packages/workflows/src/schemas/` — one file per concern (dag-node, workflow, workflow-run, retry, loop, hooks); `index.ts` re-exports all +- Engine schema naming: camelCase (e.g., `dagNodeSchema`, `workflowBaseSchema`, `nodeOutputSchema`) +- `TRIGGER_RULES` and `WORKFLOW_HOOK_EVENTS` are derived from schema `.options` — never duplicate as a plain array (exception: `@archon/web` must define a local constant since `api.generated.d.ts` is type-only and cannot export runtime values) +- `loader.ts` uses `dagNodeSchema.safeParse()` for node validation; graph-level checks (cycles, deps, `$nodeId.output` refs) remain as imperative code in `validateDagStructure()` + +**Git Workflow and Releases** + +- `main` is the release branch. Never commit directly to `main`. +- `dev` is the working branch. All feature work branches off `dev` and merges back into `dev`. +- To release, use the `/release` skill. It compares `dev` to `main`, generates changelog entries, bumps the version, and creates a PR to merge `dev` into `main`. +- Releases follow Semantic Versioning: `/release` (patch), `/release minor`, `/release major`. +- Changelog lives in `CHANGELOG.md` and follows Keep a Changelog format. +- Version is the single `version` field in the root `package.json`. + +**Git as First-Class Citizen** + +- Let git handle what git does best (conflicts, uncommitted changes, branch management) +- Surface git errors to users for actionable issues (conflicts, uncommitted changes) +- Handle expected failure cases gracefully (missing directories during cleanup) +- Trust git's natural guardrails (e.g., refuse to remove worktree with uncommitted changes) +- Use `@archon/git` functions for git operations; use `execFileAsync` (not `exec`) when calling git directly +- Worktrees enable parallel development per conversation without branch conflicts +- Workspaces automatically sync with origin before worktree creation (ensures latest code) +- **NEVER run `git clean -fd`** - it permanently deletes untracked files (use `git checkout .` instead) + +## Engineering Principles + +These are implementation constraints, not slogans. Apply them by default. 
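+
+For instance, the fail-fast principle below translates into code like this minimal sketch (the type and function are illustrative, not taken from the codebase):
+
+```typescript
+// Hedged sketch: validate a provider value up front instead of silently
+// falling back to a default assistant.
+type Provider = 'claude' | 'codex';
+
+function resolveProvider(raw: string): Provider {
+  if (raw === 'claude' || raw === 'codex') return raw;
+  // Fail fast with a clear, actionable error.
+  throw new Error(`Unsupported provider: ${raw} (expected 'claude' or 'codex')`);
+}
+```
+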
**KISS — Keep It Simple, Stupid**

- Prefer straightforward control flow over clever meta-programming
- Prefer explicit branches and typed interfaces over hidden dynamic behavior
- Keep error paths obvious and localized

**YAGNI — You Aren't Gonna Need It**

- Do not add config keys, interface methods, feature flags, or workflow branches without a concrete accepted use case
- Do not introduce speculative abstractions without at least one current caller
- Keep unsupported paths explicit (error out) rather than adding partial fake support

**DRY + Rule of Three**

- Duplicate small, local logic when it preserves clarity
- Extract shared utilities only after the same pattern appears at least three times and has stabilized
- When extracting, preserve module boundaries and avoid hidden coupling

**SRP + ISP — Single Responsibility + Interface Segregation**

- Keep each module and package focused on one concern
- Extend behavior by implementing existing narrow interfaces (`IPlatformAdapter`, `IAssistantClient`, `IDatabase`, `IWorkflowStore`) whenever possible
- Avoid fat interfaces and "god modules" that mix policy, transport, and storage
- Do not add unrelated methods to an existing interface — define a new one

**Fail Fast + Explicit Errors** — Silent fallback in agent runtimes can create unsafe or costly behavior

- Prefer throwing early with a clear error for unsupported or unsafe states — never silently swallow errors
- Never silently broaden permissions or capabilities
- Document fallback behavior with a comment when a fallback is intentional and safe; otherwise throw

**Determinism + Reproducibility**

- Prefer reproducible commands and locked dependency behavior in CI-sensitive paths
- Keep tests deterministic — no flaky timing or network dependence without guardrails
- Ensure local validation commands (`bun run validate`) map directly to CI expectations

**Reversibility + Rollback-First Thinking**

- Keep changes easy to revert: small scope, clear blast radius
- For risky changes, define the rollback path before merging
- Avoid mixed mega-patches that block safe rollback

## Essential Commands

### Development

```bash
# Start server + Web UI together (hot reload for both)
bun run dev

# Or start individually
bun run dev:server # Backend only (port 3090)
bun run dev:web # Frontend only (port 5173)
```

Regenerating frontend API types (requires server to be running at port 3090):

```bash
bun run dev:server # must be running first
bun --filter @archon/web generate:types
```

Optional: Use PostgreSQL instead of SQLite by setting `DATABASE_URL` in `.env` (the `postgres` service in `docker-compose.yml` now runs without a profile):

```bash
docker compose up -d postgres
# Set DATABASE_URL=postgresql://postgres:postgres@localhost:5432/archon in .env
```

### Testing

```bash
bun run test # Run all tests (per-package, isolated processes)
bun test --watch # Watch mode (single package)
bun test packages/core/src/handlers/command-handler.test.ts # Single file
```

**Test isolation (mock.module pollution):** Bun's `mock.module()` permanently replaces modules in the process-wide cache — `mock.restore()` does NOT undo it ([oven-sh/bun#7823](https://github.com/oven-sh/bun/issues/7823)). To prevent cross-file pollution, packages that have conflicting `mock.module()` calls split their tests into separate `bun test` invocations: `@archon/core` (7 batches), `@archon/workflows` (5), `@archon/adapters` (4), `@archon/isolation` (3).
See each package's `package.json` for the exact splits. + +**Do NOT run `bun test` from the repo root** — it discovers all test files across all packages and runs them in one process, causing ~135 mock pollution failures. Always use `bun run test` (which uses `bun --filter '*' test` for per-package isolation). + +### Type Checking & Linting + +```bash +bun run type-check +bun run lint +bun run lint:fix +bun run format +bun run format:check +``` + +### Pre-PR Validation + +**Always run before creating a pull request:** + +```bash +bun run validate +``` + +This runs type-check, lint, format check, and tests. All four must pass for CI to succeed. + +### ESLint Guidelines + +**Zero-tolerance policy**: CI enforces `--max-warnings 0`. No warnings allowed. + +**When to use inline disable comments** (`// eslint-disable-next-line`): + +- **Almost never** - fix the issue instead +- Only acceptable when: + 1. External SDK types are incorrect (document which SDK and why) + 2. Intentional type assertion after validation (must include comment explaining the validation) + +**Never acceptable:** + +- Disabling `no-explicit-any` without justification +- Disabling rules to "make CI pass" +- Bulk disabling at file level (`/* eslint-disable */`) + +### Database + +**Auto-Detection (SQLite is the default — zero setup):** + +- **Without `DATABASE_URL`**: Uses SQLite at `~/.archon/archon.db` (auto-initialized, recommended for most users) +- **With `DATABASE_URL` set**: Uses PostgreSQL (optional, for cloud/advanced deployments) + +```bash +# PostgreSQL only: Run SQL migrations (manual) +psql $DATABASE_URL < migrations/000_combined.sql +``` + +### CLI (Command Line) + +Run workflows directly from the command line without needing the server. Workflow and isolation commands require running from within a git repository (subdirectories work - resolves to repo root). + +```bash +# List available workflows (requires git repo) +bun run cli workflow list + +# Machine-readable JSON output +bun run cli workflow list --json + +# Run a workflow +bun run cli workflow run assist "What does the orchestrator do?" + +# Run in a specific directory +bun run cli workflow run plan --cwd /path/to/repo "Add dark mode" + +# Default: auto-creates worktree with generated branch name (isolation by default) +bun run cli workflow run implement "Add auth" + +# Explicit branch name for the worktree +bun run cli workflow run implement --branch feature-auth "Add auth" + +# Opt out of isolation (run in live checkout) +bun run cli workflow run quick-fix --no-worktree "Fix typo" + +# Grant env-leak-gate consent during auto-registration (for repos whose .env +# contains sensitive keys). Audit-logged with actor: 'user-cli'. +bun run cli workflow run plan --cwd /path/to/leaky/repo --allow-env-keys "..." 
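
# Hedged example (assumes jq is installed and that --json emits an object with
# a top-level workflows array, mirroring GET /api/workflows): list names only
bun run cli workflow list --json | jq -r '.workflows[].name'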
# Show running workflows
bun run cli workflow status

# Resume a failed workflow (re-runs, skipping completed nodes)
bun run cli workflow resume <run-id>

# Discard a non-terminal run
bun run cli workflow abandon <run-id>

# Delete old workflow run records (default: 7 days)
bun run cli workflow cleanup
bun run cli workflow cleanup 30 # Custom days

# Emit a workflow event (used inside workflow loop prompts)
bun run cli workflow event emit --run-id <run-id> --type <type> [--data <json>]

# List active worktrees/environments
bun run cli isolation list

# Clean up stale environments (default: 7 days)
bun run cli isolation cleanup
bun run cli isolation cleanup 14 # Custom days

# Clean up environments with branches merged into main (also deletes remote branches)
bun run cli isolation cleanup --merged

# Also remove environments with closed (abandoned) PRs
bun run cli isolation cleanup --merged --include-closed

# Validate workflow definitions and their referenced resources
bun run cli validate workflows # All workflows
bun run cli validate workflows my-workflow # Single workflow
bun run cli validate workflows my-workflow --json # Machine-readable output

# Validate command files
bun run cli validate commands # All commands
bun run cli validate commands my-command # Single command

# Complete branch lifecycle (remove worktree + local/remote branches)
bun run cli complete
bun run cli complete --force # Skip uncommitted-changes check

# Start the web UI server (compiled binary only, downloads web UI on first run)
bun run cli serve
bun run cli serve --port 4000
bun run cli serve --download-only # Download without starting

# Show version
bun run cli version
```

## Architecture

### Directory Structure

**Monorepo Layout (Bun Workspaces):**

```
packages/
├── cli/ # @archon/cli - Command-line interface
│   └── src/
│       ├── adapters/ # CLI adapter (stdout output)
│       ├── commands/ # CLI command implementations
│       └── cli.ts # CLI entry point
├── core/ # @archon/core - Shared business logic
│   └── src/
│       ├── clients/ # AI SDK clients (Claude, Codex)
│       ├── config/ # YAML config loading
│       ├── db/ # Database connection, queries
│       ├── handlers/ # Command handler (slash commands)
│       ├── orchestrator/ # AI conversation management
│       ├── services/ # Background services (cleanup)
│       ├── state/ # Session state machine
│       ├── types/ # TypeScript types and interfaces
│       ├── utils/ # Shared utilities
│       ├── workflows/ # Store adapter (createWorkflowStore) bridging core DB → IWorkflowStore
│       └── index.ts # Package exports
├── workflows/ # @archon/workflows - Workflow engine (depends on @archon/git + @archon/paths)
│   └── src/
│       ├── schemas/ # Zod schemas for engine types
│       ├── loader.ts # YAML parsing + validation (parseWorkflow)
│       ├── workflow-discovery.ts # Workflow filesystem discovery (discoverWorkflows, discoverWorkflowsWithConfig)
│       ├── executor-shared.ts # Shared executor infrastructure (error classification, variable substitution)
│       ├── router.ts # Prompt building + invocation parsing
│       ├── executor.ts # Workflow execution orchestrator (executeWorkflow)
│       ├── dag-executor.ts # DAG-specific execution logic
│       ├── store.ts # IWorkflowStore interface (database abstraction)
│       ├── deps.ts # WorkflowDeps injection types (IWorkflowPlatform, IWorkflowAssistantClient)
│       ├── event-emitter.ts # Workflow observability events
│       ├── logger.ts # JSONL file logger
│       ├── validator.ts # Resource validation (command files, MCP configs, skill dirs)
│       ├── defaults/ # Bundled default
commands and workflows +│ └── utils/ # Variable substitution, tool formatting, execution utilities +├── git/ # @archon/git - Git operations (no @archon/core dep) +│ └── src/ +│ ├── branch.ts # Branch operations (checkout, merge detection, etc.) +│ ├── exec.ts # execFileAsync and mkdirAsync wrappers +│ ├── repo.ts # Repository operations (clone, sync, remote URL) +│ ├── types.ts # Branded types (RepoPath, BranchName, etc.) +│ ├── worktree.ts # Worktree operations (create, remove, list) +│ └── index.ts # Package exports +├── isolation/ # @archon/isolation - Worktree isolation (depends on @archon/git + @archon/paths) +│ └── src/ +│ ├── types.ts # Isolation types and interfaces +│ ├── errors.ts # Error classifiers (classifyIsolationError, IsolationBlockedError) +│ ├── factory.ts # Provider factory (getIsolationProvider, configureIsolation) +│ ├── resolver.ts # IsolationResolver (request → environment resolution) +│ ├── store.ts # IIsolationStore interface +│ ├── worktree-copy.ts # File copy utilities for worktrees +│ ├── providers/ +│ │ └── worktree.ts # WorktreeProvider implementation +│ └── index.ts # Package exports +├── paths/ # @archon/paths - Path resolution and logger (zero @archon/* deps) +│ └── src/ +│ ├── archon-paths.ts # Archon directory path utilities +│ ├── logger.ts # Pino logger factory +│ └── index.ts # Package exports +├── adapters/ # @archon/adapters - Platform adapters (Slack, Telegram, GitHub, Discord) +│ └── src/ +│ ├── chat/ # Chat platform adapters (Slack, Telegram) +│ ├── forge/ # Forge adapters (GitHub) +│ ├── community/ # Community adapters (Discord) +│ ├── utils/ # Shared adapter utilities (message splitting) +│ └── index.ts # Package exports +├── server/ # @archon/server - HTTP server + Web adapter +│ └── src/ +│ ├── adapters/ # Web platform adapter (SSE streaming) +│ ├── routes/ # API routes (REST + SSE) +│ └── index.ts # Hono server entry point +└── web/ # @archon/web - React frontend (Web UI) + └── src/ + ├── components/ # React components (chat, layout, projects, ui, workflows) + ├── hooks/ # Custom hooks (useSSE, etc.) + ├── lib/ # API client, types, utilities + ├── stores/ # Zustand stores (workflow-store) + ├── routes/ # Route pages (ChatPage, WorkflowsPage, WorkflowBuilderPage, etc.) + └── App.tsx # Router + layout +``` + +**Import Patterns:** + +**IMPORTANT**: Always use typed imports - never use generic `import *` for the main package. 
+ +```typescript +// ✅ CORRECT: Use `import type` for type-only imports +import type { IPlatformAdapter, Conversation, MergedConfig } from '@archon/core'; + +// ✅ CORRECT: Use specific named imports for values +import { handleMessage, ConversationLockManager, pool } from '@archon/core'; + +// ✅ CORRECT: Namespace imports for submodules with many exports +import * as conversationDb from '@archon/core/db/conversations'; +import * as git from '@archon/git'; + +// ✅ CORRECT: Import workflow engine types/functions from direct subpaths +import type { WorkflowDeps } from '@archon/workflows/deps'; +import type { IWorkflowStore } from '@archon/workflows/store'; +import type { WorkflowDefinition } from '@archon/workflows/schemas/workflow'; +import { executeWorkflow } from '@archon/workflows/executor'; +import { discoverWorkflowsWithConfig } from '@archon/workflows/workflow-discovery'; +import { findWorkflow } from '@archon/workflows/router'; + +// ❌ WRONG: Never use generic import for main package +import * as core from '@archon/core'; // Don't do this + +// ❌ WRONG: In @archon/web, never import from @archon/workflows (it's a server package) +import type { DagNode } from '@archon/workflows/schemas/dag-node'; // Don't do this from @archon/web +// ✅ CORRECT: Use re-exports from api.ts (derived from generated OpenAPI spec) +import type { DagNode, WorkflowDefinition } from '@/lib/api'; +``` + +### Database Schema + +**8 Tables (all prefixed with `remote_agent_`):** + +1. **`codebases`** - Repository metadata and commands (JSONB) +2. **`conversations`** - Track platform conversations with titles and soft-delete support +3. **`sessions`** - Track AI SDK sessions with resume capability +4. **`isolation_environments`** - Git worktree isolation tracking +5. **`workflow_runs`** - Workflow execution tracking and state +6. **`workflow_events`** - Step-level workflow event log (step transitions, artifacts, errors) +7. **`messages`** - Conversation message history with tool call metadata (JSONB) +8. **`codebase_env_vars`** - Per-project env vars injected into Codex SDK subprocess env (managed via Web UI or `env:` in config) + +**Key Patterns:** + +- Conversation ID format: Platform-specific (`thread_ts`, `chat_id`, `user/repo#123`) +- One active session per conversation +- Codebase commands stored in filesystem, paths in `codebases.commands` JSONB + +**Session Transitions:** + +- Sessions are immutable - transitions create new linked sessions +- Each transition has explicit `TransitionTrigger` reason (first-message, plan-to-execute, reset-requested, etc.) 
+- Audit trail: `parent_session_id` links to previous session, `transition_reason` records why +- Only plan→execute creates new session immediately; other triggers deactivate current session + +### Architecture Layers + +**Package Split:** + +- **@archon/paths**: Path resolution utilities, Pino logger factory, web dist cache path (`getWebDistDir`) (no @archon/\* deps) +- **@archon/git**: Git operations - worktrees, branches, repos, exec wrappers (depends only on @archon/paths) +- **@archon/isolation**: Worktree isolation types, providers, resolver, error classifiers (depends only on @archon/git + @archon/paths) +- **@archon/workflows**: Workflow engine - loader, router, executor, DAG, logger, bundled defaults (depends only on @archon/git + @archon/paths + @hono/zod-openapi + zod; DB/AI/config injected via `WorkflowDeps`) +- **@archon/cli**: Command-line interface for running workflows and starting the web UI server (depends on @archon/server + @archon/adapters for the serve command) +- **@archon/core**: Business logic, database, orchestration, AI clients (provides `createWorkflowStore()` adapter bridging core DB → `IWorkflowStore`) +- **@archon/adapters**: Platform adapters for Slack, Telegram, GitHub, Discord (depends on @archon/core) +- **@archon/server**: OpenAPIHono HTTP server (Zod + OpenAPI spec generation via `@hono/zod-openapi`), Web adapter (SSE), API routes, Web UI static serving (depends on @archon/adapters) +- **@archon/web**: React frontend (Vite + Tailwind v4 + shadcn/ui + Zustand), SSE streaming to server. `WorkflowRunStatus`, `WorkflowDefinition`, and `DagNode` are all derived from `src/lib/api.generated.d.ts` (generated from the OpenAPI spec via `bun generate:types`; never import from `@archon/workflows`) + +**1. Platform Adapters** + +- Implement `IPlatformAdapter` interface +- Handle platform-specific message formats +- **Web** (`packages/server/src/adapters/web/`): Server-Sent Events (SSE) streaming, conversation ID = user-provided string +- **Slack** (`packages/adapters/src/chat/slack/`): SDK with polling (not webhooks), conversation ID = `thread_ts` +- **Telegram** (`packages/adapters/src/chat/telegram/`): Bot API with polling, conversation ID = `chat_id` +- **GitHub** (`packages/adapters/src/forge/github/`): Webhooks + GitHub CLI, conversation ID = `owner/repo#number` +- **Discord** (`packages/adapters/src/community/chat/discord/`): discord.js WebSocket, conversation ID = channel ID + +**Adapter Authorization Pattern:** + +- Auth checks happen INSIDE adapters (encapsulation, consistency) +- Auth utilities co-located with each adapter (e.g., `packages/adapters/src/chat/slack/auth.ts`) +- Parse whitelist from env var in constructor (e.g., `TELEGRAM_ALLOWED_USER_IDS`) +- Check authorization in message handler (before calling `onMessage` callback) +- Silent rejection for unauthorized users (no error response) +- Log unauthorized attempts with masked user IDs for privacy +- Adapters expose `onMessage(handler)` callback; errors handled by caller + +**2. Command Handler** (`packages/core/src/handlers/`) + +- Process slash commands (deterministic, no AI) +- Commands: `/command-set`, `/load-commands`, `/clone`, `/getcwd`, `/setcwd`, `/repos`, `/repo`, `/repo-remove`, `/worktree`, `/workflow`, `/status`, `/commands`, `/help`, `/reset`, `/reset-context`, `/init` +- Update database, perform operations, return responses + +**3. 
Orchestrator** (`packages/core/src/orchestrator/`)

- Manage AI conversations
- Load conversation + codebase context from database
- Variable substitution: `$1`, `$2`, `$3`, `$ARGUMENTS`
- Session management: Create new or resume existing
- Stream AI responses to platform

**4. AI Assistant Clients** (`packages/core/src/clients/`)

- Implement `IAssistantClient` interface
- **ClaudeClient**: `@anthropic-ai/claude-agent-sdk`
- **CodexClient**: `@openai/codex-sdk`
- Streaming: `for await (const event of events) { await platform.send(event) }`

### Configuration

**Environment Variables:**

See `.env.example`; set up `.archon/config.yaml` as needed.

**Assistant Defaults:**

The system supports configuring default models and options per assistant in `.archon/config.yaml`:

```yaml
assistants:
  claude:
    model: sonnet # or 'opus', 'haiku', 'claude-*', 'inherit'
    settingSources: # Controls which AGENTS.md files the Claude SDK loads
      - project # Default: only project-level AGENTS.md
      - user # Optional: also load ~/.claude/AGENTS.md
  codex:
    model: gpt-5.4
    modelReasoningEffort: medium # 'minimal' | 'low' | 'medium' | 'high' | 'xhigh'
    webSearchMode: live # 'disabled' | 'cached' | 'live'
    additionalDirectories:
      - /absolute/path/to/other/repo
    codexBinaryPath: /usr/local/bin/codex # Optional: custom Codex CLI binary path

# docs:
#   path: docs # Optional: default is docs/
```

**Configuration Priority:**

1. Workflow-level options (in YAML `model`, `modelReasoningEffort`, etc.)
2. Config file defaults (`.archon/config.yaml` `assistants.*`)
3. SDK defaults

**Model Validation:**

- Workflows are validated at load time for provider/model compatibility
- Claude models: `sonnet`, `opus`, `haiku`, `claude-*`, `inherit`
- Codex models: Any model except Claude-specific aliases
- Invalid combinations fail workflow loading with clear error messages

### Running the App in Worktrees

Agents working in worktrees can run the app for self-testing (make changes → run app → test via curl → fix).
Ports are automatically allocated to avoid conflicts:

```bash
# Run in worktree (port auto-allocated based on path)
bun dev &
# [Hono] Worktree detected (/path/to/worktree)
# [Hono] Auto-allocated port: 3637 (base: 3090, offset: +547)

# Test via web API (production path)
# 1) Create a conversation
curl -X POST http://localhost:3637/api/conversations \
  -H "Content-Type: application/json" \
  -d '{}'

# 2) Send a message
curl -X POST http://localhost:3637/api/conversations/<id>/message \
  -H "Content-Type: application/json" \
  -d '{"message":"/status"}'

# 3) Fetch messages (polling)
curl http://localhost:3637/api/conversations/<id>/messages

# Note: SSE streaming is available at /api/stream/<id>
```

**Port Allocation:**

- Worktrees: Automatic unique port (3190-4089 range, hash-based on path)
- Main repo: Default 3090
- Override: `PORT=4000 bun dev` (works in both contexts)
- Same worktree always gets same port (deterministic)

**Important:**

- Use the web API routes for manual validation (avoid running multiple platform adapters)
- Database is shared (same conversations/codebases available)
- Kill the server when done: `pkill -f "bun.*dev"` or use the specific port

### Archon Directory Structure

**User-level (`~/.archon/`):**

```
~/.archon/
├── workspaces/owner/repo/ # Project-centric layout
│   ├── source/ # Clone (from /clone) or symlink → local path
│   ├── worktrees/ # Git worktrees for this project
│   ├── artifacts/ # Workflow artifacts (NEVER in git)
│   │   ├── runs/{id}/ # Per-run artifacts ($ARTIFACTS_DIR)
│   │   └── uploads/{convId}/ # Web UI file uploads (ephemeral)
│   └── logs/ # Workflow execution logs
├── vendor/codex/ # Codex native binary (binary builds, user-placed)
├── web-dist/<version>/ # Cached web UI dist (archon serve, binary only)
├── update-check.json # Update check cache (binary builds, 24h TTL)
├── archon.db # SQLite database (when DATABASE_URL not set)
└── config.yaml # Global configuration (non-secrets)
```

**Repo-level (`.archon/` in any repository):**

```
.archon/
├── commands/ # Custom commands
├── workflows/ # Workflow definitions (YAML files)
├── scripts/ # Named scripts for script: nodes (.ts/.js for bun, .py for uv)
└── config.yaml # Repo-specific configuration
```

- `ARCHON_HOME` - Override the base directory (default: `~/.archon`)
- Docker: Paths automatically set to `/.archon/`

## Development Guidelines

### When Creating New Features

**Quick reference:**

- **Platform Adapters**: Implement `IPlatformAdapter`, handle auth, polling/webhooks
- **AI Clients**: Implement `IAssistantClient`, session management, streaming
- **Slash Commands**: Add to command-handler.ts, update database, no AI
- **Database Operations**: Use `IDatabase` interface (supports PostgreSQL and SQLite via adapters)

### SDK Type Patterns

When working with external SDKs (Claude Agent SDK, Codex SDK), prefer importing and using SDK types directly:

```typescript
// ✅ CORRECT - Import SDK types directly
import { query, type Options } from '@anthropic-ai/claude-agent-sdk';

const options: Options = {
  cwd,
  permissionMode: 'bypassPermissions',
  // ...
};

// Use type assertions for SDK response structures
const message = msg as { message: { content: ContentBlock[] } };
```

```typescript
// ❌ AVOID - Defining duplicate types
interface MyQueryOptions { // Don't duplicate SDK types
  cwd: string;
  // ...
}
const options: MyQueryOptions = { ...
};
query({ prompt, options: options as any }); // Avoid 'as any'
```

This ensures type compatibility with SDK updates and eliminates `as any` casts.

### Testing

**Unit Tests:**

- Test pure functions (variable substitution, command parsing)
- Mock external dependencies (database, AI SDKs, platform APIs)

**Integration Tests:**

- Test database operations with test database
- Test end-to-end flows (mock platforms/AI but use real orchestrator)
- Clean up test data after each test

**Mock isolation rules (IMPORTANT):**

- Bun's `mock.module()` is process-global and irreversible — `mock.restore()` does NOT undo it
- Do NOT add `afterAll(() => mock.restore())` for `mock.module()` cleanup — it has no effect
- Use `spyOn()` for internal modules that other test files import directly (e.g., `spyOn(git, 'checkout')`) — `spy.mockRestore()` DOES work for spies
- Never `mock.module()` a module path that another test file also `mock.module()`s with a different implementation
- When adding a new test file with `mock.module()`, ensure its package.json test script runs it in a separate `bun test` invocation from any conflicting files

**Manual Validation:** Use the web API (`curl`) or CLI commands directly for end-to-end testing of new features.

### Logging

**Structured logging with Pino** (`packages/paths/src/logger.ts`):

```typescript
import { createLogger } from '@archon/paths';

const log = createLogger('orchestrator');

// Event naming: {domain}.{action}_{state}
// Standard states: _started, _completed, _failed, _validated, _rejected
async function createSession(conversationId: string, codebaseId: string) {
  log.info({ conversationId, codebaseId }, 'session.create_started');

  try {
    const session = await doCreate();
    log.info({ conversationId, codebaseId, sessionId: session.id }, 'session.create_completed');
    return session;
  } catch (e) {
    const err = e as Error;
    log.error(
      { conversationId, error: err.message, errorType: err.constructor.name, err },
      'session.create_failed'
    );
    throw err;
  }
}
```

**Event naming rules:**

- Format: `{domain}.{action}_{state}` — e.g. `workflow.step_started`, `isolation.create_failed`
- Avoid generic events like `processing` or `handling`
- Always pair `_started` with `_completed` or `_failed`
- Include context: IDs, durations, error details

**Log Levels:** `fatal` > `error` > `warn` > `info` (default) > `debug` > `trace`

**Verbosity:**

- CLI: `archon --quiet` (errors only) — suppresses Pino logs and workflow progress output
- CLI: `archon --verbose` (debug) — enables debug Pino logs and tool-level workflow progress events
- Server: `LOG_LEVEL=debug bun run start`

**Never log:** API keys or tokens (mask: `token.slice(0, 8) + '...'`), user message content, PII.

### Command System

**Variable Substitution:**

- `$1`, `$2`, `$3` - Positional arguments
- `$ARGUMENTS` - All arguments as single string
- `$ARTIFACTS_DIR` - External artifacts directory for the current workflow run (pre-created by executor)
- `$WORKFLOW_ID` - The workflow run ID
- `$BASE_BRANCH` - Base branch; auto-detected from git when `worktree.baseBranch` is not set; fails only if referenced in a prompt and auto-detection also fails
- `$DOCS_DIR` - Documentation directory path; configured via `docs.path` in `.archon/config.yaml`. Defaults to `docs/`. Never throws.
- `$LOOP_USER_INPUT` - User feedback provided via `/workflow approve <feedback>` at an interactive loop gate.
Only populated on the first iteration of a resumed interactive loop; empty string on all other iterations.
- `$REJECTION_REASON` - Reviewer feedback provided via `/workflow reject <reason>` at an approval gate. Only populated in `on_reject` prompts; empty string elsewhere.

**Command Types:**

1. **Codebase Commands** (per-repo):
   - Stored in `.archon/commands/` (plain text/markdown)
   - Auto-detected via `/clone` or `/load-commands <path>`
   - Loaded by `/clone` or `/load-commands`, invoked by AI via orchestrator routing

2. **Workflows** (YAML-based):
   - Stored in `.archon/workflows/` (searched recursively)
   - Multi-step AI execution chains, discovered at runtime
   - **`nodes:` (DAG format)**: Nodes with explicit `depends_on` edges; independent nodes in the same topological layer run concurrently. Node types: `command:` (named command file), `prompt:` (inline prompt), `bash:` (shell script, stdout captured as `$nodeId.output`, no AI), `loop:` (iterative AI prompt until completion signal), `approval:` (human gate; pauses until user approves or rejects; `capture_response: true` stores the user's comment as `$nodeId.output` for downstream nodes, default false), `script:` (inline TypeScript/Python or named script from `.archon/scripts/`, runs via `bun` or `uv`, stdout captured as `$nodeId.output`, no AI, supports `deps:` for dependency installation and `timeout:` in ms, requires `runtime: bun` or `runtime: uv`). Supports `when:` conditions, `trigger_rule` join semantics, `$nodeId.output` substitution, `output_format` for structured JSON output (Claude and Codex), `allowed_tools`/`denied_tools` for per-node tool restrictions (Claude only), `hooks` for per-node SDK hook callbacks (Claude only), `mcp` for per-node MCP server config files (Claude only, env vars expanded at execution time), `skills` for per-node skill preloading via AgentDefinition wrapping (Claude only), and `effort`/`thinking`/`maxBudgetUsd`/`systemPrompt`/`fallbackModel`/`betas`/`sandbox` for Claude SDK advanced options (Claude only, also settable at workflow level)
   - Provider inherited from `.archon/config.yaml` unless explicitly set; per-node `provider` and `model` overrides supported
   - Model and options can be set per workflow or inherited from config defaults
   - `interactive: true` at the workflow level forces foreground execution on web (required for approval-gate workflows in the web UI)
   - Model validation ensures provider/model compatibility at load time
   - Commands: `/workflow list`, `/workflow reload`, `/workflow status`, `/workflow cancel`, `/workflow resume <id>` (re-runs failed workflow, skipping completed nodes), `/workflow abandon <id>`, `/workflow cleanup [days]` (CLI only — deletes old run records)
   - Resilient loading: One broken YAML doesn't abort discovery; errors shown in `/workflow list`
   - `resolveWorkflowName()` (in `router.ts`) resolves workflow names via a 4-tier fallback — exact, case-insensitive, suffix (`-name`), substring — with ambiguity detection; used by both the CLI and all chat platforms
   - Router fallback: if no `/invoke-workflow` is produced, falls back to `archon-assist` (with "Routing unclear" notice); raw AI response returned only when `archon-assist` is unavailable
   - Claude routing calls use `tools: []` to prevent tool use at the API level; Codex tool bypass is detected and triggers the same fallback

**Defaults:**

- Bundled in `.archon/commands/defaults/` and `.archon/workflows/defaults/`
- Binary builds: Embedded at compile time (no filesystem access needed)
- Source builds: Loaded from
filesystem at runtime +- Merged with repo-specific commands/workflows (repo overrides defaults by name) +- Opt-out: Set `defaults.loadDefaultCommands: false` or `defaults.loadDefaultWorkflows: false` in `.archon/config.yaml` + +**Global workflows** (user-level, applies to every project): + +- Path: `~/.archon/.archon/workflows/` (or `$ARCHON_HOME/.archon/workflows/`) +- Load priority: bundled < global < repo-specific (repo overrides global by filename) +- See the docs site at `packages/docs-web/` for details + +### Error Handling + +**Database Errors:** + +```typescript +// INSERT operations +try { + await db.query('INSERT INTO conversations ...', params); +} catch (error) { + log.error({ err: error, params }, 'db_insert_failed'); + throw new Error('Failed to create conversation'); +} + +// UPDATE operations - verify rowCount to catch missing records +try { + await db.updateConversation(conversationId, { codebase_id: codebaseId }); +} catch (error) { + // updateConversation throws if no rows matched (conversation not found) + log.error({ err: error, conversationId }, 'db_update_failed'); + throw error; // Re-throw to surface the issue +} +``` + +**Git Operation Errors (don't fail silently):** + +```typescript +// When isolation environment creation fails: +try { + // ... isolation creation logic ... +} catch (error) { + const err = error as Error; + const userMessage = classifyIsolationError(err); + log.error({ err, codebaseId, codebaseName }, 'isolation_creation_failed'); + await platform.sendMessage(conversationId, userMessage); +} +``` + +Pattern: Use `classifyIsolationError()` (from `@archon/isolation`) to map git errors (permission denied, timeout, no space, not a git repo) to user-friendly messages. Always log the raw error for debugging and send a classified message to the user. + +### API Endpoints + +**Web UI REST API** (`packages/server/src/routes/api.ts`): + +**Workflow Management:** + +- `GET /api/workflows` - List available workflows; optional `?cwd=`; returns `{ workflows: [...], errors?: [...] }` +- `POST /api/workflows/validate` - Validate a workflow definition in-memory (no save); body: `{ definition: object }`; returns `{ valid: boolean, errors?: string[] }` +- `GET /api/workflows/:name` - Fetch a single workflow by name; optional `?cwd=` query param; returns `{ workflow, filename, source: 'project' | 'bundled' }` +- `PUT /api/workflows/:name` - Save (create or update) a workflow YAML; body: `{ definition: object }`; validates before writing; requires `?cwd=` or registered codebase +- `DELETE /api/workflows/:name` - Delete a user-defined workflow; bundled defaults cannot be deleted + +**Workflow Run Lifecycle:** + +- `POST /api/workflows/runs/{runId}/resume` - Mark a failed run as ready for auto-resume on next invocation +- `POST /api/workflows/runs/{runId}/abandon` - Abandon a non-terminal run (marks as cancelled) +- `DELETE /api/workflows/runs/{runId}` - Delete a terminal workflow run and its events + +**Codebases:** + +- `GET /api/codebases` / `GET /api/codebases/:id` - List / fetch codebases +- `POST /api/codebases` - Register a codebase (clone or local path); body accepts `allowEnvKeys` for the env-leak gate +- `PATCH /api/codebases/:id` - Flip the `allow_env_keys` consent bit; body: `{ allowEnvKeys: boolean }`. 
Audit-logged at `warn` level on every grant/revoke (`env_leak_consent_granted` / `env_leak_consent_revoked`) with `codebaseId`, `path`, `files`, `keys`, `scanStatus`, `actor` +- `DELETE /api/codebases/:id` - Delete a codebase and clean up resources + +**Artifact Files:** + +- `GET /api/artifacts/:runId/*` - Serve a workflow artifact file by run ID and relative path; returns `text/markdown` for `.md` files, `text/plain` otherwise; 400 on path traversal (`..`), 404 if run or file not found + +**Command Listing:** + +- `GET /api/commands` - List available command names (bundled + project-defined); optional `?cwd=`; returns `{ commands: [{ name, source: 'bundled' | 'project' }] }` + +**System:** + +- `GET /api/update-check` - Check for available updates; returns `{ updateAvailable, currentVersion, latestVersion, releaseUrl }`; skips GitHub API call for non-binary builds + +**OpenAPI Spec:** + +- `GET /api/openapi.json` - Generated OpenAPI 3.0 spec for all Zod-validated routes + +**Webhooks:** + +- `POST /webhooks/github` - GitHub webhook events +- Signature verification required (HMAC SHA-256) +- Return 200 immediately, process async + +**Security:** + +- Verify webhook signatures (GitHub: `X-Hub-Signature-256`) +- Use `c.req.text()` for raw webhook body (signature verification) +- Never log or expose tokens in responses + +**@Mention Detection:** + +- Parse `@archon` in issue/PR **comments only** (not descriptions) +- Events: `issue_comment` only +- Note: Descriptions often contain example commands or documentation - these are NOT command invocations (see #96) diff --git a/CLAUDE.md b/CLAUDE.md index f38cb29a98..9e6b373be2 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -464,7 +464,7 @@ assistants: - project # Default: only project-level CLAUDE.md - user # Optional: also load ~/.claude/CLAUDE.md codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: medium # 'minimal' | 'low' | 'medium' | 'high' | 'xhigh' webSearchMode: live # 'disabled' | 'cached' | 'live' additionalDirectories: diff --git a/bun.lock b/bun.lock index 43f419a191..2ac608165a 100644 --- a/bun.lock +++ b/bun.lock @@ -23,7 +23,7 @@ }, "packages/adapters": { "name": "@archon/adapters", - "version": "0.1.0", + "version": "0.3.5", "dependencies": { "@archon/core": "workspace:*", "@archon/git": "workspace:*", @@ -41,7 +41,7 @@ }, "packages/cli": { "name": "@archon/cli", - "version": "0.2.13", + "version": "0.3.5", "bin": { "archon": "./src/cli.ts", }, @@ -62,14 +62,14 @@ }, "packages/core": { "name": "@archon/core", - "version": "0.2.0", + "version": "0.3.5", "dependencies": { "@anthropic-ai/claude-agent-sdk": "^0.2.89", "@archon/git": "workspace:*", "@archon/isolation": "workspace:*", "@archon/paths": "workspace:*", "@archon/workflows": "workspace:*", - "@openai/codex-sdk": "^0.116.0", + "@openai/codex-sdk": "^0.120.0", "pg": "^8.11.0", "zod": "^3", }, @@ -83,7 +83,7 @@ }, "packages/docs-web": { "name": "@archon/docs-web", - "version": "0.2.12", + "version": "0.3.5", "dependencies": { "@astrojs/starlight": "^0.38.0", "astro": "^6.1.0", @@ -92,7 +92,7 @@ }, "packages/git": { "name": "@archon/git", - "version": "0.1.0", + "version": "0.3.5", "dependencies": { "@archon/paths": "workspace:*", }, @@ -102,7 +102,7 @@ }, "packages/isolation": { "name": "@archon/isolation", - "version": "0.1.0", + "version": "0.3.5", "dependencies": { "@archon/git": "workspace:*", "@archon/paths": "workspace:*", @@ -113,7 +113,7 @@ }, "packages/paths": { "name": "@archon/paths", - "version": "0.2.0", + "version": "0.3.5", "dependencies": { "pino": 
"^9", "pino-pretty": "^13", @@ -124,7 +124,7 @@ }, "packages/server": { "name": "@archon/server", - "version": "0.2.0", + "version": "0.3.5", "dependencies": { "@archon/adapters": "workspace:*", "@archon/core": "workspace:*", @@ -142,7 +142,7 @@ }, "packages/web": { "name": "@archon/web", - "version": "0.2.0", + "version": "0.3.5", "dependencies": { "@dagrejs/dagre": "^2.0.4", "@radix-ui/react-alert-dialog": "^1.1.15", @@ -194,7 +194,7 @@ }, "packages/workflows": { "name": "@archon/workflows", - "version": "0.1.0", + "version": "0.3.5", "dependencies": { "@archon/git": "workspace:*", "@archon/paths": "workspace:*", @@ -564,21 +564,21 @@ "@open-draft/until": ["@open-draft/until@2.1.0", "", {}, "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg=="], - "@openai/codex": ["@openai/codex@0.116.0", "", { "optionalDependencies": { "@openai/codex-darwin-arm64": "npm:@openai/codex@0.116.0-darwin-arm64", "@openai/codex-darwin-x64": "npm:@openai/codex@0.116.0-darwin-x64", "@openai/codex-linux-arm64": "npm:@openai/codex@0.116.0-linux-arm64", "@openai/codex-linux-x64": "npm:@openai/codex@0.116.0-linux-x64", "@openai/codex-win32-arm64": "npm:@openai/codex@0.116.0-win32-arm64", "@openai/codex-win32-x64": "npm:@openai/codex@0.116.0-win32-x64" }, "bin": { "codex": "bin/codex.js" } }, "sha512-K6q9P2ZmpnzGmpS6Ybjvsdtvu8AbJx3f/Z4KmjH1u85StSS9TWMSQB8z0PPObKMejbtiIkHwhGyEIHi4iBYjig=="], + "@openai/codex": ["@openai/codex@0.120.0", "", { "optionalDependencies": { "@openai/codex-darwin-arm64": "npm:@openai/codex@0.120.0-darwin-arm64", "@openai/codex-darwin-x64": "npm:@openai/codex@0.120.0-darwin-x64", "@openai/codex-linux-arm64": "npm:@openai/codex@0.120.0-linux-arm64", "@openai/codex-linux-x64": "npm:@openai/codex@0.120.0-linux-x64", "@openai/codex-win32-arm64": "npm:@openai/codex@0.120.0-win32-arm64", "@openai/codex-win32-x64": "npm:@openai/codex@0.120.0-win32-x64" }, "bin": { "codex": "bin/codex.js" } }, "sha512-e2P1Gya3dwsRe9IPOiswVz5JfR700u+/sWCqDc3jkqv2QViPkNiBmZoGhFnZL5jBpKakSjehC4/Fpspg70nHTw=="], - "@openai/codex-darwin-arm64": ["@openai/codex@0.116.0-darwin-arm64", "", { "os": "darwin", "cpu": "arm64" }, "sha512-WkdL083p8uMeASpg8bwV0DPGgzkm48LjN3MyU2m/YukujbiLnknAmG29O2q2rFCLm0oLSDIGUK8EnXA4ZcAF9Q=="], + "@openai/codex-darwin-arm64": ["@openai/codex@0.120.0-darwin-arm64", "", { "os": "darwin", "cpu": "arm64" }, "sha512-7CU+I5kBaMuoqfG3xisq0mUWzxoEHvfu34cB8a0KpBiIhAgu12fKpmYgZ4/DvRP6Wm9Fu6LJYKVF5apUHFp8nQ=="], - "@openai/codex-darwin-x64": ["@openai/codex@0.116.0-darwin-x64", "", { "os": "darwin", "cpu": "x64" }, "sha512-Ax8uTwYSNIwGrzcNRcn0jJQhZzNcKGDbbn00Emde7gGOemjSLhRALjUaKjckAaW5xWnNqHTGdtzzPB4phNlDYg=="], + "@openai/codex-darwin-x64": ["@openai/codex@0.120.0-darwin-x64", "", { "os": "darwin", "cpu": "x64" }, "sha512-d7joNYuwrmd5iIdp/xAE5f8bZT1r82MnmU6Hzgxq3G+xClwEyhxU737ZWnstFSpnZNfxJ5zXCuFUJh4CAkHNtQ=="], - "@openai/codex-linux-arm64": ["@openai/codex@0.116.0-linux-arm64", "", { "os": "linux", "cpu": "arm64" }, "sha512-X7cL8rBSGDB+RSZc2FoKiqcMVeLPMmo06bkss/en4lLQsV1XG2DZI56WuXg92IOX3SjYl6Av/eOWgsb1t3UeLQ=="], + "@openai/codex-linux-arm64": ["@openai/codex@0.120.0-linux-arm64", "", { "os": "linux", "cpu": "arm64" }, "sha512-sVYY25/URlpZPtb0Q0ryLh+lcq9UTEtHAkdZKa0a/R7mAdyPuhpU9V6jWmxwiUh7s53XZOEVFoKmLfH8YIDWCQ=="], - "@openai/codex-linux-x64": ["@openai/codex@0.116.0-linux-x64", "", { "os": "linux", "cpu": "x64" }, "sha512-S9InOgJT3tj6uQp55NqrCA1k5tklwFaH00JdC2ElbRmxchm7ard4WxHSJZX9TiY8enj4cQoLIC04NFTUCO+/PQ=="], + "@openai/codex-linux-x64": 
["@openai/codex@0.120.0-linux-x64", "", { "os": "linux", "cpu": "x64" }, "sha512-VcP9B/c/O+EFEgqoetCzvHrLfAdo8vrt09Gx1lJ8ikewctqAuJ/ozj/6wuvlz7XaaK64ib5cge01pOAeCyt2Sg=="], - "@openai/codex-sdk": ["@openai/codex-sdk@0.116.0", "", { "dependencies": { "@openai/codex": "0.116.0" } }, "sha512-qrn1Pu5G1GJ9w4m/Lk3L3466ulMGG9SfyR0LPAaXdisuQI1rqgoUOuoZ4byX7cCzn0x1g2+WPc0apZgjMEK04Q=="], + "@openai/codex-sdk": ["@openai/codex-sdk@0.120.0", "", { "dependencies": { "@openai/codex": "0.120.0" } }, "sha512-Y6y3EyLpSSJjRGqIFxxb1G9X6Hod+B1CnWzGoO7qrg3URPnjBL/DLLQWdZSENU5yIlRjHEQDSu7y1rKBlx+jUA=="], - "@openai/codex-win32-arm64": ["@openai/codex@0.116.0-win32-arm64", "", { "os": "win32", "cpu": "arm64" }, "sha512-kX2oAUzkgZX9OsYpd4omv9IGf+9VWj4Vy3UtIAnQKBu1DTSzmTJmXDuDn87mkyUciSZadm2QbeqQQzm2NC0NYw=="], + "@openai/codex-win32-arm64": ["@openai/codex@0.120.0-win32-arm64", "", { "os": "win32", "cpu": "arm64" }, "sha512-SAaTQU1XHa1qDnmQldmbyROIY5SiaspF+Cw3ziWeeTgyAET3rWusm4ELOElx6QiY1ugYW5ZD+7AFufS2z1xtpQ=="], - "@openai/codex-win32-x64": ["@openai/codex@0.116.0-win32-x64", "", { "os": "win32", "cpu": "x64" }, "sha512-6sBIMOoA9FNuxQvCCnK0P548Wqrlk3I9SMdtOCUg2zYzYU7jOF2mWS1VpRQ6R+Jvo2x50dxeJZ+W37dBmXfprw=="], + "@openai/codex-win32-x64": ["@openai/codex@0.120.0-win32-x64", "", { "os": "win32", "cpu": "x64" }, "sha512-zja1GNrbHyOUTvOy5FVMa+rAYIs3m+FOS8rAXftxMEhodMmkMw2O8zcvso657SHhZR0hIEiZ6T70lcyH2YX0mQ=="], "@oslojs/encoding": ["@oslojs/encoding@1.1.0", "", {}, "sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ=="], diff --git a/deploy/.env.example b/deploy/.env.example index 9e2d5f521f..bd7bb89b51 100644 --- a/deploy/.env.example +++ b/deploy/.env.example @@ -10,8 +10,16 @@ # For cloud/production deployments, use a managed PostgreSQL (Supabase, Neon, etc.): DATABASE_URL=postgresql://user:password@host:5432/dbname -# Or uncomment postgres service in docker-compose.yml and use: +# Or copy deploy/docker-compose.override.example.yml to docker-compose.override.yml +# for a local PostgreSQL container plus a local build from Dockerfile, then use: # DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent +# Optional when using the override: +# POSTGRES_DB=remote_coding_agent +# POSTGRES_USER=postgres +# POSTGRES_PASSWORD=postgres +# POSTGRES_PORT=5432 +# ARCHON_DOCKERFILE=Dockerfile +# ARCHON_DOCKERFILE=Dockerfile.user # ============================================ # Required: AI Assistant (at least one) diff --git a/deploy/docker-compose.override.example.yml b/deploy/docker-compose.override.example.yml index 4fc35fdabf..f3f82363be 100644 --- a/deploy/docker-compose.override.example.yml +++ b/deploy/docker-compose.override.example.yml @@ -1,17 +1,52 @@ -# Docker Compose override for user-extended builds (server / deploy). +# Docker Compose override for local server builds (server / deploy). # Copy this file to docker-compose.override.yml — do NOT modify docker-compose.override.example.yml. # docker-compose.override.yml is gitignored so your customizations stay local. # # Docker Compose automatically merges docker-compose.override.yml with docker-compose.yml # when both files are present — no extra flags needed: -# docker compose up -d +# docker compose up -d --build # -# NOTE: The base docker-compose.yml uses `image:` (no build section). This override -# adds a full `build:` section so Compose builds from Dockerfile.user instead of -# pulling the pre-built image. +# NOTE: +# - The base docker-compose.yml uses `image:` (no build section). 
This override +# switches the app to a local build from the tracked Dockerfile instead of +# pulling the pre-built image. +# - To build from Dockerfile.user instead, set ARCHON_DOCKERFILE=Dockerfile.user +# in .env or edit the dockerfile path below. +# - This override also adds a local PostgreSQL service and wires DATABASE_URL to it. services: app: + image: archon-local build: context: . - dockerfile: Dockerfile.user + dockerfile: ${ARCHON_DOCKERFILE:-Dockerfile} + environment: + DATABASE_URL: postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@postgres:5432/${POSTGRES_DB:-remote_coding_agent} + depends_on: + postgres: + condition: service_healthy + + postgres: + image: postgres:17-alpine + restart: unless-stopped + environment: + POSTGRES_DB: ${POSTGRES_DB:-remote_coding_agent} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + volumes: + - postgres_data:/var/lib/postgresql/data + - ./migrations/000_combined.sql:/docker-entrypoint-initdb.d/000_combined.sql:ro + - ./migrations:/migrations:ro + ports: + - "127.0.0.1:${POSTGRES_PORT:-5432}:5432" + healthcheck: + test: + - CMD-SHELL + - pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-remote_coding_agent} + interval: 10s + timeout: 5s + retries: 10 + start_period: 10s + +volumes: + postgres_data: diff --git a/docker-compose.yml b/docker-compose.yml index e1b4290e3c..6ba0d160d2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,16 +3,15 @@ # ============================================================================= # # Usage: -# docker compose up -d # App with SQLite (default) -# docker compose --profile with-db up -d # App + local PostgreSQL -# docker compose --profile cloud up -d # App + Caddy HTTPS reverse proxy -# docker compose --profile with-db --profile cloud up -d # All three +# docker compose up -d # App + local PostgreSQL (default) +# docker compose --profile cloud up -d # App + local PostgreSQL + Caddy HTTPS reverse proxy +# docker compose --profile cloud --profile auth up -d # Add form-based auth # # Database: -# SQLite is the default (zero config). For PostgreSQL, either: -# - Use --profile with-db for a local container, and set in .env: -# DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent -# - Or point DATABASE_URL to an external database (Supabase, Neon, etc.) +# Local PostgreSQL is started by default. The app connects to the bundled +# postgres service unless DATABASE_URL is explicitly overridden in .env. 
+# Default internal URL: +# postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@postgres:5432/${POSTGRES_DB:-archon} # # Data: # Set ARCHON_DATA in .env to control where Archon stores data on the host: @@ -35,15 +34,20 @@ services: env_file: .env environment: ARCHON_DOCKER: "true" - ports: - - "${PORT:-3000}:${PORT:-3000}" + PORT: ${ARCHON_PORT:-3000} + DATABASE_URL: ${DATABASE_URL:-postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@postgres:5432/${POSTGRES_DB:-archon}} + DEFAULT_AI_ASSISTANT: ${DEFAULT_AI_ASSISTANT:-claude} + LOG_LEVEL: ${LOG_LEVEL:-info} + MAX_CONCURRENT_CONVERSATIONS: ${MAX_CONCURRENT_CONVERSATIONS:-10} + expose: + - "${ARCHON_PORT:-3000}" volumes: - ${ARCHON_DATA:-archon_data}:/.archon networks: - archon-network restart: unless-stopped healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:${PORT:-3000}/api/health"] + test: ["CMD", "curl", "-f", "http://localhost:${ARCHON_PORT:-3000}/api/health"] interval: 30s timeout: 10s retries: 3 @@ -53,28 +57,30 @@ services: - 8.8.4.4 sysctls: - net.ipv6.conf.all.disable_ipv6=1 + depends_on: + postgres: + condition: service_healthy # ------------------------------------------------------------------------- - # PostgreSQL (optional: --profile with-db) - # Set DATABASE_URL in .env to connect the app to this container. + # PostgreSQL (default) + # Set DATABASE_URL in .env only when you want the app to use an external DB. # ------------------------------------------------------------------------- postgres: - image: postgres:17-alpine - profiles: ["with-db"] + image: postgres:16-alpine environment: - POSTGRES_DB: remote_coding_agent - POSTGRES_USER: postgres + POSTGRES_DB: ${POSTGRES_DB:-archon} + POSTGRES_USER: ${POSTGRES_USER:-postgres} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} volumes: - postgres_data:/var/lib/postgresql/data - ./migrations/000_combined.sql:/docker-entrypoint-initdb.d/000_combined.sql:ro - ./migrations:/migrations:ro - ports: - - "127.0.0.1:${POSTGRES_PORT:-5432}:5432" + expose: + - "5432" networks: - archon-network healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-archon}"] interval: 5s timeout: 5s retries: 5 diff --git a/migrations/000_combined.sql b/migrations/000_combined.sql index 176963b40e..2bf528ffe7 100644 --- a/migrations/000_combined.sql +++ b/migrations/000_combined.sql @@ -1,5 +1,5 @@ -- Remote Coding Agent - Combined Schema --- Version: Combined (final state after migrations 001-020) +-- Version: Combined (final state after migrations 001-022) -- Description: Complete database schema (idempotent - safe to run multiple times) -- -- 8 Tables: @@ -11,6 +11,7 @@ -- 5. remote_agent_workflow_runs -- 6. remote_agent_workflow_events -- 7. remote_agent_messages +-- 8. 
+--   8. remote_agent_webhook_rules
 --
 -- Dropped tables (via migrations):
 --   - remote_agent_command_templates (017)
@@ -252,6 +253,29 @@ CREATE TABLE IF NOT EXISTS remote_agent_messages (
 CREATE INDEX IF NOT EXISTS idx_messages_conversation_id
   ON remote_agent_messages(conversation_id, created_at ASC);
 
+-- ============================================================================
+-- Table 8: Webhook Rules
+-- ============================================================================
+
+CREATE TABLE IF NOT EXISTS remote_agent_webhook_rules (
+  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+  codebase_id UUID NOT NULL REFERENCES remote_agent_codebases(id) ON DELETE CASCADE,
+  path_slug TEXT NOT NULL,
+  workflow_name TEXT NOT NULL,
+  enabled BOOLEAN NOT NULL DEFAULT TRUE,
+  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+);
+
+CREATE INDEX IF NOT EXISTS idx_webhook_rules_codebase
+  ON remote_agent_webhook_rules(codebase_id);
+
+CREATE INDEX IF NOT EXISTS idx_webhook_rules_path_slug
+  ON remote_agent_webhook_rules(path_slug);
+
+CREATE UNIQUE INDEX IF NOT EXISTS idx_webhook_rules_path_slug_unique
+  ON remote_agent_webhook_rules(path_slug);
+
 -- ============================================================================
 -- Cleanup: Drop legacy objects from older schemas
 -- ============================================================================
diff --git a/migrations/022_webhook_rules.sql b/migrations/022_webhook_rules.sql
new file mode 100644
index 0000000000..f78cd31d7f
--- /dev/null
+++ b/migrations/022_webhook_rules.sql
@@ -0,0 +1,37 @@
+CREATE TABLE IF NOT EXISTS remote_agent_webhook_rules (
+  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+  codebase_id UUID NOT NULL REFERENCES remote_agent_codebases(id) ON DELETE CASCADE,
+  path_slug TEXT,
+  workflow_name TEXT NOT NULL,
+  enabled BOOLEAN NOT NULL DEFAULT TRUE,
+  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+);
+
+ALTER TABLE remote_agent_webhook_rules
+  ADD COLUMN IF NOT EXISTS path_slug TEXT;
+
+UPDATE remote_agent_webhook_rules
+  SET path_slug = CONCAT('legacy-', SUBSTRING(MD5(id::text) FROM 1 FOR 12))
+  WHERE path_slug IS NULL OR BTRIM(path_slug) = '';
+
+ALTER TABLE remote_agent_webhook_rules
+  ALTER COLUMN path_slug SET NOT NULL;
+
+ALTER TABLE remote_agent_webhook_rules
+  DROP COLUMN IF EXISTS provider;
+
+ALTER TABLE remote_agent_webhook_rules
+  DROP COLUMN IF EXISTS event_type;
+
+CREATE INDEX IF NOT EXISTS idx_webhook_rules_codebase
+  ON remote_agent_webhook_rules(codebase_id);
+
+CREATE INDEX IF NOT EXISTS idx_webhook_rules_path_slug
+  ON remote_agent_webhook_rules(path_slug);
+
+DROP INDEX IF EXISTS idx_webhook_rules_provider_event;
+DROP INDEX IF EXISTS idx_webhook_rules_active_unique;
+
+CREATE UNIQUE INDEX IF NOT EXISTS idx_webhook_rules_path_slug_unique
+  ON remote_agent_webhook_rules(path_slug);
diff --git a/packages/adapters/src/community/forge/gitea/adapter.ts b/packages/adapters/src/community/forge/gitea/adapter.ts
index 4e68be8f32..21b44925a4 100644
--- a/packages/adapters/src/community/forge/gitea/adapter.ts
+++ b/packages/adapters/src/community/forge/gitea/adapter.ts
@@ -350,6 +350,19 @@ export class GiteaAdapter implements IPlatformAdapter {
       };
     }
 
+    // Detect newly opened PRs for rule-based processing
+    if (event.pull_request && event.action === 'opened') {
+      return {
+        owner,
+        repo,
+        number: event.pull_request.number,
+        comment: event.pull_request.body ??
'', + eventType: 'pull_request', + isPR: true, + pullRequest: event.pull_request, + }; + } + // issue_comment (covers both issues and PRs in Gitea) if (event.comment) { const number = event.issue?.number ?? event.pull_request?.number; @@ -774,139 +787,119 @@ Use 'tea pr view ${String(pr.number)}' for full details if needed.`; return; } - // 5. Check @mention - if (!this.hasMention(comment)) return; + const isMentionComment = eventType === 'issue_comment' && this.hasMention(comment); + if (!isMentionComment) return; - getLog().info({ eventType, owner, repo, number, isPR }, 'webhook_processing'); + getLog().info({ eventType, owner, repo, number, isPR, isMentionComment }, 'webhook_processing'); - // 6. Build conversationId const conversationId = this.buildConversationId(owner, repo, number, isPR); - // 7. Check if new conversation - const existingConv = await db.getOrCreateConversation('gitea', conversationId); - const isNewConversation = !existingConv.codebase_id; + if (isMentionComment) { + const existingConv = await db.getOrCreateConversation('gitea', conversationId); + const isNewConversation = !existingConv.codebase_id; + const { + codebase, + repoPath, + isNew: isNewCodebase, + } = await this.getOrCreateCodebaseForRepo(owner, repo); - // 8. Get/create codebase (checks for existing first!) - const { - codebase, - repoPath, - isNew: isNewCodebase, - } = await this.getOrCreateCodebaseForRepo(owner, repo); - - // 8b. Link conversation to codebase - if (isNewConversation) { - try { - await db.updateConversation(existingConv.id, { - codebase_id: codebase.id, - cwd: repoPath, - }); - } catch (updateError) { - if (updateError instanceof ConversationNotFoundError) { - getLog().error( - { conversationId: existingConv.id, codebaseId: codebase.id }, - 'conversation_codebase_link_failed' - ); - // Re-throw as this is a critical setup step - throw new Error('Failed to set up Gitea conversation - please try again'); + if (isNewConversation) { + try { + await db.updateConversation(existingConv.id, { + codebase_id: codebase.id, + cwd: repoPath, + }); + } catch (updateError) { + if (updateError instanceof ConversationNotFoundError) { + getLog().error( + { conversationId: existingConv.id, codebaseId: codebase.id }, + 'conversation_codebase_link_failed' + ); + throw new Error('Failed to set up Gitea conversation - please try again'); + } + throw updateError; } - throw updateError; } - } - - // 9. Get default branch from repository info - const defaultBranch = event.repository.default_branch; - - // 10. Ensure repo ready (clone if needed, sync if new conversation) - await this.ensureRepoReady(owner, repo, defaultBranch, repoPath, isNewCodebase); - // 11. Auto-load commands if new codebase - if (isNewCodebase) { - await this.autoDetectAndLoadCommands(repoPath, codebase.id); - } - - // 12. Gather isolation hints for orchestrator - const isolationHints: IsolationHints = { - workflowType: isPR ? 'pr' : 'issue', - workflowId: String(number), - }; + const defaultBranch = event.repository.default_branch; + await this.ensureRepoReady(owner, repo, defaultBranch, repoPath, isNewCodebase); + if (isNewCodebase) { + await this.autoDetectAndLoadCommands(repoPath, codebase.id); + } - // For PRs: get branch info from the event payload - if (isPR && pullRequest?.head) { - isolationHints.prBranch = toBranchName(pullRequest.head.ref); - isolationHints.prSha = pullRequest.head.sha; + const isolationHints: IsolationHints = { + workflowType: isPR ? 
'pr' : 'issue', + workflowId: String(number), + }; - // Detect if PR is from a fork - const headRepoFullName = pullRequest.head.repo?.full_name; - const baseRepoFullName = pullRequest.base?.repo?.full_name; - isolationHints.isForkPR = headRepoFullName !== baseRepoFullName; + if (isPR && pullRequest?.head) { + isolationHints.prBranch = toBranchName(pullRequest.head.ref); + isolationHints.prSha = pullRequest.head.sha; + const headRepoFullName = pullRequest.head.repo?.full_name; + const baseRepoFullName = pullRequest.base?.repo?.full_name; + isolationHints.isForkPR = headRepoFullName !== baseRepoFullName; - getLog().info( - { - prNumber: number, - headRef: pullRequest.head.ref, - headSha: pullRequest.head.sha?.substring(0, 7), - isFork: isolationHints.isForkPR, - }, - 'pr_head_info' - ); - } - - // 13. Build message with context - const strippedComment = this.stripMention(comment); - let finalMessage = strippedComment; - let contextToAppend: string | undefined; + getLog().info( + { + prNumber: number, + headRef: pullRequest.head.ref, + headSha: pullRequest.head.sha?.substring(0, 7), + isFork: isolationHints.isForkPR, + }, + 'pr_head_info' + ); + } - // IMPORTANT: Slash commands must be processed deterministically (not by AI) - const isSlashCommand = strippedComment.trim().startsWith('/'); + const strippedComment = this.stripMention(comment); + let finalMessage = strippedComment; + let contextToAppend: string | undefined; + const isSlashCommand = strippedComment.trim().startsWith('/'); - if (isSlashCommand) { - // For slash commands, use only the first line - finalMessage = strippedComment.split('\n')[0].trim(); - getLog().debug({ command: finalMessage }, 'slash_command_processing'); + if (isSlashCommand) { + finalMessage = strippedComment.split('\n')[0].trim(); + getLog().debug({ command: finalMessage }, 'slash_command_processing'); - // Add issue/PR reference context - if (isPR && pullRequest) { - contextToAppend = `Gitea Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'tea pr view ${String(pullRequest.number)}' for full details if needed.`; - } else if (issue) { - contextToAppend = `Gitea Issue #${String(issue.number)}: "${issue.title}"\nUse 'tea issue view ${String(issue.number)}' for full details if needed.`; - } - } else { - // For non-command messages, add rich context - if (isPR && pullRequest) { + if (isPR && pullRequest) { + contextToAppend = `Gitea Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'tea pr view ${String(pullRequest.number)}' for full details if needed.`; + } else if (issue) { + contextToAppend = `Gitea Issue #${String(issue.number)}: "${issue.title}"\nUse 'tea issue view ${String(issue.number)}' for full details if needed.`; + } + } else if (isPR && pullRequest) { finalMessage = this.buildPRContext(pullRequest, strippedComment); contextToAppend = `Gitea Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'tea pr view ${String(pullRequest.number)}' for full details if needed.`; } else if (issue) { finalMessage = this.buildIssueContext(issue, strippedComment); contextToAppend = `Gitea Issue #${String(issue.number)}: "${issue.title}"\nUse 'tea issue view ${String(issue.number)}' for full details if needed.`; } - } - // 14. Fetch comment history for thread context - const commentHistory = await this.fetchCommentHistory(owner, repo, number); - const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; - getLog().debug( - { commentCount: threadContext ? 
commentHistory.length : 0, conversationId }, - 'thread_context_loaded' - ); + const commentHistory = await this.fetchCommentHistory(owner, repo, number); + const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; + getLog().debug( + { commentCount: threadContext ? commentHistory.length : 0, conversationId }, + 'thread_context_loaded' + ); - // 15. Route to orchestrator with isolation hints (with lock for concurrency control) - await this.lockManager.acquireLock(conversationId, async () => { - try { - await handleMessage(this, conversationId, finalMessage, { - issueContext: contextToAppend, - threadContext, - isolationHints, - }); - } catch (error) { - const err = toError(error); - getLog().error({ err, conversationId }, 'message_handling_error'); + await this.lockManager.acquireLock(conversationId, async () => { try { - const userMessage = classifyAndFormatError(err); - await this.sendMessage(conversationId, userMessage); - } catch (sendError) { - getLog().error({ err: toError(sendError), conversationId }, 'error_message_send_failed'); + await handleMessage(this, conversationId, finalMessage, { + issueContext: contextToAppend, + threadContext, + isolationHints, + }); + } catch (error) { + const err = toError(error); + getLog().error({ err, conversationId }, 'message_handling_error'); + try { + await this.sendMessage(conversationId, classifyAndFormatError(err)); + } catch (sendError) { + getLog().error( + { err: toError(sendError), conversationId }, + 'error_message_send_failed' + ); + } } - } - }); + }); + return; + } } } diff --git a/packages/adapters/src/community/forge/gitlab/adapter.ts b/packages/adapters/src/community/forge/gitlab/adapter.ts index 1a96a315d8..55a9f8c232 100644 --- a/packages/adapters/src/community/forge/gitlab/adapter.ts +++ b/packages/adapters/src/community/forge/gitlab/adapter.ts @@ -307,6 +307,16 @@ export class GitLabAdapter implements IPlatformAdapter { // MR closed or merged if (event.object_kind === 'merge_request') { const action = event.object_attributes.action; + if (action === 'open') { + return { + projectPath, + iid: event.object_attributes.iid, + comment: event.object_attributes.description ?? '', + eventType: 'merge_request', + isMR: true, + mergeRequest: event.object_attributes, + }; + } if (action === 'close' || action === 'merge') { return { projectPath, @@ -658,128 +668,121 @@ Use 'glab mr view ${String(mr.iid)}' for full details and 'glab mr diff ${String return; } - // 6. Check @mention - if (!this.hasMention(comment)) return; + const isMentionComment = eventType === 'note' && this.hasMention(comment); + if (!isMentionComment) return; - getLog().info({ eventType, projectPath, iid, isMR }, 'gitlab.webhook_processing'); + getLog().info( + { eventType, projectPath, iid, isMR, isMentionComment }, + 'gitlab.webhook_processing' + ); // Steps 7-13 wrapped in try-catch so user gets error feedback on setup failures try { - // 7. 
Conversation + codebase setup const conversationId = this.buildConversationId(projectPath, iid, isMR); - const existingConv = await db.getOrCreateConversation('gitlab', conversationId); - const isNewConversation = !existingConv.codebase_id; - - const { - codebase, - repoPath, - isNew: isNewCodebase, - } = await this.getOrCreateCodebaseForRepo(projectPath); - - if (isNewConversation) { - try { - await db.updateConversation(existingConv.id, { - codebase_id: codebase.id, - cwd: repoPath, - }); - } catch (updateError) { - if (updateError instanceof ConversationNotFoundError) { - getLog().error( - { conversationId: existingConv.id, codebaseId: codebase.id }, - 'gitlab.conversation_codebase_link_failed' - ); - throw new Error('Failed to set up GitLab conversation - please try again'); + if (isMentionComment) { + const existingConv = await db.getOrCreateConversation('gitlab', conversationId); + const isNewConversation = !existingConv.codebase_id; + const { + codebase, + repoPath, + isNew: isNewCodebase, + } = await this.getOrCreateCodebaseForRepo(projectPath); + + if (isNewConversation) { + try { + await db.updateConversation(existingConv.id, { + codebase_id: codebase.id, + cwd: repoPath, + }); + } catch (updateError) { + if (updateError instanceof ConversationNotFoundError) { + getLog().error( + { conversationId: existingConv.id, codebaseId: codebase.id }, + 'gitlab.conversation_codebase_link_failed' + ); + throw new Error('Failed to set up GitLab conversation - please try again'); + } + throw updateError; } - throw updateError; } - } - - // 8. Get default branch - const defaultBranch = event.project.default_branch; - - // 9. Ensure repo ready - await this.ensureRepoReady(projectPath, defaultBranch, repoPath, isNewCodebase); - - // 10. Auto-load commands - if (isNewCodebase) { - await this.autoDetectAndLoadCommands(repoPath, codebase.id); - } - // 11. Isolation hints - const isolationHints: IsolationHints = { - workflowType: isMR ? 'pr' : 'issue', - workflowId: String(iid), - }; - - if (isMR && mergeRequest) { - isolationHints.prBranch = toBranchName(mergeRequest.source_branch); - isolationHints.isForkPR = mergeRequest.source_project_id !== mergeRequest.target_project_id; + const defaultBranch = event.project.default_branch; + await this.ensureRepoReady(projectPath, defaultBranch, repoPath, isNewCodebase); + if (isNewCodebase) { + await this.autoDetectAndLoadCommands(repoPath, codebase.id); + } - getLog().info( - { - mrIid: iid, - sourceBranch: mergeRequest.source_branch, - isFork: isolationHints.isForkPR, - }, - 'gitlab.mr_head_info' - ); - } + const isolationHints: IsolationHints = { + workflowType: isMR ? 'pr' : 'issue', + workflowId: String(iid), + }; - // 12. 
Build message with context - const strippedComment = this.stripMention(comment); - let finalMessage = strippedComment; - let contextToAppend: string | undefined; + if (isMR && mergeRequest) { + isolationHints.prBranch = toBranchName(mergeRequest.source_branch); + isolationHints.isForkPR = + mergeRequest.source_project_id !== mergeRequest.target_project_id; + + getLog().info( + { + mrIid: iid, + sourceBranch: mergeRequest.source_branch, + isFork: isolationHints.isForkPR, + }, + 'gitlab.mr_head_info' + ); + } - const isSlashCommand = strippedComment.trim().startsWith('/'); + const strippedComment = this.stripMention(comment); + let finalMessage = strippedComment; + let contextToAppend: string | undefined; + const isSlashCommand = strippedComment.trim().startsWith('/'); - if (isSlashCommand) { - finalMessage = strippedComment.split('\n')[0].trim(); - getLog().debug({ command: finalMessage }, 'gitlab.slash_command_processing'); + if (isSlashCommand) { + finalMessage = strippedComment.split('\n')[0].trim(); + getLog().debug({ command: finalMessage }, 'gitlab.slash_command_processing'); - if (isMR && mergeRequest) { - contextToAppend = `GitLab Merge Request !${String(mergeRequest.iid)}: "${mergeRequest.title}"\nUse 'glab mr view ${String(mergeRequest.iid)}' for full details if needed.`; - } else if (issue) { - contextToAppend = `GitLab Issue #${String(issue.iid)}: "${issue.title}"\nUse 'glab issue view ${String(issue.iid)}' for full details if needed.`; - } - } else { - if (isMR && mergeRequest) { + if (isMR && mergeRequest) { + contextToAppend = `GitLab Merge Request !${String(mergeRequest.iid)}: "${mergeRequest.title}"\nUse 'glab mr view ${String(mergeRequest.iid)}' for full details if needed.`; + } else if (issue) { + contextToAppend = `GitLab Issue #${String(issue.iid)}: "${issue.title}"\nUse 'glab issue view ${String(issue.iid)}' for full details if needed.`; + } + } else if (isMR && mergeRequest) { finalMessage = this.buildMRContext(mergeRequest, strippedComment); contextToAppend = `GitLab Merge Request !${String(mergeRequest.iid)}: "${mergeRequest.title}"\nUse 'glab mr view ${String(mergeRequest.iid)}' for full details if needed.`; } else if (issue) { finalMessage = this.buildIssueContext(issue, strippedComment); contextToAppend = `GitLab Issue #${String(issue.iid)}: "${issue.title}"\nUse 'glab issue view ${String(issue.iid)}' for full details if needed.`; } - } - // 13. Thread context + dispatch - const commentHistory = await this.fetchCommentHistory(projectPath, iid, isMR); - const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; - getLog().debug( - { commentCount: threadContext ? commentHistory.length : 0, conversationId }, - 'gitlab.thread_context_loaded' - ); + const commentHistory = await this.fetchCommentHistory(projectPath, iid, isMR); + const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; + getLog().debug( + { commentCount: threadContext ? 
commentHistory.length : 0, conversationId }, + 'gitlab.thread_context_loaded' + ); - await this.lockManager.acquireLock(conversationId, async () => { - try { - await handleMessage(this, conversationId, finalMessage, { - issueContext: contextToAppend, - threadContext, - isolationHints, - }); - } catch (error) { - const err = toError(error); - getLog().error({ err, conversationId }, 'gitlab.message_handling_error'); + await this.lockManager.acquireLock(conversationId, async () => { try { - const userMessage = classifyAndFormatError(err); - await this.sendMessage(conversationId, userMessage); - } catch (sendError) { - getLog().error( - { err: toError(sendError), conversationId }, - 'gitlab.error_message_send_failed' - ); + await handleMessage(this, conversationId, finalMessage, { + issueContext: contextToAppend, + threadContext, + isolationHints, + }); + } catch (error) { + const err = toError(error); + getLog().error({ err, conversationId }, 'gitlab.message_handling_error'); + try { + await this.sendMessage(conversationId, classifyAndFormatError(err)); + } catch (sendError) { + getLog().error( + { err: toError(sendError), conversationId }, + 'gitlab.error_message_send_failed' + ); + } } - } - }); + }); + return; + } } catch (error) { const err = toError(error); const conversationId = this.buildConversationId(projectPath, iid, isMR); diff --git a/packages/adapters/src/forge/github/adapter.ts b/packages/adapters/src/forge/github/adapter.ts index 91ea9cc52a..37060241c9 100644 --- a/packages/adapters/src/forge/github/adapter.ts +++ b/packages/adapters/src/forge/github/adapter.ts @@ -301,9 +301,10 @@ export class GitHubAdapter implements IPlatformAdapter { * Handles: * - issues.closed / pull_request.closed → cleanup (isCloseEvent: true) * - issue_comment.created → bot @mention detection + * - pull_request.opened → initial PR review processing * * Does NOT handle: - * - issues.opened / pull_request.opened → returns null (see #96) + * - issues.opened → returns null (see #96) */ private parseEvent(event: WebhookEvent): { owner: string; @@ -346,6 +347,18 @@ export class GitHubAdapter implements IPlatformAdapter { }; } + // Detect newly opened PRs for automatic review processing. + if (event.pull_request && event.action === 'opened') { + return { + owner, + repo, + number: event.pull_request.number, + comment: event.pull_request.body ?? '', + eventType: 'pull_request', + pullRequest: event.pull_request, + }; + } + // issue_comment (covers both issues and PRs) if (event.comment) { const number = event.issue?.number ?? event.pull_request?.number; @@ -361,11 +374,10 @@ export class GitHubAdapter implements IPlatformAdapter { }; } - // Note: We intentionally do NOT handle issues.opened or pull_request.opened - // events here. Issue/PR descriptions often contain example commands or - // documentation about how to use the bot - these are NOT command invocations. - // Only actual comments (issue_comment events) trigger bot responses. - // See issue #96 for details. + // Note: We intentionally do NOT handle issues.opened events here. Issue + // descriptions often contain example commands or documentation about how to + // use the bot - these are NOT command invocations. PR descriptions are + // carried as context when the PR is opened for review processing. 
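+    // For example, an issue body that merely documents how to @mention the bot
+    // must not trigger a response, while the same text posted as an
+    // issue_comment does.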
return null; } @@ -573,39 +585,15 @@ export class GitHubAdapter implements IPlatformAdapter { repoPath: string; isNew: boolean; }> { - // Try both with and without .git suffix to match existing clones - const repoUrlNoGit = `https://github.com/${owner}/${repo}`; - const repoUrlWithGit = `${repoUrlNoGit}.git`; - - let existing = await codebaseDb.findCodebaseByRepoUrl(repoUrlNoGit); - existing ??= await codebaseDb.findCodebaseByRepoUrl(repoUrlWithGit); - - // Canonical path includes owner to prevent collisions between repos with same name - // e.g., alice/utils and bob/utils get separate directories - const canonicalPath = join(getArchonWorkspacesPath(), owner, repo); - + const existing = await this.findExistingCodebaseForRepo(owner, repo); if (existing) { - // Check if existing codebase points to a worktree path - fix it if so - // Either it's an actual worktree, or it looks like one (contains /worktrees/ in path) - const looksLikeWorktreePath = existing.default_cwd.includes('/worktrees/'); - if (looksLikeWorktreePath || (await isWorktreePath(existing.default_cwd))) { - getLog().info( - { codebaseName: existing.name, canonicalPath }, - 'github.stale_worktree_path_fixed' - ); - await codebaseDb.updateCodebase(existing.id, { default_cwd: canonicalPath }); - existing.default_cwd = canonicalPath; - } - - getLog().info( - { codebaseName: existing.name, path: existing.default_cwd }, - 'github.existing_codebase_found' - ); - return { codebase: existing, repoPath: existing.default_cwd, isNew: false }; + return { ...existing, isNew: false }; } // Include owner in name to distinguish repos with same name from different owners // resolve() converts relative paths to absolute (cross-platform) + const repoUrlNoGit = `https://github.com/${owner}/${repo}`; + const canonicalPath = join(getArchonWorkspacesPath(), owner, repo); const codebase = await codebaseDb.createCodebase({ name: `${owner}/${repo}`, repository_url: repoUrlNoGit, // Store without .git for consistency @@ -616,6 +604,38 @@ export class GitHubAdapter implements IPlatformAdapter { return { codebase, repoPath: canonicalPath, isNew: true }; } + private async findExistingCodebaseForRepo( + owner: string, + repo: string + ): Promise<{ + codebase: { id: string; name: string; default_cwd: string }; + repoPath: string; + } | null> { + const repoUrlNoGit = `https://github.com/${owner}/${repo}`; + const repoUrlWithGit = `${repoUrlNoGit}.git`; + + let existing = await codebaseDb.findCodebaseByRepoUrl(repoUrlNoGit); + existing ??= await codebaseDb.findCodebaseByRepoUrl(repoUrlWithGit); + if (!existing) return null; + + const canonicalPath = join(getArchonWorkspacesPath(), owner, repo); + const looksLikeWorktreePath = existing.default_cwd.includes('/worktrees/'); + if (looksLikeWorktreePath || (await isWorktreePath(existing.default_cwd))) { + getLog().info( + { codebaseName: existing.name, canonicalPath }, + 'github.stale_worktree_path_fixed' + ); + await codebaseDb.updateCodebase(existing.id, { default_cwd: canonicalPath }); + existing.default_cwd = canonicalPath; + } + + getLog().info( + { codebaseName: existing.name, path: existing.default_cwd }, + 'github.existing_codebase_found' + ); + return { codebase: existing, repoPath: existing.default_cwd }; + } + /** * Clean up worktree when an issue/PR is closed * Delegates to cleanup service for unified handling @@ -741,212 +761,168 @@ ${userComment}`; return; } - // 5. 
Check @mention - if (!this.hasMention(comment)) return; + const isMentionComment = eventType === 'issue_comment' && this.hasMention(comment); + if (!isMentionComment) return; - getLog().info({ eventType, owner, repo, number }, 'github.webhook_processing'); + getLog().info( + { eventType, owner, repo, number, isMentionComment }, + 'github.webhook_processing' + ); - // 4. Build conversationId const conversationId = this.buildConversationId(owner, repo, number); - // 5. Check if new conversation - const existingConv = await db.getOrCreateConversation('github', conversationId); - const isNewConversation = !existingConv.codebase_id; + if (isMentionComment) { + const existingConv = await db.getOrCreateConversation('github', conversationId); + const isNewConversation = !existingConv.codebase_id; + const { + codebase, + repoPath, + isNew: isNewCodebase, + } = await this.getOrCreateCodebaseForRepo(owner, repo); - // 6. Get/create codebase (checks for existing first!) - const { - codebase, - repoPath, - isNew: isNewCodebase, - } = await this.getOrCreateCodebaseForRepo(owner, repo); + if (isNewConversation) { + try { + await db.updateConversation(existingConv.id, { + codebase_id: codebase.id, + cwd: repoPath, + }); + } catch (updateError) { + if (updateError instanceof ConversationNotFoundError) { + getLog().error( + { conversationId: existingConv.id, codebaseId: codebase.id }, + 'github.conversation_codebase_link_failed' + ); + throw new Error('Failed to set up GitHub conversation - please try again'); + } + throw updateError; + } + } - // 6b. Link conversation to codebase (fixes #97) - if (isNewConversation) { + let defaultBranch: string; try { - await db.updateConversation(existingConv.id, { - codebase_id: codebase.id, - cwd: repoPath, - }); - } catch (updateError) { - if (updateError instanceof ConversationNotFoundError) { + const { data: repoData } = await this.octokit.rest.repos.get({ owner, repo }); + defaultBranch = repoData.default_branch; + } catch (error) { + const err = toError(error); + getLog().error({ err, owner, repo, conversationId }, 'github.repo_metadata_fetch_failed'); + try { + await this.sendMessage(conversationId, classifyAndFormatError(err)); + } catch (sendError) { getLog().error( - { conversationId: existingConv.id, codebaseId: codebase.id }, - 'github.conversation_codebase_link_failed' + { err: toError(sendError), conversationId }, + 'github.error_message_send_failed' ); - // Re-throw as this is a critical setup step - throw new Error('Failed to set up GitHub conversation - please try again'); } - throw updateError; + return; } - } - // 7. Get default branch - let defaultBranch: string; - try { - const { data: repoData } = await this.octokit.rest.repos.get({ owner, repo }); - defaultBranch = repoData.default_branch; - } catch (error) { - const err = toError(error); - getLog().error({ err, owner, repo, conversationId }, 'github.repo_metadata_fetch_failed'); - try { - const userMessage = classifyAndFormatError(err); - await this.sendMessage(conversationId, userMessage); - } catch (sendError) { - getLog().error( - { err: toError(sendError), conversationId }, - 'github.error_message_send_failed' - ); + await this.ensureRepoReady(owner, repo, defaultBranch, repoPath, isNewCodebase); + if (isNewCodebase) { + await this.autoDetectAndLoadCommands(repoPath, codebase.id); } - return; - } - // 8. Ensure repo ready (clone if needed, sync if new conversation) - await this.ensureRepoReady(owner, repo, defaultBranch, repoPath, isNewCodebase); - - // 9. 
Auto-load commands if new codebase (defaults loaded at runtime, not copied) - if (isNewCodebase) { - await this.autoDetectAndLoadCommands(repoPath, codebase.id); - } - - // 10. Gather isolation hints for orchestrator - // The orchestrator now handles all isolation decisions - const isPR = eventType === 'pull_request' || !!pullRequest || !!issue?.pull_request; - - // Build isolation hints for orchestrator - const isolationHints: IsolationHints = { - workflowType: isPR ? 'pr' : 'issue', - workflowId: String(number), - }; - - // For PRs: get linked issues and branch info - if (isPR) { - // Get linked issues for worktree sharing - const linkedIssues = await getLinkedIssueNumbers(owner, repo, number); - if (linkedIssues.length > 0) { - isolationHints.linkedIssues = linkedIssues; - getLog().info({ prNumber: number, linkedIssues }, 'github.pr_linked_issues'); - } + const isPR = !!pullRequest || !!issue?.pull_request; + const isolationHints: IsolationHints = { + workflowType: isPR ? 'pr' : 'issue', + workflowId: String(number), + }; - // Fetch PR head branch, SHA, and fork status for isolation - try { - const { data: prData } = await this.octokit.rest.pulls.get({ - owner, - repo, - pull_number: number, - }); - isolationHints.prBranch = toBranchName(prData.head.ref); - isolationHints.prSha = prData.head.sha; - - // Detect if PR is from a fork (different repo than base) - // For fork PRs: head.repo is different from base.repo - // For same-repo PRs: head.repo.full_name === base.repo.full_name - // Note: head.repo can be null if the fork was deleted after PR creation - // In that case, we treat it as a fork (can't push to deleted repo anyway) - const headRepoFullName = prData.head.repo?.full_name; - const baseRepoFullName = prData.base.repo.full_name; - isolationHints.isForkPR = headRepoFullName !== baseRepoFullName; - - getLog().info( - { - prNumber: number, - headRef: prData.head.ref, - headSha: prData.head.sha.substring(0, 7), - isFork: isolationHints.isForkPR, - }, - 'github.pr_head_info' - ); - } catch (error) { - const err = error as Error; - // Log at appropriate level based on error type - const isNonTransient = - err.message.includes('rate limit') || - err.message.includes('403') || - err.message.includes('401') || - err.message.includes('Bad credentials'); - - const logData = { err, owner, repo, prNumber: number }; - if (isNonTransient) { - getLog().error(logData, 'github.pr_head_fetch_failed'); - } else { - getLog().warn(logData, 'github.pr_head_fetch_failed'); + if (isPR) { + const linkedIssues = await getLinkedIssueNumbers(owner, repo, number); + if (linkedIssues.length > 0) { + isolationHints.linkedIssues = linkedIssues; + getLog().info({ prNumber: number, linkedIssues }, 'github.pr_linked_issues'); } - // Mark degraded mode - worktree isolation will use fallback naming - isolationHints.prFetchFailed = true; + try { + const { data: prData } = await this.octokit.rest.pulls.get({ + owner, + repo, + pull_number: number, + }); + isolationHints.prBranch = toBranchName(prData.head.ref); + isolationHints.prSha = prData.head.sha; + const headRepoFullName = prData.head.repo?.full_name; + const baseRepoFullName = prData.base.repo.full_name; + isolationHints.isForkPR = headRepoFullName !== baseRepoFullName; + + getLog().info( + { + prNumber: number, + headRef: prData.head.ref, + headSha: prData.head.sha.substring(0, 7), + isFork: isolationHints.isForkPR, + }, + 'github.pr_head_info' + ); + } catch (error) { + const err = error as Error; + const isNonTransient = + err.message.includes('rate limit') 
|| + err.message.includes('403') || + err.message.includes('401') || + err.message.includes('Bad credentials'); + + if (isNonTransient) { + getLog().error({ err, owner, repo, prNumber: number }, 'github.pr_head_fetch_failed'); + } else { + getLog().warn({ err, owner, repo, prNumber: number }, 'github.pr_head_fetch_failed'); + } + + isolationHints.prFetchFailed = true; + } } - } - - // 11. Build message with context - const strippedComment = this.stripMention(comment); - let finalMessage = strippedComment; - let contextToAppend: string | undefined; - // IMPORTANT: Slash commands must be processed deterministically (not by AI) - const isSlashCommand = strippedComment.trim().startsWith('/'); + const strippedComment = this.stripMention(comment); + let finalMessage = strippedComment; + let contextToAppend: string | undefined; + const isSlashCommand = strippedComment.trim().startsWith('/'); - if (isSlashCommand) { - // For slash commands, use only the first line - finalMessage = strippedComment.split('\n')[0].trim(); - getLog().debug({ command: finalMessage }, 'github.slash_command_processing'); + if (isSlashCommand) { + finalMessage = strippedComment.split('\n')[0].trim(); + getLog().debug({ command: finalMessage }, 'github.slash_command_processing'); - // Add issue/PR reference context - if (eventType === 'issue' && issue) { - contextToAppend = `GitHub Issue #${String(issue.number)}: "${issue.title}"\nUse 'gh issue view ${String(issue.number)}' for full details if needed.`; - } else if (eventType === 'pull_request' && pullRequest) { - contextToAppend = `GitHub Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'gh pr view ${String(pullRequest.number)}' for full details if needed.`; - } else if (eventType === 'issue_comment') { if (pullRequest) { contextToAppend = `GitHub Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'gh pr view ${String(pullRequest.number)}' for full details if needed.`; } else if (issue) { contextToAppend = `GitHub Issue #${String(issue.number)}: "${issue.title}"\nUse 'gh issue view ${String(issue.number)}' for full details if needed.`; } - } - } else { - // For non-command messages, add rich context and issue/PR reference for workflows - if (eventType === 'issue' && issue) { - finalMessage = this.buildIssueContext(issue, strippedComment); - contextToAppend = `GitHub Issue #${String(issue.number)}: "${issue.title}"\nUse 'gh issue view ${String(issue.number)}' for full details if needed.`; - } else if (eventType === 'issue_comment' && issue) { - finalMessage = this.buildIssueContext(issue, strippedComment); - contextToAppend = `GitHub Issue #${String(issue.number)}: "${issue.title}"\nUse 'gh issue view ${String(issue.number)}' for full details if needed.`; - } else if (eventType === 'pull_request' && pullRequest) { - finalMessage = this.buildPRContext(pullRequest, strippedComment); - contextToAppend = `GitHub Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'gh pr view ${String(pullRequest.number)}' for full details if needed.`; - } else if (eventType === 'issue_comment' && pullRequest) { + } else if (pullRequest) { finalMessage = this.buildPRContext(pullRequest, strippedComment); contextToAppend = `GitHub Pull Request #${String(pullRequest.number)}: "${pullRequest.title}"\nUse 'gh pr view ${String(pullRequest.number)}' for full details if needed.`; + } else if (issue) { + finalMessage = this.buildIssueContext(issue, strippedComment); + contextToAppend = `GitHub Issue #${String(issue.number)}: 
"${issue.title}"\nUse 'gh issue view ${String(issue.number)}' for full details if needed.`; } - } - // 12. Fetch comment history for thread context - const commentHistory = await this.fetchCommentHistory(owner, repo, number); - const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; - getLog().debug( - { commentCount: threadContext ? commentHistory.length : 0, conversationId }, - 'github.thread_context_loaded' - ); + const commentHistory = await this.fetchCommentHistory(owner, repo, number); + const threadContext = commentHistory.length > 0 ? commentHistory.join('\n') : undefined; + getLog().debug( + { commentCount: threadContext ? commentHistory.length : 0, conversationId }, + 'github.thread_context_loaded' + ); - // 13. Route to orchestrator with isolation hints (with lock for concurrency control) - await this.lockManager.acquireLock(conversationId, async () => { - try { - await handleMessage(this, conversationId, finalMessage, { - issueContext: contextToAppend, - threadContext, - isolationHints, - }); - } catch (error) { - const err = toError(error); - getLog().error({ err, conversationId }, 'github.message_handling_error'); + await this.lockManager.acquireLock(conversationId, async () => { try { - const userMessage = classifyAndFormatError(err); - await this.sendMessage(conversationId, userMessage); - } catch (sendError) { - getLog().error( - { err: toError(sendError), conversationId }, - 'github.error_message_send_failed' - ); + await handleMessage(this, conversationId, finalMessage, { + issueContext: contextToAppend, + threadContext, + isolationHints, + }); + } catch (error) { + const err = toError(error); + getLog().error({ err, conversationId }, 'github.message_handling_error'); + try { + await this.sendMessage(conversationId, classifyAndFormatError(err)); + } catch (sendError) { + getLog().error( + { err: toError(sendError), conversationId }, + 'github.error_message_send_failed' + ); + } } - } - }); + }); + return; + } } } diff --git a/packages/adapters/src/forge/github/context.test.ts b/packages/adapters/src/forge/github/context.test.ts index 914e77eb88..4e766fefa0 100644 --- a/packages/adapters/src/forge/github/context.test.ts +++ b/packages/adapters/src/forge/github/context.test.ts @@ -187,6 +187,40 @@ function createIssueCommentPayload( }); } +function createPullRequestPayload( + prBody: string | null, + options: { + prNumber?: number; + prTitle?: string; + author?: string; + } = {} +): string { + const { prNumber = 42, prTitle = 'Test PR Title', author = 'user123' } = options; + + return JSON.stringify({ + action: 'opened', + pull_request: { + number: prNumber, + title: prTitle, + body: prBody, + user: { login: author }, + state: 'open', + merged: false, + changed_files: 3, + additions: 20, + deletions: 5, + }, + repository: { + owner: { login: 'testuser' }, + name: 'testrepo', + full_name: 'testuser/testrepo', + html_url: 'https://github.com/testuser/testrepo', + default_branch: 'main', + }, + sender: { login: author }, + }); +} + /** * Create an adapter with mocked internals for testing handleWebhook. 
*/ diff --git a/packages/core/package.json b/packages/core/package.json index d0d93635b6..5f11226cf0 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -23,7 +23,7 @@ "./state/*": "./src/state/*.ts" }, "scripts": { - "test": "bun test src/clients/codex-binary-guard.test.ts && bun test src/utils/codex-binary-resolver.test.ts && bun test src/utils/codex-binary-resolver-dev.test.ts && bun test src/clients/claude.test.ts src/clients/codex.test.ts src/clients/factory.test.ts && bun test src/handlers/command-handler.test.ts && bun test src/handlers/clone.test.ts && bun test src/db/adapters/postgres.test.ts && bun test src/db/adapters/sqlite.test.ts src/db/codebases.test.ts src/db/connection.test.ts src/db/conversations.test.ts src/db/env-vars.test.ts src/db/isolation-environments.test.ts src/db/messages.test.ts src/db/sessions.test.ts src/db/workflow-events.test.ts src/db/workflows.test.ts src/utils/defaults-copy.test.ts src/utils/worktree-sync.test.ts src/utils/conversation-lock.test.ts src/utils/credential-sanitizer.test.ts src/utils/port-allocation.test.ts src/utils/error.test.ts src/utils/error-formatter.test.ts src/utils/github-graphql.test.ts src/utils/env-allowlist.test.ts src/utils/env-leak-scanner.test.ts src/config/ src/state/ && bun test src/utils/path-validation.test.ts && bun test src/services/cleanup-service.test.ts && bun test src/services/title-generator.test.ts && bun test src/workflows/ && bun test src/operations/workflow-operations.test.ts && bun test src/operations/isolation-operations.test.ts && bun test src/orchestrator/orchestrator.test.ts && bun test src/orchestrator/orchestrator-agent.test.ts && bun test src/orchestrator/orchestrator-isolation.test.ts", + "test": "bun test src/clients/codex-binary-guard.test.ts && bun test src/utils/codex-binary-resolver.test.ts && bun test src/utils/codex-binary-resolver-dev.test.ts && bun test src/clients/claude.test.ts src/clients/codex.test.ts src/clients/factory.test.ts && bun test src/handlers/command-handler.test.ts && bun test src/handlers/clone.test.ts && bun test src/db/adapters/postgres.test.ts && bun test src/db/adapters/sqlite.test.ts src/db/codebases.test.ts src/db/connection.test.ts src/db/conversations.test.ts src/db/env-vars.test.ts src/db/isolation-environments.test.ts src/db/messages.test.ts src/db/sessions.test.ts src/db/webhook-rules.test.ts src/db/workflow-events.test.ts src/db/workflows.test.ts src/utils/defaults-copy.test.ts src/utils/worktree-sync.test.ts src/utils/conversation-lock.test.ts src/utils/credential-sanitizer.test.ts src/utils/port-allocation.test.ts src/utils/error.test.ts src/utils/error-formatter.test.ts src/utils/github-graphql.test.ts src/utils/env-allowlist.test.ts src/utils/env-leak-scanner.test.ts src/config/ src/state/ && bun test src/utils/path-validation.test.ts && bun test src/services/cleanup-service.test.ts && bun test src/services/title-generator.test.ts && bun test src/workflows/ && bun test src/operations/workflow-operations.test.ts && bun test src/operations/isolation-operations.test.ts && bun test src/orchestrator/orchestrator.test.ts && bun test src/orchestrator/orchestrator-agent.test.ts && bun test src/orchestrator/orchestrator-isolation.test.ts", "type-check": "bun x tsc --noEmit", "build": "echo 'No build needed - Bun runs TypeScript directly'" }, @@ -33,7 +33,7 @@ "@archon/isolation": "workspace:*", "@archon/paths": "workspace:*", "@archon/workflows": "workspace:*", - "@openai/codex-sdk": "^0.116.0", + "@openai/codex-sdk": "^0.120.0", "pg": "^8.11.0", 
"zod": "^3" }, diff --git a/packages/core/src/clients/codex.test.ts b/packages/core/src/clients/codex.test.ts index cfa329e7c1..4206a9f226 100644 --- a/packages/core/src/clients/codex.test.ts +++ b/packages/core/src/clients/codex.test.ts @@ -862,21 +862,24 @@ describe('CodexClient', () => { ); }); - test('throws actionable model-access message for unavailable configured model', async () => { - mockRunStreamed.mockRejectedValue(new Error('403 Forbidden: model not available')); + test('throws actionable model-access message for unsupported configured model', async () => { + mockRunStreamed.mockRejectedValue(new Error('403 Forbidden: model not supported')); const consumeGenerator = async () => { for await (const _ of client.sendQuery('test', '/workspace', undefined, { - model: 'gpt-5.3-codex', + model: 'gpt-5.4', })) { // consume } }; await expect(consumeGenerator()).rejects.toThrow( - 'Model "gpt-5.3-codex" is not available for your account' + 'Model "gpt-5.4" is not available for your account' ); - await expect(consumeGenerator()).rejects.toThrow('model: gpt-5.2-codex'); + await expect(consumeGenerator()).rejects.toThrow( + 'unavailable or not supported in your current Codex runtime' + ); + await expect(consumeGenerator()).rejects.toThrow('model: gpt-5.3-codex'); }); test('uses generic dashboard guidance when fallback mapping is unknown', async () => { diff --git a/packages/core/src/clients/codex.ts b/packages/core/src/clients/codex.ts index e6e9d1dd09..a91fba4d9c 100644 --- a/packages/core/src/clients/codex.ts +++ b/packages/core/src/clients/codex.ts @@ -83,6 +83,8 @@ function buildThreadOptions(cwd: string, options?: AssistantRequestOptions): Thr } const CODEX_MODEL_FALLBACKS: Record = { + 'gpt-5.4': 'gpt-5.3-codex', + 'gpt-5.4-mini': 'gpt-5.4', 'gpt-5.3-codex': 'gpt-5.2-codex', }; @@ -90,7 +92,10 @@ function isModelAccessError(errorMessage: string): boolean { const m = errorMessage.toLowerCase(); const hasModel = m.includes('model'); const hasAvailabilitySignal = - m.includes('not available') || m.includes('not found') || m.includes('access denied'); + m.includes('not available') || + m.includes('not found') || + m.includes('access denied') || + m.includes('not supported'); return hasModel && hasAvailabilitySignal; } @@ -107,7 +112,7 @@ function buildModelAccessMessage(model?: string): string { ? `Or set it per-workflow with \`model: ${suggested}\` in workflow YAML.` : 'Or set it per-workflow with a valid `model:` in workflow YAML.'; - return `❌ Model "${selectedModel}" is not available for your account.\n\n${fixLine}\n\n${workflowLine}`; + return `❌ Model "${selectedModel}" is not available for your account.\n\nThis can happen when the model is unavailable or not supported in your current Codex runtime.\n\n${fixLine}\n\n${workflowLine}`; } /** Max retries for transient failures (3 = 4 total attempts). 
diff --git a/packages/core/src/config/config-loader.test.ts b/packages/core/src/config/config-loader.test.ts index da18deded7..facb868146 100644 --- a/packages/core/src/config/config-loader.test.ts +++ b/packages/core/src/config/config-loader.test.ts @@ -529,7 +529,7 @@ defaultAssistant: codex botName: MyBot assistants: codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: medium `); diff --git a/packages/core/src/config/config-loader.ts b/packages/core/src/config/config-loader.ts index 8ee702c613..e7d8d421bb 100644 --- a/packages/core/src/config/config-loader.ts +++ b/packages/core/src/config/config-loader.ts @@ -83,7 +83,7 @@ const DEFAULT_CONFIG_CONTENT = `# Archon Global Configuration # claude: # model: sonnet # codex: -# model: gpt-5.3-codex +# model: gpt-5.4 # modelReasoningEffort: medium # webSearchMode: disabled # additionalDirectories: diff --git a/packages/core/src/db/adapters/sqlite.ts b/packages/core/src/db/adapters/sqlite.ts index 2864e4fc43..260bf1658e 100644 --- a/packages/core/src/db/adapters/sqlite.ts +++ b/packages/core/src/db/adapters/sqlite.ts @@ -231,6 +231,92 @@ export class SqliteAdapter implements IDatabase { } catch (e: unknown) { getLog().warn({ err: e as Error }, 'db.sqlite_migration_codebases_columns_failed'); } + + // Webhook rules columns/indexes + try { + let webhookCols = this.db + .prepare("PRAGMA table_info('remote_agent_webhook_rules')") + .all() as { + name: string; + }[]; + let webhookColNames = new Set(webhookCols.map(c => c.name)); + + const hasLegacyProviderColumns = + webhookColNames.has('provider') || webhookColNames.has('event_type'); + + if (hasLegacyProviderColumns) { + this.db.run('BEGIN'); + try { + this.db.run(` + CREATE TABLE IF NOT EXISTS remote_agent_webhook_rules__new ( + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))), + codebase_id TEXT NOT NULL REFERENCES remote_agent_codebases(id) ON DELETE CASCADE, + path_slug TEXT NOT NULL, + workflow_name TEXT NOT NULL, + enabled INTEGER NOT NULL DEFAULT 1, + created_at TEXT DEFAULT (datetime('now')), + updated_at TEXT DEFAULT (datetime('now')) + ) + `); + + this.db.run(` + INSERT INTO remote_agent_webhook_rules__new + (id, codebase_id, path_slug, workflow_name, enabled, created_at, updated_at) + SELECT + id, + codebase_id, + CASE + WHEN path_slug IS NULL OR trim(path_slug) = '' + THEN 'legacy-' || lower(hex(randomblob(6))) + ELSE path_slug + END, + workflow_name, + COALESCE(enabled, 1), + COALESCE(created_at, datetime('now')), + COALESCE(updated_at, datetime('now')) + FROM remote_agent_webhook_rules + `); + + this.db.run('DROP TABLE remote_agent_webhook_rules'); + this.db.run( + 'ALTER TABLE remote_agent_webhook_rules__new RENAME TO remote_agent_webhook_rules' + ); + this.db.run('COMMIT'); + + webhookCols = this.db + .prepare("PRAGMA table_info('remote_agent_webhook_rules')") + .all() as { + name: string; + }[]; + webhookColNames = new Set(webhookCols.map(c => c.name)); + } catch (migrationError) { + this.db.run('ROLLBACK'); + throw migrationError; + } + } + + if (!webhookColNames.has('path_slug')) { + this.db.run('ALTER TABLE remote_agent_webhook_rules ADD COLUMN path_slug TEXT'); + } + + this.db.run(` + UPDATE remote_agent_webhook_rules + SET path_slug = 'legacy-' || lower(hex(randomblob(6))) + WHERE path_slug IS NULL OR trim(path_slug) = '' + `); + + this.db.run( + 'CREATE INDEX IF NOT EXISTS idx_webhook_rules_path_slug ON remote_agent_webhook_rules(path_slug)' + ); + this.db.run('DROP INDEX IF EXISTS idx_webhook_rules_provider_event'); + this.db.run('DROP INDEX IF 
EXISTS idx_webhook_rules_active_unique'); + this.db.run(` + CREATE UNIQUE INDEX IF NOT EXISTS idx_webhook_rules_path_slug_unique + ON remote_agent_webhook_rules(path_slug) + `); + } catch (e: unknown) { + getLog().warn({ err: e as Error }, 'db.sqlite_migration_webhook_rule_columns_failed'); + } } /** @@ -353,6 +439,17 @@ export class SqliteAdapter implements IDatabase { created_at TEXT DEFAULT (datetime('now')) ); + -- Webhook rules table + CREATE TABLE IF NOT EXISTS remote_agent_webhook_rules ( + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))), + codebase_id TEXT NOT NULL REFERENCES remote_agent_codebases(id) ON DELETE CASCADE, + path_slug TEXT NOT NULL, + workflow_name TEXT NOT NULL, + enabled INTEGER NOT NULL DEFAULT 1, + created_at TEXT DEFAULT (datetime('now')), + updated_at TEXT DEFAULT (datetime('now')) + ); + -- Messages table (conversation history for Web UI) CREATE TABLE IF NOT EXISTS remote_agent_messages ( id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))), @@ -374,6 +471,7 @@ export class SqliteAdapter implements IDatabase { CREATE INDEX IF NOT EXISTS idx_workflow_runs_status ON remote_agent_workflow_runs(status); CREATE INDEX IF NOT EXISTS idx_workflow_events_run_id ON remote_agent_workflow_events(workflow_run_id); CREATE INDEX IF NOT EXISTS idx_workflow_events_type ON remote_agent_workflow_events(event_type); + CREATE INDEX IF NOT EXISTS idx_webhook_rules_codebase ON remote_agent_webhook_rules(codebase_id); CREATE INDEX IF NOT EXISTS idx_messages_conversation_id ON remote_agent_messages(conversation_id, created_at ASC); CREATE INDEX IF NOT EXISTS idx_workflow_runs_parent_conv ON remote_agent_workflow_runs(parent_conversation_id); CREATE INDEX IF NOT EXISTS idx_conversations_hidden ON remote_agent_conversations(hidden); diff --git a/packages/core/src/db/webhook-rules.test.ts b/packages/core/src/db/webhook-rules.test.ts new file mode 100644 index 0000000000..e1962aa1b8 --- /dev/null +++ b/packages/core/src/db/webhook-rules.test.ts @@ -0,0 +1,135 @@ +import { beforeEach, describe, expect, mock, test } from 'bun:test'; +import { createQueryResult, mockPostgresDialect } from '../test/mocks/database'; +import type { WebhookRule, WebhookRuleWithCodebaseName } from '../webhooks/types'; + +const mockQuery = mock(() => Promise.resolve(createQueryResult([]))); + +mock.module('./connection', () => ({ + pool: { + query: mockQuery, + }, + getDialect: () => mockPostgresDialect, +})); + +import { + createWebhookRule, + listWebhookRules, + findWebhookRuleBySlug, + updateWebhookRule, + isWebhookRuleConflictError, + WebhookRuleConflictError, +} from './webhook-rules'; + +describe('webhook-rules', () => { + const baseRule: WebhookRule = { + id: 'rule-1', + codebase_id: 'codebase-1', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + enabled: true, + created_at: new Date('2026-01-01T00:00:00Z'), + updated_at: new Date('2026-01-01T00:00:00Z'), + }; + + beforeEach(() => { + mockQuery.mockReset(); + mockQuery.mockImplementation(() => Promise.resolve(createQueryResult([]))); + }); + + test('createWebhookRule inserts and returns the created rule', async () => { + mockQuery.mockResolvedValueOnce(createQueryResult([baseRule])); + + const result = await createWebhookRule({ + codebase_id: 'codebase-1', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + enabled: true, + }); + + expect(result).toEqual(baseRule); + expect(mockQuery).toHaveBeenCalledWith( + expect.stringContaining('INSERT INTO remote_agent_webhook_rules'), + ['codebase-1', 
'kokot-pr-review', 'archon-smart-pr-review', true] + ); + }); + + test('listWebhookRules returns joined rules with normalized enabled flag', async () => { + const row: WebhookRuleWithCodebaseName = { + ...baseRule, + enabled: 1 as unknown as boolean, + codebase_name: 'SmelhausJosef/KoKot', + }; + mockQuery.mockResolvedValueOnce(createQueryResult([row])); + + const result = await listWebhookRules(); + + expect(result).toHaveLength(1); + expect(result[0]?.enabled).toBe(true); + expect(result[0]?.codebase_name).toBe('SmelhausJosef/KoKot'); + expect(mockQuery).toHaveBeenCalledWith( + expect.stringContaining('FROM remote_agent_webhook_rules r') + ); + }); + + test('findWebhookRuleBySlug filters by slug and enabled', async () => { + mockQuery.mockResolvedValueOnce(createQueryResult([baseRule])); + + const result = await findWebhookRuleBySlug('kokot-pr-review'); + + expect(result).toEqual(baseRule); + expect(mockQuery).toHaveBeenCalledWith(expect.stringContaining('AND enabled = $2'), [ + 'kokot-pr-review', + true, + ]); + }); + + test('updateWebhookRule updates requested fields and reloads the record', async () => { + const updatedRule: WebhookRule = { + ...baseRule, + path_slug: 'kokot-triage', + workflow_name: 'triage', + enabled: false, + }; + + mockQuery.mockResolvedValueOnce(createQueryResult([], 1)); + mockQuery.mockResolvedValueOnce(createQueryResult([updatedRule])); + + const result = await updateWebhookRule('rule-1', { + path_slug: 'kokot-triage', + workflow_name: 'triage', + enabled: false, + }); + + expect(result).toEqual(updatedRule); + expect(mockQuery).toHaveBeenNthCalledWith( + 1, + 'UPDATE remote_agent_webhook_rules\n SET path_slug = $1, workflow_name = $2, enabled = $3, updated_at = NOW()\n WHERE id = $4', + ['kokot-triage', 'triage', false, 'rule-1'] + ); + expect(mockQuery).toHaveBeenNthCalledWith( + 2, + 'SELECT * FROM remote_agent_webhook_rules WHERE id = $1', + ['rule-1'] + ); + }); + + test('createWebhookRule maps unique violations to WebhookRuleConflictError', async () => { + mockQuery.mockRejectedValueOnce({ code: '23505', message: 'duplicate key value' }); + + await expect( + createWebhookRule({ + codebase_id: 'codebase-1', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + }) + ).rejects.toBeInstanceOf(WebhookRuleConflictError); + }); + + test('isWebhookRuleConflictError detects sqlite unique failures', () => { + expect( + isWebhookRuleConflictError({ + message: 'UNIQUE constraint failed: remote_agent_webhook_rules.path_slug', + }) + ).toBe(true); + }); +}); diff --git a/packages/core/src/db/webhook-rules.ts b/packages/core/src/db/webhook-rules.ts new file mode 100644 index 0000000000..e43ce215f4 --- /dev/null +++ b/packages/core/src/db/webhook-rules.ts @@ -0,0 +1,161 @@ +import { createLogger } from '@archon/paths'; +import { getDialect, pool } from './connection'; +import type { WebhookRule, WebhookRuleWithCodebaseName } from '../webhooks/types'; + +/** Lazy-initialized logger (deferred so test mocks can intercept createLogger). 
*/ +let cachedLog: ReturnType | undefined; +function getLog(): ReturnType { + if (!cachedLog) cachedLog = createLogger('db.webhook-rules'); + return cachedLog; +} + +export class WebhookRuleConflictError extends Error { + constructor(message = 'Webhook rule already exists for this URL slug') { + super(message); + this.name = 'WebhookRuleConflictError'; + } +} + +function normalizeRuleEnabled(rule: T): T & { enabled: boolean } { + return { ...rule, enabled: Boolean(rule.enabled) }; +} + +function coerceConflictError(error: unknown): never { + if (isWebhookRuleConflictError(error)) { + throw new WebhookRuleConflictError(); + } + throw error; +} + +export function isWebhookRuleConflictError(error: unknown): boolean { + const err = error as { code?: string; message?: string } | undefined; + const message = err?.message ?? ''; + return ( + err?.code === '23505' || + message.includes('idx_webhook_rules_path_slug_unique') || + message.includes('duplicate key value violates unique constraint') || + message.includes('UNIQUE constraint failed: remote_agent_webhook_rules.path_slug') + ); +} + +export async function createWebhookRule(input: { + codebase_id: string; + path_slug: string; + workflow_name: string; + enabled?: boolean; +}): Promise { + try { + const result = await pool.query( + `INSERT INTO remote_agent_webhook_rules + (codebase_id, path_slug, workflow_name, enabled) + VALUES ($1, $2, $3, $4) + RETURNING *`, + [input.codebase_id, input.path_slug, input.workflow_name, input.enabled ?? true] + ); + if (!result.rows[0]) { + throw new Error('Failed to create webhook rule: INSERT returned no row'); + } + return normalizeRuleEnabled(result.rows[0]); + } catch (error) { + coerceConflictError(error); + } +} + +export async function getWebhookRule(id: string): Promise { + const result = await pool.query( + 'SELECT * FROM remote_agent_webhook_rules WHERE id = $1', + [id] + ); + return result.rows[0] ? normalizeRuleEnabled(result.rows[0]) : null; +} + +export async function listWebhookRules(): Promise { + const result = await pool.query( + `SELECT r.*, c.name AS codebase_name + FROM remote_agent_webhook_rules r + INNER JOIN remote_agent_codebases c ON c.id = r.codebase_id + ORDER BY c.name ASC, r.path_slug ASC` + ); + return result.rows.map(row => normalizeRuleEnabled(row)); +} + +export async function findWebhookRuleBySlug(pathSlug: string): Promise { + const result = await pool.query( + `SELECT * FROM remote_agent_webhook_rules + WHERE path_slug = $1 + AND enabled = $2 + LIMIT 1`, + [pathSlug, true] + ); + return result.rows[0] ? 
+export async function updateWebhookRule(
+  id: string,
+  updates: {
+    codebase_id?: string;
+    path_slug?: string;
+    workflow_name?: string;
+    enabled?: boolean;
+  }
+): Promise<WebhookRule> {
+  const dialect = getDialect();
+  const setClauses: string[] = [];
+  const params: (string | boolean)[] = [];
+  let paramIndex = 1;
+
+  if (updates.codebase_id !== undefined) {
+    setClauses.push(`codebase_id = $${paramIndex++}`);
+    params.push(updates.codebase_id);
+  }
+  if (updates.path_slug !== undefined) {
+    setClauses.push(`path_slug = $${paramIndex++}`);
+    params.push(updates.path_slug);
+  }
+  if (updates.workflow_name !== undefined) {
+    setClauses.push(`workflow_name = $${paramIndex++}`);
+    params.push(updates.workflow_name);
+  }
+  if (updates.enabled !== undefined) {
+    setClauses.push(`enabled = $${paramIndex++}`);
+    params.push(updates.enabled);
+  }
+
+  if (setClauses.length === 0) {
+    const existing = await getWebhookRule(id);
+    if (!existing) {
+      throw new Error(`Webhook rule ${id} not found`);
+    }
+    return existing;
+  }
+
+  setClauses.push(`updated_at = ${dialect.now()}`);
+  params.push(id);
+
+  try {
+    const result = await pool.query(
+      `UPDATE remote_agent_webhook_rules
+      SET ${setClauses.join(', ')}
+      WHERE id = $${paramIndex}`,
+      params
+    );
+    if ((result.rowCount ?? 0) === 0) {
+      throw new Error(`Webhook rule ${id} not found`);
+    }
+  } catch (error) {
+    coerceConflictError(error);
+  }
+
+  const updated = await getWebhookRule(id);
+  if (!updated) {
+    throw new Error(`Webhook rule ${id} not found after update`);
+  }
+  return updated;
+}
+
+export async function deleteWebhookRule(id: string): Promise<void> {
+  const result = await pool.query('DELETE FROM remote_agent_webhook_rules WHERE id = $1', [id]);
+  if ((result.rowCount ?? 0) === 0) {
+    getLog().debug({ webhookRuleId: id }, 'db.webhook_rule_delete_noop');
+  }
+}
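+
+// Usage sketch (illustrative): callers are expected to map unique-violation
+// failures to an HTTP 409, e.g.
+//
+//   try {
+//     await createWebhookRule(input);
+//   } catch (error) {
+//     if (isWebhookRuleConflictError(error)) return conflict409(); // hypothetical helper
+//     throw error;
+//   }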
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index e212eb10c9..e9e41b1046 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -48,6 +48,7 @@ export * as sessionDb from './db/sessions';
 export * as isolationEnvDb from './db/isolation-environments';
 export * as workflowDb from './db/workflows';
 export * as messageDb from './db/messages';
+export * as webhookRuleDb from './db/webhook-rules';
 
 // Re-export SessionNotFoundError for error handling
 export { SessionNotFoundError } from './db/sessions';
@@ -78,9 +79,20 @@ export * as isolationOperations from './operations/isolation-operations';
 // =============================================================================
 // Orchestrator
 // =============================================================================
-export { handleMessage } from './orchestrator/orchestrator-agent';
+export { handleMessage, dispatchNamedWorkflow } from './orchestrator/orchestrator-agent';
 export { buildOrchestratorPrompt, buildProjectScopedPrompt } from './orchestrator/prompt-builder';
 
+// =============================================================================
+// Webhooks
+// =============================================================================
+export { type WebhookRule, type WebhookRuleWithCodebaseName } from './webhooks/types';
+export {
+  matchRuleBySlug,
+  normalizeWebhookPayload,
+  buildWebhookWorkflowInput,
+  dispatchMatchedWebhookRule,
+} from './webhooks/rules-engine';
+
 // =============================================================================
 // Handlers
 // =============================================================================
diff --git a/packages/core/src/orchestrator/orchestrator-agent.ts b/packages/core/src/orchestrator/orchestrator-agent.ts
index 97d989f47c..bd740c1f68 100644
--- a/packages/core/src/orchestrator/orchestrator-agent.ts
+++ b/packages/core/src/orchestrator/orchestrator-agent.ts
@@ -317,6 +317,37 @@ async function dispatchOrchestratorWorkflow(
   }
 }
 
+export async function dispatchNamedWorkflow(
+  platform: IPlatformAdapter,
+  conversationId: string,
+  conversation: Conversation,
+  codebase: Codebase,
+  workflowName: string,
+  userMessage: string,
+  isolationHints?: HandleMessageContext['isolationHints']
+): Promise<void> {
+  const discoverResult = await discoverAllWorkflows({
+    ...conversation,
+    codebase_id: codebase.id,
+  });
+  const workflowDefinitions = discoverResult.workflows.map(entry => entry.workflow);
+  const workflow = findWorkflow(workflowName, workflowDefinitions);
+
+  if (!workflow) {
+    throw new Error(`Workflow "${workflowName}" not found for codebase ${codebase.name}`);
+  }
+
+  await dispatchOrchestratorWorkflow(
+    platform,
+    conversationId,
+    conversation,
+    codebase,
+    workflow,
+    userMessage,
+    isolationHints
+  );
+}
+
 // ─── Session Helpers ────────────────────────────────────────────────────────
 
 async function tryPersistSessionId(sessionId: string, assistantSessionId: string): Promise<void> {
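For orientation, a hedged sketch of calling the new export from inside `packages/core` (the `declare const` placeholders stand in for values normally obtained from the platform adapter and the conversations/codebases tables; the conversation id and workflow name are examples only):

```ts
import { dispatchNamedWorkflow } from './orchestrator/orchestrator-agent';
import type { Codebase, Conversation, IPlatformAdapter } from './types';

// Placeholders for values loaded elsewhere (illustrative only):
declare const platform: IPlatformAdapter;
declare const conversation: Conversation;
declare const codebase: Codebase;

// Dispatch a discovered workflow by name; throws if the workflow is not
// discoverable for the codebase.
await dispatchNamedWorkflow(
  platform,
  'conv-123', // platform conversation id
  conversation,
  codebase,
  'archon-smart-pr-review',
  'Please review the linked PR'
);
```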
diff --git a/packages/core/src/webhooks/rules-engine.ts b/packages/core/src/webhooks/rules-engine.ts
new file mode 100644
index 0000000000..c20f313e10
--- /dev/null
+++ b/packages/core/src/webhooks/rules-engine.ts
@@ -0,0 +1,110 @@
+import { createLogger } from '@archon/paths';
+import type { Codebase, Conversation, HandleMessageContext, IPlatformAdapter } from '../types';
+import { findWebhookRuleBySlug } from '../db/webhook-rules';
+import { dispatchNamedWorkflow } from '../orchestrator/orchestrator-agent';
+import type { WebhookRule } from './types';
+
+/** Lazy-initialized logger (deferred so test mocks can intercept createLogger). */
+let cachedLog: ReturnType<typeof createLogger> | undefined;
+function getLog(): ReturnType<typeof createLogger> {
+  if (!cachedLog) cachedLog = createLogger('webhook-rules');
+  return cachedLog;
+}
+
+export async function matchRuleBySlug(pathSlug: string): Promise<WebhookRule | null> {
+  return findWebhookRuleBySlug(pathSlug);
+}
+
+export function normalizeWebhookPayload(rawBody: string, contentType?: string | null): string {
+  const trimmedBody = rawBody.trim();
+  if (!trimmedBody) {
+    return '(empty)';
+  }
+
+  const shouldTryJson =
+    (contentType?.toLowerCase().includes('json') ?? false) ||
+    trimmedBody.startsWith('{') ||
+    trimmedBody.startsWith('[');
+
+  if (!shouldTryJson) {
+    return trimmedBody;
+  }
+
+  try {
+    return JSON.stringify(JSON.parse(trimmedBody), null, 2);
+  } catch {
+    return trimmedBody;
+  }
+}
+
+export function buildWebhookWorkflowInput(params: {
+  pathSlug: string;
+  rawBody: string;
+  contentType?: string | null;
+}): string {
+  const normalizedBody = normalizeWebhookPayload(params.rawBody, params.contentType);
+
+  return [
+    'A webhook rule matched this request.',
+    `Webhook slug: ${params.pathSlug}`,
+    `Content-Type: ${params.contentType?.trim() || 'unknown'}`,
+    '',
+    'Request body:',
+    normalizedBody,
+  ].join('\n');
+}
+
+export async function dispatchMatchedWebhookRule(params: {
+  platform: IPlatformAdapter;
+  conversationId: string;
+  conversation: Conversation;
+  codebase: Codebase;
+  pathSlug: string;
+  rawBody: string;
+  contentType?: string | null;
+  isolationHints?: HandleMessageContext['isolationHints'];
+  matchedRule?: WebhookRule | null;
+}): Promise<WebhookRule | null> {
+  const matchedRule = params.matchedRule ?? (await matchRuleBySlug(params.pathSlug));
+
+  if (!matchedRule) {
+    getLog().debug({ pathSlug: params.pathSlug }, 'webhook_rule_not_matched');
+    return null;
+  }
+
+  const userMessage = buildWebhookWorkflowInput({
+    pathSlug: params.pathSlug,
+    rawBody: params.rawBody,
+    contentType: params.contentType,
+  });
+
+  getLog().info(
+    {
+      codebaseId: params.codebase.id,
+      workflowName: matchedRule.workflow_name,
+      pathSlug: params.pathSlug,
+    },
+    'webhook_rule_dispatch_started'
+  );
+
+  await dispatchNamedWorkflow(
+    params.platform,
+    params.conversationId,
+    params.conversation,
+    params.codebase,
+    matchedRule.workflow_name,
+    userMessage,
+    params.isolationHints
+  );
+
+  getLog().info(
+    {
+      codebaseId: params.codebase.id,
+      workflowName: matchedRule.workflow_name,
+      pathSlug: params.pathSlug,
+    },
+    'webhook_rule_dispatch_completed'
+  );
+
+  return matchedRule;
+}
diff --git a/packages/core/src/webhooks/types.ts b/packages/core/src/webhooks/types.ts
new file mode 100644
index 0000000000..b0b1675d57
--- /dev/null
+++ b/packages/core/src/webhooks/types.ts
@@ -0,0 +1,13 @@
+export interface WebhookRule {
+  id: string;
+  codebase_id: string;
+  path_slug: string;
+  workflow_name: string;
+  enabled: boolean;
+  created_at: Date;
+  updated_at: Date;
+}
+
+export interface WebhookRuleWithCodebaseName extends WebhookRule {
+  codebase_name: string;
+}
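With the server route added later in this patch (`POST /webhooks/:slug` in `packages/server/src/routes/api.ts`), a configured rule can be exercised with a plain HTTP request. A minimal client sketch (host, port, slug, and payload are illustrative):

```ts
// Illustrative only: trigger a slug-based webhook rule with an arbitrary JSON body.
const res = await fetch('http://localhost:3000/webhooks/kokot-pr-review', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ event: 'pull_request', action: 'opened' }),
});

console.log(res.status); // 202 on a match; 404 for an unknown or disabled slug
console.log(await res.json()); // { accepted, conversationId, ruleId, workflowName }
```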
diff --git a/packages/docs-web/src/content/docs/adapters/community/gitea.md b/packages/docs-web/src/content/docs/adapters/community/gitea.md
index 94264248a0..f2552c718b 100644
--- a/packages/docs-web/src/content/docs/adapters/community/gitea.md
+++ b/packages/docs-web/src/content/docs/adapters/community/gitea.md
@@ -99,7 +99,11 @@ Interact by @mentioning the bot in issue or PR **comments**:
 - Maintains full context across comments
 
 :::note
-Only comments trigger the bot. @mentions in issue or PR descriptions are ignored -- descriptions often contain example commands or documentation that are not intended as bot invocations.
+Only comments trigger the bot directly. @mentions in issue or PR descriptions are ignored -- descriptions often contain example commands or documentation that are not intended as bot invocations.
+:::
+
+:::note
+Gitea webhooks are used here for explicit `@archon` comment flows and cleanup only. Automatic workflow dispatch now uses slug-based webhook rules via `/webhooks/<slug>`.
 :::
 
 ## How It Works
diff --git a/packages/docs-web/src/content/docs/adapters/community/gitlab.md b/packages/docs-web/src/content/docs/adapters/community/gitlab.md
index 64b54bca27..fe2912f7d0 100644
--- a/packages/docs-web/src/content/docs/adapters/community/gitlab.md
+++ b/packages/docs-web/src/content/docs/adapters/community/gitlab.md
@@ -111,6 +111,10 @@ Mention your bot in issue or MR comments:
 
 **Subsequent mentions** resume the existing conversation with full context.
 
+:::note
+GitLab webhooks are used here for explicit `@archon` comment flows and cleanup only. Automatic workflow dispatch now uses slug-based webhook rules via `/webhooks/<slug>`.
+:::
+
 ## Conversation ID Format
 
 | Type | Format | Example |
@@ -126,7 +130,7 @@ Mention your bot in issue or MR comments:
 | **Note Hook** (comment with @mention) | Triggers AI conversation |
 | **Issue Hook** (close) | Cleans up isolation environment |
 | **MR Hook** (close/merge) | Cleans up isolation environment |
-| Issue/MR opened | Ignored (descriptions are not commands) |
+| Issue opened | Ignored (descriptions are not commands) |
 
 ## Adding Additional Projects
diff --git a/packages/docs-web/src/content/docs/adapters/github.md b/packages/docs-web/src/content/docs/adapters/github.md
index 6067d0a7f4..3735eaadf1 100644
--- a/packages/docs-web/src/content/docs/adapters/github.md
+++ b/packages/docs-web/src/content/docs/adapters/github.md
@@ -114,7 +114,7 @@ Interact by @mentioning your bot in issue or PR **comments**:
 - Maintains full context across comments
 
 :::note
-Only comments trigger the bot. @mentions in issue or PR descriptions are ignored -- descriptions often contain example commands or documentation that are not intended as bot invocations.
+Issue descriptions are ignored. GitHub webhooks are used here for explicit `@archon` comment flows and cleanup only. If you want to trigger a workflow automatically, configure a slug-based webhook rule in the Web UI and POST to `/webhooks/<slug>`.
 :::
 
 ## Adding Additional Repositories
diff --git a/packages/docs-web/src/content/docs/deployment/docker.md b/packages/docs-web/src/content/docs/deployment/docker.md
index fc1add6678..7c50041535 100644
--- a/packages/docs-web/src/content/docs/deployment/docker.md
+++ b/packages/docs-web/src/content/docs/deployment/docker.md
@@ -49,7 +49,8 @@ nano /opt/archon/.env
 # Set at minimum:
 # CLAUDE_CODE_OAUTH_TOKEN=sk-ant-oat01-...
# DOMAIN=archon.example.com -# DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent +# POSTGRES_PASSWORD=change-me +# DATABASE_URL=postgresql://postgres:change-me@postgres:5432/remote_coding_agent # (Optional) Set up basic auth to protect Web UI: # docker run caddy caddy hash-password --plaintext 'YOUR_PASSWORD' @@ -57,7 +58,7 @@ nano /opt/archon/.env # Start cd /opt/archon -docker compose --profile with-db --profile cloud up -d +docker compose --profile cloud up -d ``` > **Don't forget DNS**: Before starting, point your domain's A record to the server's IP. @@ -76,7 +77,7 @@ docker compose --profile with-db --profile cloud up -d ## Local Docker Desktop (Windows / macOS) -Run Archon locally with Docker Desktop — no domain, no VPS required. Uses SQLite and the Web UI only. +Run Archon locally with Docker Desktop — no domain, no VPS required. The default stack includes the Web UI and a local PostgreSQL container. ### Quick start @@ -111,22 +112,22 @@ git reset --hard | Feature | Status | |---------|--------| | Web UI | http://localhost:3000 | -| Database | SQLite (automatic, zero setup) | +| Database | PostgreSQL 17 (automatic, local container) | | HTTPS / Caddy | Not needed locally | | Auth | None (single-user, localhost only) | | Platform adapters | Optional (Telegram, Slack, etc.) | -### Using PostgreSQL locally (optional) +### Database defaults -```bash -docker compose --profile with-db up -d -``` +`docker compose up -d` now starts both `app` and `postgres`. By default, the app uses the bundled database service: -Then add to `.env`: ```env +POSTGRES_PASSWORD=postgres DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent ``` +Set `DATABASE_URL` only if you want the app to use an external PostgreSQL instance instead. + --- ## Manual Server Setup @@ -177,8 +178,9 @@ CLAUDE_CODE_OAUTH_TOKEN=sk-ant-oat01-xxxxx DOMAIN=archon.example.com # Database — connect to the Docker PostgreSQL container -# Without this, the app uses SQLite (fine for getting started, but PostgreSQL recommended) -DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent +# Default Docker setup uses the bundled postgres service +POSTGRES_PASSWORD=change-me +DATABASE_URL=postgresql://postgres:change-me@postgres:5432/remote_coding_agent # Basic Auth (optional) — protects Web UI when exposed to the internet # Skip if using IP-based firewall rules instead. @@ -195,7 +197,7 @@ DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent > **Docker does not support `CLAUDE_USE_GLOBAL_AUTH=true`** — there is no local `claude` CLI inside the container. You must provide either `CLAUDE_CODE_OAUTH_TOKEN` or `CLAUDE_API_KEY` explicitly. > -> **If you use `--profile with-db` without setting `DATABASE_URL`**, the app will fall back to SQLite and log a warning. The PostgreSQL container runs but is unused. +> **Docker Compose now starts PostgreSQL by default.** If `DATABASE_URL` is omitted, the root compose file injects a default connection string that points to the bundled `postgres` service. ### 4. Point your domain to the server @@ -219,7 +221,7 @@ sudo ufw --force enable ### 6. 
Start ```bash -docker compose --profile with-db --profile cloud up -d +docker compose --profile cloud up -d ``` This starts three containers: @@ -231,7 +233,7 @@ This starts three containers: ```bash # Check all containers are running -docker compose --profile with-db --profile cloud ps +docker compose --profile cloud ps # Watch logs docker compose logs -f app @@ -247,28 +249,26 @@ Open **https://archon.example.com** in your browser — you should see the Archo ## Profiles -Archon uses Docker Compose profiles to optionally add PostgreSQL and/or HTTPS. Mix and match: +Archon uses Docker Compose profiles only for optional HTTPS/auth layers. PostgreSQL is part of the default stack: | Command | What runs | |---------|-----------| -| `docker compose up -d` | App with SQLite | -| `docker compose --profile with-db up -d` | App + PostgreSQL | -| `docker compose --profile cloud up -d` | App + Caddy (HTTPS) | -| `docker compose --profile with-db --profile cloud up -d` | App + PostgreSQL + Caddy | +| `docker compose up -d` | App + PostgreSQL | +| `docker compose --profile cloud up -d` | App + PostgreSQL + Caddy | +| `docker compose --profile cloud --profile auth up -d` | App + PostgreSQL + Caddy + form auth | :::note There is no `external-db` profile. When using an external PostgreSQL database (Supabase, Neon, etc.), just set `DATABASE_URL` in `.env` and run `docker compose up -d` without any profile. The base `app` service always starts. ::: -### No profile (SQLite) - -Zero-config default. No database container needed — SQLite file is stored in the `archon_data` volume. +### No profile (App + PostgreSQL) -### `--profile with-db` (PostgreSQL) +Default stack. Starts the app plus a PostgreSQL 17 container. -Starts a PostgreSQL 17 container. Set the connection URL in `.env`: +Set in `.env`: ```ini +POSTGRES_PASSWORD=postgres DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent ``` @@ -355,7 +355,7 @@ An alternative to basic auth that serves a styled HTML login form instead of the 5. Start with both `cloud` and `auth` profiles: ```bash - docker compose --profile with-db --profile cloud --profile auth up -d + docker compose --profile cloud --profile auth up -d ``` 6. Visit your domain — you should be redirected to `/login`. @@ -494,7 +494,7 @@ cp .env.example .env docker compose up -d ``` -Uses `ghcr.io/coleam00/archon:latest`. To add PostgreSQL, uncomment the `postgres` service in the compose file and set `DATABASE_URL` in `.env`. +Uses `ghcr.io/coleam00/archon:latest`. To switch this deploy stack to a local Dockerfile build plus bundled PostgreSQL, copy `deploy/docker-compose.override.example.yml` to `docker-compose.override.yml` and run `docker compose up -d --build`. To layer custom tools on top of the pre-built image, see [Customizing the Image](#customizing-the-image). @@ -535,7 +535,8 @@ To add extra tools without modifying the tracked Dockerfile: 3. Copy the override file: - **Local/dev**: `cp docker-compose.override.example.yml docker-compose.override.yml` - **Server/deploy**: `cp deploy/docker-compose.override.example.yml docker-compose.override.yml` -4. Run `docker compose up -d` — Compose merges the override automatically. +4. If you want the deploy override to build from `Dockerfile.user`, set `ARCHON_DOCKERFILE=Dockerfile.user` in `.env` (otherwise it builds from the tracked `Dockerfile`). +5. Run `docker compose up -d --build` — Compose merges the override automatically. 
`Dockerfile.user` and `docker-compose.override.yml` are gitignored so your customizations stay local. @@ -555,7 +556,7 @@ docker compose logs --tail=100 app # Last 100 lines ```bash git pull -docker compose --profile with-db --profile cloud up -d --build +docker compose --profile cloud up -d --build ``` ### Restart @@ -643,7 +644,7 @@ curl http://localhost:3000/api/health ### PostgreSQL connection refused -When using `--profile with-db`, ensure: +When using the bundled PostgreSQL container, ensure: 1. `DATABASE_URL` uses `postgres` as hostname (Docker service name), not `localhost`: ```ini diff --git a/packages/docs-web/src/content/docs/deployment/index.md b/packages/docs-web/src/content/docs/deployment/index.md index 2c4762b218..e614d5cfbe 100644 --- a/packages/docs-web/src/content/docs/deployment/index.md +++ b/packages/docs-web/src/content/docs/deployment/index.md @@ -24,9 +24,9 @@ Archon can run locally for development or be deployed to a server for always-on | Option | Setup | Best For | |--------|-------|----------| -| **SQLite** (default) | Zero config, just omit `DATABASE_URL` | Single-user, CLI usage, local development | +| **SQLite** (default outside Docker) | Zero config, just omit `DATABASE_URL` | Single-user, CLI usage, local development | | **Remote PostgreSQL** | Set `DATABASE_URL` to hosted DB | Cloud deployments, shared access | -| **Local PostgreSQL** | Docker `--profile with-db` | Self-hosted, Docker-based setups | +| **Local PostgreSQL** | Docker `docker compose up -d` | Self-hosted, Docker-based setups | SQLite stores data at `~/.archon/archon.db` (or `/.archon/archon.db` in Docker). It is auto-initialized on first run. diff --git a/packages/docs-web/src/content/docs/deployment/local.md b/packages/docs-web/src/content/docs/deployment/local.md index 2e3c9f9618..fc5b235634 100644 --- a/packages/docs-web/src/content/docs/deployment/local.md +++ b/packages/docs-web/src/content/docs/deployment/local.md @@ -56,7 +56,7 @@ In development mode, two servers run simultaneously: If you prefer PostgreSQL for local development: ```bash -docker compose --profile with-db up -d postgres +docker compose up -d postgres # Set DATABASE_URL=postgresql://postgres:postgres@localhost:5432/remote_coding_agent in .env ``` @@ -142,7 +142,7 @@ Use this option to run both the app and PostgreSQL in Docker containers. The dat # Set: DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent # 2. Start both containers -docker compose --profile with-db up -d --build +docker compose up -d --build # 3. Wait for startup (watch logs) docker compose logs -f app @@ -172,7 +172,7 @@ docker compose exec postgres psql -U postgres -d remote_coding_agent ### Stop ```bash -docker compose --profile with-db down +docker compose down ``` --- @@ -189,7 +189,7 @@ For deploying to a VPS (DigitalOcean, Linode, AWS EC2, etc.) with automatic HTTP |--------|-------|----------| | **SQLite** (default) | Zero config, just omit `DATABASE_URL` | Single-user, CLI usage, local development | | **Remote PostgreSQL** | Set `DATABASE_URL` to hosted DB | Cloud deployments, shared access | -| **Local PostgreSQL** | Docker `--profile with-db` | Self-hosted, Docker-based setups | +| **Local PostgreSQL** | Docker `docker compose up -d` | Self-hosted, Docker-based setups | SQLite stores data at `~/.archon/archon.db` (or `/.archon/archon.db` in Docker). It is auto-initialized on first run. 
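+
+Illustratively, the selection rule described above amounts to the following sketch (not Archon's actual code):
+
+```ts
+// PostgreSQL when DATABASE_URL is set; otherwise SQLite at ~/.archon/archon.db.
+function selectBackend(env: Record<string, string | undefined>) {
+  const url = env.DATABASE_URL?.trim();
+  return url
+    ? { dialect: 'postgres' as const, target: url }
+    : { dialect: 'sqlite' as const, target: '~/.archon/archon.db' };
+}
+```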
@@ -238,7 +238,7 @@ curl http://localhost:3090/health/concurrency # Concurrency status ```bash # Check logs docker compose logs app # default (SQLite or external DB) -docker compose logs app # --profile with-db +docker compose logs app # bundled PostgreSQL # Verify environment docker compose config diff --git a/packages/docs-web/src/content/docs/getting-started/ai-assistants.md b/packages/docs-web/src/content/docs/getting-started/ai-assistants.md index c856c9ccd4..ebe625f107 100644 --- a/packages/docs-web/src/content/docs/getting-started/ai-assistants.md +++ b/packages/docs-web/src/content/docs/getting-started/ai-assistants.md @@ -115,7 +115,7 @@ You can configure Codex's behavior in `.archon/config.yaml`: ```yaml assistants: codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: medium # 'minimal' | 'low' | 'medium' | 'high' | 'xhigh' webSearchMode: live # 'disabled' | 'cached' | 'live' additionalDirectories: diff --git a/packages/docs-web/src/content/docs/getting-started/configuration.md b/packages/docs-web/src/content/docs/getting-started/configuration.md index ec836f1202..78d7430347 100644 --- a/packages/docs-web/src/content/docs/getting-started/configuration.md +++ b/packages/docs-web/src/content/docs/getting-started/configuration.md @@ -33,7 +33,7 @@ assistants: settingSources: - project codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: medium # docs: diff --git a/packages/docs-web/src/content/docs/guides/authoring-workflows.md b/packages/docs-web/src/content/docs/guides/authoring-workflows.md index 6481aefac7..babf46d96e 100644 --- a/packages/docs-web/src/content/docs/guides/authoring-workflows.md +++ b/packages/docs-web/src/content/docs/guides/authoring-workflows.md @@ -554,7 +554,7 @@ model: sonnet # Model override (default: from config assistants.claude.mo - `inherit` - Use model from previous session **Codex models:** -- Any OpenAI model ID (e.g., `gpt-5.3-codex`, `o5-pro`) +- Any OpenAI model ID (e.g., `gpt-5.4`, `o5-pro`) - Cannot use Claude model aliases ### Codex-Specific Options @@ -562,7 +562,7 @@ model: sonnet # Model override (default: from config assistants.claude.mo ```yaml name: my-workflow provider: codex -model: gpt-5.3-codex +model: gpt-5.4 modelReasoningEffort: medium # 'minimal' | 'low' | 'medium' | 'high' | 'xhigh' webSearchMode: live # 'disabled' | 'cached' | 'live' additionalDirectories: @@ -650,7 +650,7 @@ assistants: claude: model: haiku # Fast model for most tasks codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: low webSearchMode: disabled ``` diff --git a/packages/docs-web/src/content/docs/reference/configuration.md b/packages/docs-web/src/content/docs/reference/configuration.md index a1024c530c..165a55af51 100644 --- a/packages/docs-web/src/content/docs/reference/configuration.md +++ b/packages/docs-web/src/content/docs/reference/configuration.md @@ -61,7 +61,7 @@ assistants: - project # Project-level CLAUDE.md (always recommended) - user # Also load ~/.claude/CLAUDE.md (global preferences) codex: - model: gpt-5.3-codex + model: gpt-5.4 modelReasoningEffort: medium webSearchMode: disabled additionalDirectories: @@ -105,7 +105,7 @@ assistants: settingSources: # Override global settingSources for this repo - project codex: - model: gpt-5.3-codex + model: gpt-5.4 webSearchMode: live # Commands configuration diff --git a/packages/docs-web/src/content/docs/reference/database.md b/packages/docs-web/src/content/docs/reference/database.md index 6cab854622..69023998ce 100644 --- 
a/packages/docs-web/src/content/docs/reference/database.md +++ b/packages/docs-web/src/content/docs/reference/database.md @@ -9,7 +9,7 @@ sidebar: order: 5 --- -Archon supports two database backends: **SQLite** (default, zero setup) and **PostgreSQL** (optional, for cloud/advanced deployments). The database backend is selected automatically based on whether the `DATABASE_URL` environment variable is set. +Archon supports two database backends: **SQLite** (default outside Docker, zero setup) and **PostgreSQL** (default in the root Docker Compose stack, optional elsewhere). The database backend is selected automatically based on whether the `DATABASE_URL` environment variable is set. ## SQLite (Default - No Setup Required) @@ -68,15 +68,16 @@ psql $DATABASE_URL < migrations/020_codebase_env_vars.sql ## Local PostgreSQL via Docker -Use the `with-db` Docker Compose profile for automatic PostgreSQL setup. +The root `docker-compose.yml` starts PostgreSQL by default. Set in `.env`: ```ini +POSTGRES_PASSWORD=postgres DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent ``` -**For fresh installations**, the database schema is created automatically when you start with `docker compose --profile with-db`. The combined migration runs on first startup. +**For fresh installations**, the database schema is created automatically when you start with `docker compose up -d`. The combined migration runs on first startup. **For updates to existing Docker installations**, you need to manually run new migrations: diff --git a/packages/server/package.json b/packages/server/package.json index 7de8c49955..eaf0ea2a15 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -6,7 +6,7 @@ "scripts": { "dev": "bun --watch src/index.ts", "start": "bun src/index.ts", - "test": "bun test src/routes/api.workflows.test.ts && bun test src/routes/api.conversations.test.ts && bun test src/routes/api.codebases.test.ts && bun test src/routes/api.messages.test.ts && bun test src/routes/api.health.test.ts && bun test src/routes/api.workflow-runs.test.ts && bun test src/adapters/web/transport.test.ts && bun test src/adapters/web/persistence.test.ts", + "test": "bun test src/routes/api.workflows.test.ts && bun test src/routes/api.webhook-rules.test.ts && bun test src/routes/api.conversations.test.ts && bun test src/routes/api.codebases.test.ts && bun test src/routes/api.messages.test.ts && bun test src/routes/api.health.test.ts && bun test src/routes/api.workflow-runs.test.ts && bun test src/adapters/web/transport.test.ts && bun test src/adapters/web/persistence.test.ts", "type-check": "bun x tsc --noEmit", "setup-auth": "bun src/scripts/setup-auth.ts" }, diff --git a/packages/server/src/routes/api.codebases.test.ts b/packages/server/src/routes/api.codebases.test.ts index 0265a359e1..6b475936c2 100644 --- a/packages/server/src/routes/api.codebases.test.ts +++ b/packages/server/src/routes/api.codebases.test.ts @@ -49,6 +49,7 @@ mock.module('@archon/core', () => ({ } }, scanPathForSensitiveKeys: mock((_p: string) => ({ path: _p, findings: [] })), + dispatchMatchedWebhookRule: mock(async () => undefined), EnvLeakError: class EnvLeakError extends Error { constructor(public report: { path: string; findings: { file: string; keys: string[] }[] }) { super( @@ -164,6 +165,20 @@ mock.module('@archon/core/db/messages', () => ({ listMessages: mock(async () => []), })); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => 
null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); + mock.module('@archon/core/utils/commands', () => ({ findMarkdownFilesRecursive: mock(async () => []), })); diff --git a/packages/server/src/routes/api.conversations.test.ts b/packages/server/src/routes/api.conversations.test.ts index c5b53d9122..0d37fb83b3 100644 --- a/packages/server/src/routes/api.conversations.test.ts +++ b/packages/server/src/routes/api.conversations.test.ts @@ -40,6 +40,7 @@ mock.module('@archon/core', () => ({ } }, generateAndSetTitle: mockGenerateAndSetTitle, + dispatchMatchedWebhookRule: mock(async () => undefined), getArchonWorkspacesPath: () => '/tmp/.archon/workspaces', createLogger: () => ({ fatal: mock(() => undefined), @@ -85,6 +86,19 @@ const mockAddMessage = mock(async (_convId: string, _role: string, _content: str mock.module('@archon/core/db/messages', () => ({ addMessage: mockAddMessage, })); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); mock.module('@archon/core/db/codebases', () => ({ listCodebases: mock(async () => [{ default_cwd: '/tmp/project' }]), getCodebase: mock(async () => null), diff --git a/packages/server/src/routes/api.health.test.ts b/packages/server/src/routes/api.health.test.ts index 6cf895464e..0f6ec19e07 100644 --- a/packages/server/src/routes/api.health.test.ts +++ b/packages/server/src/routes/api.health.test.ts @@ -41,6 +41,7 @@ mock.module('@archon/core', () => ({ getArchonWorkspacesPath: () => '/tmp/.archon/workspaces', toSafeConfig: (config: unknown) => config, generateAndSetTitle: mock(async () => {}), + dispatchMatchedWebhookRule: mock(async () => undefined), createLogger: () => ({ fatal: mock(() => undefined), error: mock(() => undefined), @@ -163,6 +164,20 @@ mock.module('@archon/core/db/messages', () => ({ listMessages: mock(async () => []), })); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); + mock.module('@archon/core/utils/commands', () => ({ findMarkdownFilesRecursive: mock(async () => []), })); diff --git a/packages/server/src/routes/api.messages.test.ts b/packages/server/src/routes/api.messages.test.ts index 3e799c41d4..bfbb74a18d 100644 --- a/packages/server/src/routes/api.messages.test.ts +++ b/packages/server/src/routes/api.messages.test.ts @@ -51,6 +51,7 @@ mock.module('@archon/core', () => ({ }, getArchonWorkspacesPath: () => 
'/tmp/.archon/workspaces', generateAndSetTitle: mock(async () => {}), + dispatchMatchedWebhookRule: mock(async () => undefined), createLogger: () => ({ fatal: mock(() => undefined), error: mock(() => undefined), @@ -148,6 +149,20 @@ mock.module('@archon/core/db/messages', () => ({ listMessages: mockListMessages, })); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); + mock.module('@archon/core/utils/commands', () => ({ findMarkdownFilesRecursive: mock(async () => []), })); diff --git a/packages/server/src/routes/api.ts b/packages/server/src/routes/api.ts index ed267c1d41..58df7bd20b 100644 --- a/packages/server/src/routes/api.ts +++ b/packages/server/src/routes/api.ts @@ -27,6 +27,7 @@ import { registerRepository, ConversationNotFoundError, generateAndSetTitle, + dispatchMatchedWebhookRule, EnvLeakError, scanPathForSensitiveKeys, } from '@archon/core'; @@ -69,6 +70,7 @@ import * as isolationEnvDb from '@archon/core/db/isolation-environments'; import * as workflowDb from '@archon/core/db/workflows'; import * as workflowEventDb from '@archon/core/db/workflow-events'; import * as messageDb from '@archon/core/db/messages'; +import * as webhookRuleDb from '@archon/core/db/webhook-rules'; import { errorSchema } from './schemas/common.schemas'; import { updateCheckResponseSchema } from './schemas/system.schemas'; import { @@ -122,6 +124,15 @@ import { configResponseSchema, codebaseEnvironmentsResponseSchema, } from './schemas/config.schemas'; +import { + webhookRuleSchema, + webhookRuleListResponseSchema, + webhookRuleBodySchema, + webhookRuleUpdateBodySchema, + webhookRuleIdParamsSchema, + webhookRuleOptionsResponseSchema, + deleteWebhookRuleResponseSchema, +} from './schemas/webhook-rule.schemas'; // Read app version: use build-time constant in binary, package.json in dev let appVersion = 'unknown'; @@ -269,6 +280,97 @@ const getCommandsRoute = createRoute({ }, }); +const listWebhookRulesRoute = createRoute({ + method: 'get', + path: '/api/webhook-rules', + tags: ['Webhook Rules'], + summary: 'List configured webhook rules', + responses: { + 200: { + content: { 'application/json': { schema: webhookRuleListResponseSchema } }, + description: 'Configured webhook rules', + }, + 500: jsonError('Server error'), + }, +}); + +const getWebhookRuleOptionsRoute = createRoute({ + method: 'get', + path: '/api/webhook-rules/options', + tags: ['Webhook Rules'], + summary: 'List codebase and workflow options for webhook rules', + responses: { + 200: { + content: { 'application/json': { schema: webhookRuleOptionsResponseSchema } }, + description: 'Webhook rule options', + }, + 500: jsonError('Server error'), + }, +}); + +const createWebhookRuleRoute = createRoute({ + method: 'post', + path: '/api/webhook-rules', + tags: ['Webhook Rules'], + summary: 'Create a slug-based webhook rule', + request: { + body: { + content: { 'application/json': { schema: webhookRuleBodySchema } }, + required: true, + }, + }, + responses: { + 200: { + content: { 'application/json': { schema: webhookRuleSchema } }, + description: 'Created webhook rule', + }, + 400: jsonError('Bad 
request'),
+      409: jsonError('Conflict'),
+      500: jsonError('Server error'),
+    },
+  },
+});
+
+const updateWebhookRuleRoute = createRoute({
+  method: 'patch',
+  path: '/api/webhook-rules/{id}',
+  tags: ['Webhook Rules'],
+  summary: 'Update a slug-based webhook rule',
+  request: {
+    params: webhookRuleIdParamsSchema,
+    body: {
+      content: { 'application/json': { schema: webhookRuleUpdateBodySchema } },
+      required: true,
+    },
+  },
+  responses: {
+    200: {
+      content: { 'application/json': { schema: webhookRuleSchema } },
+      description: 'Updated webhook rule',
+    },
+    400: jsonError('Bad request'),
+    404: jsonError('Not found'),
+    409: jsonError('Conflict'),
+    500: jsonError('Server error'),
+  },
+});
+
+const deleteWebhookRuleRoute = createRoute({
+  method: 'delete',
+  path: '/api/webhook-rules/{id}',
+  tags: ['Webhook Rules'],
+  summary: 'Delete a webhook rule',
+  request: {
+    params: webhookRuleIdParamsSchema,
+  },
+  responses: {
+    200: {
+      content: { 'application/json': { schema: deleteWebhookRuleResponseSchema } },
+      description: 'Webhook rule deleted',
+    },
+    500: jsonError('Server error'),
+  },
+});
+
 // =========================================================================
 // Conversation route configs
 // =========================================================================
@@ -865,7 +967,7 @@ export function registerApiRoutes(
 ): void {
   function apiError(
     c: Context,
-    status: 400 | 404 | 422 | 500,
+    status: 400 | 404 | 409 | 422 | 500,
     message: string,
     detail?: string
   ): Response {
@@ -885,10 +987,168 @@ export function registerApiRoutes(
     });
   }
 
+  function serializeWebhookRule(
+    rule:
+      | Awaited<ReturnType<typeof webhookRuleDb.getWebhookRule>>
+      | Awaited<ReturnType<typeof webhookRuleDb.listWebhookRules>>[number],
+    codebaseName?: string
+  ): {
+    id: string;
+    codebaseId: string;
+    codebaseName: string;
+    urlSlug: string;
+    workflowName: string;
+    enabled: boolean;
+    createdAt: string;
+    updatedAt: string;
+  } {
+    if (!rule) {
+      throw new Error('Webhook rule serialization requires a rule');
+    }
+
+    return {
+      id: rule.id,
+      codebaseId: rule.codebase_id,
+      codebaseName:
+        codebaseName ??
+        ('codebase_name' in rule && typeof rule.codebase_name === 'string'
+          ? rule.codebase_name
+          : ''),
+      urlSlug: rule.path_slug,
+      workflowName: rule.workflow_name,
+      enabled: rule.enabled,
+      createdAt:
+        rule.created_at instanceof Date ? rule.created_at.toISOString() : String(rule.created_at),
+      updatedAt:
+        rule.updated_at instanceof Date ? rule.updated_at.toISOString() : String(rule.updated_at),
+    };
+  }
+
+  async function discoverWorkflowsForCodebase(
+    codebase: Awaited<ReturnType<typeof codebaseDb.getCodebase>>
+  ): Promise<{ name: string; description: string | null; source: 'project' | 'bundled' }[]> {
+    if (!codebase) return [];
+
+    const result = await discoverWorkflowsWithConfig(codebase.default_cwd, loadConfig);
+    return result.workflows.map(entry => ({
+      name: entry.workflow.name,
+      description: entry.workflow.description ?? null,
+      source: entry.source,
+    }));
+  }
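+
+  // Contract note (illustrative summary of the helper below): resolves to
+  // { ok: true, codebase } only when the codebase exists, the slug is
+  // non-empty, and the workflow is discoverable for that codebase; otherwise
+  // { ok: false, status: 400 | 404, message, detail? } for the route handlers
+  // to surface via apiError.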
+  async function validateWebhookRuleTarget(input: {
+    codebaseId: string;
+    urlSlug: string;
+    workflowName: string;
+  }): Promise<
+    | { ok: true; codebase: NonNullable<Awaited<ReturnType<typeof codebaseDb.getCodebase>>> }
+    | { ok: false; status: 400 | 404; message: string; detail?: string }
+  > {
+    const codebase = await codebaseDb.getCodebase(input.codebaseId);
+    if (!codebase) {
+      return {
+        ok: false,
+        status: 404,
+        message: 'Codebase not found',
+        detail: `No codebase with id "${input.codebaseId}"`,
+      };
+    }
+
+    if (!input.urlSlug.trim()) {
+      return {
+        ok: false,
+        status: 400,
+        message: 'Webhook URL slug is required',
+        detail: 'urlSlug must not be empty',
+      };
+    }
+
+    const workflows = await discoverWorkflowsForCodebase(codebase);
+    if (!workflows.some(workflow => workflow.name === input.workflowName)) {
+      return {
+        ok: false,
+        status: 400,
+        message: 'Workflow not found for codebase',
+        detail: `Workflow "${input.workflowName}" is not available for codebase "${codebase.name}"`,
+      };
+    }
+
+    return { ok: true, codebase };
+  }
+
   // CORS for Web UI — allow-all is fine for a single-developer tool.
   // Override with WEB_UI_ORIGIN env var to restrict if exposing publicly.
   app.use('/api/*', cors({ origin: process.env.WEB_UI_ORIGIN || '*' }));
 
+  app.post('/webhooks/:slug', async c => {
+    const pathSlug = c.req.param('slug') ?? '';
+    const contentType = c.req.header('content-type');
+
+    try {
+      const matchedRule = await webhookRuleDb.findWebhookRuleBySlug(pathSlug);
+      if (!matchedRule) {
+        return c.json({ error: 'Webhook rule not found' }, 404);
+      }
+
+      const codebase = await codebaseDb.getCodebase(matchedRule.codebase_id);
+      if (!codebase) {
+        getLog().error(
+          { pathSlug, codebaseId: matchedRule.codebase_id },
+          'generic_webhook_codebase_not_found'
+        );
+        return c.json({ error: 'Webhook rule target codebase not found' }, 404);
+      }
+
+      const rawBody = await c.req.text();
+      const conversationId = `webhook-${pathSlug}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+      const conversation = await conversationDb.getOrCreateConversation(
+        'web',
+        conversationId,
+        codebase.id
+      );
+      webAdapter.setConversationDbId(conversation.platform_conversation_id, conversation.id);
+
+      if (conversation.codebase_id !== codebase.id || conversation.cwd !== codebase.default_cwd) {
+        await conversationDb.updateConversation(conversation.id, {
+          codebase_id: codebase.id,
+          cwd: codebase.default_cwd,
+        });
+      }
+
+      await lockManager.acquireLock(conversationId, async () => {
+        await dispatchMatchedWebhookRule({
+          platform: webAdapter,
+          conversationId,
+          conversation: {
+            ...conversation,
+            codebase_id: codebase.id,
+            cwd: codebase.default_cwd,
+          },
+          codebase,
+          pathSlug,
+          rawBody,
+          contentType,
+          matchedRule,
+          isolationHints: { workflowType: 'thread', workflowId: conversationId },
+        });
+      });
+
+      return c.json(
+        {
+          accepted: true,
+          conversationId,
+          ruleId: matchedRule.id,
+          workflowName: matchedRule.workflow_name,
+        },
+        202
+      );
+    } catch (error) {
+      getLog().error({ err: error, pathSlug }, 'generic_webhook_dispatch_failed');
+      return c.json({ error: 'Failed to dispatch webhook' }, 500);
+    }
+  });
+
   // Shared lock/dispatch/error handling for message and workflow endpoints
 
   /** Maximum allowed upload size per file (10 MB) */
   const MAX_UPLOAD_BYTES = 10 * 1024 * 1024;
@@ -1716,6 +1976,127 @@ export function registerApiRoutes(
     }
   });
 
+  // GET /api/webhook-rules - List configured webhook rules
+  registerOpenApiRoute(listWebhookRulesRoute, async c => {
+    try {
+      const rules = await
webhookRuleDb.listWebhookRules(); + return c.json({ rules: rules.map(rule => serializeWebhookRule(rule)) }); + } catch (error) { + getLog().error({ err: error }, 'list_webhook_rules_failed'); + return apiError(c, 500, 'Failed to list webhook rules'); + } + }); + + // GET /api/webhook-rules/options - Codebase and workflow options + // MUST be registered before /api/webhook-rules/{id} + registerOpenApiRoute(getWebhookRuleOptionsRoute, async c => { + try { + const codebases = await codebaseDb.listCodebases(); + const workflowsByCodebase = await Promise.all( + codebases.map(async codebase => ({ + codebaseId: codebase.id, + workflows: await discoverWorkflowsForCodebase(codebase), + })) + ); + + return c.json({ + codebases: codebases.map(codebase => ({ + id: codebase.id, + name: codebase.name, + })), + workflowsByCodebase, + }); + } catch (error) { + getLog().error({ err: error }, 'get_webhook_rule_options_failed'); + return apiError(c, 500, 'Failed to load webhook rule options'); + } + }); + + // POST /api/webhook-rules - Create a webhook rule + registerOpenApiRoute(createWebhookRuleRoute, async c => { + try { + const body = getValidatedBody(c, webhookRuleBodySchema); + const validation = await validateWebhookRuleTarget(body); + if (!validation.ok) { + return apiError(c, validation.status, validation.message, validation.detail); + } + + const rule = await webhookRuleDb.createWebhookRule({ + codebase_id: body.codebaseId, + path_slug: body.urlSlug, + workflow_name: body.workflowName, + enabled: body.enabled, + }); + + return c.json(serializeWebhookRule(rule, validation.codebase.name)); + } catch (error) { + if (webhookRuleDb.isWebhookRuleConflictError(error)) { + return apiError( + c, + 409, + 'Webhook rule conflict', + 'A webhook rule already exists for this URL slug' + ); + } + getLog().error({ err: error }, 'create_webhook_rule_failed'); + return apiError(c, 500, 'Failed to create webhook rule'); + } + }); + + // PATCH /api/webhook-rules/:id - Update a webhook rule + registerOpenApiRoute(updateWebhookRuleRoute, async c => { + try { + const id = c.req.param('id') ?? ''; + const body = getValidatedBody(c, webhookRuleUpdateBodySchema); + const existingRule = await webhookRuleDb.getWebhookRule(id); + if (!existingRule) { + return apiError(c, 404, 'Webhook rule not found'); + } + + const nextState = { + codebaseId: body.codebaseId ?? existingRule.codebase_id, + urlSlug: body.urlSlug ?? existingRule.path_slug, + workflowName: body.workflowName ?? existingRule.workflow_name, + }; + const validation = await validateWebhookRuleTarget(nextState); + if (!validation.ok) { + return apiError(c, validation.status, validation.message, validation.detail); + } + + const updatedRule = await webhookRuleDb.updateWebhookRule(id, { + codebase_id: body.codebaseId, + path_slug: body.urlSlug, + workflow_name: body.workflowName, + enabled: body.enabled, + }); + + return c.json(serializeWebhookRule(updatedRule, validation.codebase.name)); + } catch (error) { + if (webhookRuleDb.isWebhookRuleConflictError(error)) { + return apiError( + c, + 409, + 'Webhook rule conflict', + 'A webhook rule already exists for this URL slug' + ); + } + getLog().error({ err: error }, 'update_webhook_rule_failed'); + return apiError(c, 500, 'Failed to update webhook rule'); + } + }); + + // DELETE /api/webhook-rules/:id - Delete a webhook rule + registerOpenApiRoute(deleteWebhookRuleRoute, async c => { + try { + const id = c.req.param('id') ?? 
''; + await webhookRuleDb.deleteWebhookRule(id); + return c.json({ success: true }); + } catch (error) { + getLog().error({ err: error }, 'delete_webhook_rule_failed'); + return apiError(c, 500, 'Failed to delete webhook rule'); + } + }); + /** * Register a route with OpenAPI spec generation and input validation. * Zod validates inputs (query, params, body) at runtime via defaultHook. diff --git a/packages/server/src/routes/api.webhook-rules.test.ts b/packages/server/src/routes/api.webhook-rules.test.ts new file mode 100644 index 0000000000..df6e9d0492 --- /dev/null +++ b/packages/server/src/routes/api.webhook-rules.test.ts @@ -0,0 +1,472 @@ +import { beforeEach, describe, expect, mock, test } from 'bun:test'; +import { OpenAPIHono } from '@hono/zod-openapi'; +import type { ConversationLockManager } from '@archon/core'; +import type { WebAdapter } from '../adapters/web'; +import { validationErrorHook } from './openapi-defaults'; + +const mockDiscoverWorkflowsWithConfig = mock(async (cwd: string) => { + if (cwd === '/tmp/project-one') { + return { + workflows: [ + { + workflow: { name: 'archon-smart-pr-review', description: 'Review PR' }, + source: 'project' as const, + }, + { workflow: { name: 'triage', description: 'Triage issue' }, source: 'bundled' as const }, + ], + errors: [], + }; + } + + return { + workflows: [ + { workflow: { name: 'assist', description: 'Assist workflow' }, source: 'bundled' as const }, + ], + errors: [], + }; +}); + +const mockListCodebases = mock(async () => [ + { + id: 'cb-1', + name: 'SmelhausJosef/KoKot', + repository_url: 'https://github.com/SmelhausJosef/KoKot.git', + default_cwd: '/tmp/project-one', + ai_assistant_type: 'claude', + allow_env_keys: false, + commands: {}, + created_at: new Date('2026-01-01T00:00:00Z').toISOString(), + updated_at: new Date('2026-01-01T00:00:00Z').toISOString(), + }, + { + id: 'cb-2', + name: 'SmelhausJosef/Other', + repository_url: null, + default_cwd: '/tmp/project-two', + ai_assistant_type: 'claude', + allow_env_keys: false, + commands: {}, + created_at: new Date('2026-01-01T00:00:00Z').toISOString(), + updated_at: new Date('2026-01-01T00:00:00Z').toISOString(), + }, +]); + +const mockGetCodebase = mock(async (id: string) => { + const codebases = await mockListCodebases(); + return codebases.find(codebase => codebase.id === id) ?? 
null; +}); + +const mockListWebhookRules = mock(async () => [ + { + id: 'rule-1', + codebase_id: 'cb-1', + codebase_name: 'SmelhausJosef/KoKot', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + enabled: true, + created_at: new Date('2026-01-02T00:00:00Z'), + updated_at: new Date('2026-01-02T00:00:00Z'), + }, +]); + +const mockGetWebhookRule = mock(async (_id: string) => ({ + id: 'rule-1', + codebase_id: 'cb-1', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + enabled: true, + created_at: new Date('2026-01-02T00:00:00Z'), + updated_at: new Date('2026-01-02T00:00:00Z'), +})); + +const mockFindWebhookRuleBySlug = mock(async (_slug: string) => null); + +const mockCreateWebhookRule = mock(async () => ({ + id: 'rule-2', + codebase_id: 'cb-1', + path_slug: 'kokot-pr-review', + workflow_name: 'archon-smart-pr-review', + enabled: true, + created_at: new Date('2026-01-03T00:00:00Z'), + updated_at: new Date('2026-01-03T00:00:00Z'), +})); + +const mockUpdateWebhookRule = mock(async () => ({ + id: 'rule-1', + codebase_id: 'cb-1', + path_slug: 'kokot-triage', + workflow_name: 'triage', + enabled: false, + created_at: new Date('2026-01-02T00:00:00Z'), + updated_at: new Date('2026-01-04T00:00:00Z'), +})); + +const mockDeleteWebhookRule = mock(async (_id: string) => {}); +const mockIsWebhookRuleConflictError = mock(() => false); +const mockDispatchMatchedWebhookRule = mock(async () => undefined); + +mock.module('@archon/core', () => ({ + handleMessage: mock(async () => {}), + getDatabaseType: () => 'sqlite', + loadConfig: mock(async () => ({})), + toSafeConfig: (config: unknown) => config, + updateGlobalConfig: mock(async () => {}), + cloneRepository: mock(async () => ({ codebaseId: 'x', alreadyExisted: false })), + registerRepository: mock(async () => ({ codebaseId: 'x', alreadyExisted: false })), + dispatchMatchedWebhookRule: mockDispatchMatchedWebhookRule, + ConversationNotFoundError: class ConversationNotFoundError extends Error { + constructor(id: string) { + super(`Conversation not found: ${id}`); + this.name = 'ConversationNotFoundError'; + } + }, + generateAndSetTitle: mock(async () => {}), + EnvLeakError: class EnvLeakError extends Error {}, + scanPathForSensitiveKeys: mock(async () => ({ findings: [] })), + getArchonWorkspacesPath: () => '/tmp/.archon/workspaces', + createLogger: () => ({ + fatal: mock(() => undefined), + error: mock(() => undefined), + warn: mock(() => undefined), + info: mock(() => undefined), + debug: mock(() => undefined), + trace: mock(() => undefined), + child: mock(function (this: unknown) { + return this; + }), + bindings: mock(() => ({ module: 'test' })), + isLevelEnabled: mock(() => true), + level: 'info', + }), +})); + +mock.module('@archon/paths', () => ({ + createLogger: () => ({ + fatal: mock(() => undefined), + error: mock(() => undefined), + warn: mock(() => undefined), + info: mock(() => undefined), + debug: mock(() => undefined), + trace: mock(() => undefined), + child: mock(function (this: unknown) { + return this; + }), + bindings: mock(() => ({ module: 'test' })), + isLevelEnabled: mock(() => true), + level: 'info', + }), + getWorkflowFolderSearchPaths: mock(() => ['.archon/workflows']), + getCommandFolderSearchPaths: mock(() => ['.archon/commands']), + getDefaultCommandsPath: mock(() => '/tmp/.archon-test-nonexistent/commands/defaults'), + getDefaultWorkflowsPath: mock(() => '/tmp/.archon-test-nonexistent/workflows/defaults'), + getArchonWorkspacesPath: () => '/tmp/.archon/workspaces', + 
getRunArtifactsPath: () => '/tmp/.archon/artifacts', + getArchonHome: () => '/tmp/.archon', + isDocker: () => false, + checkForUpdate: mock(async () => null), + BUNDLED_IS_BINARY: false, + BUNDLED_VERSION: '0.0.0-test', +})); + +mock.module('@archon/workflows/workflow-discovery', () => ({ + discoverWorkflowsWithConfig: mockDiscoverWorkflowsWithConfig, +})); +mock.module('@archon/workflows/loader', () => ({ + parseWorkflow: mock(() => ({ + workflow: null, + error: { filename: '', error: 'stub', errorType: 'parse_error' }, + })), +})); +mock.module('@archon/workflows/command-validation', () => ({ + isValidCommandName: mock(() => true), +})); +mock.module('@archon/workflows/defaults', () => ({ + BUNDLED_WORKFLOWS: {}, + BUNDLED_COMMANDS: {}, + isBinaryBuild: mock(() => false), +})); +mock.module('@archon/git', () => ({ + removeWorktree: mock(async () => {}), + toRepoPath: (path: string) => path, + toWorktreePath: (path: string) => path, +})); +mock.module('@archon/core/db/conversations', () => ({ + findConversationByPlatformId: mock(async () => null), + listConversations: mock(async () => []), + getOrCreateConversation: mock(async (_platform: string, conversationId: string) => ({ + id: 'conv-1', + platform_conversation_id: conversationId, + title: null, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + platform_type: 'web', + deleted_at: null, + codebase_id: null, + ai_assistant_type: 'claude', + cwd: null, + })), + softDeleteConversation: mock(async () => {}), + updateConversationTitle: mock(async () => {}), + getConversationById: mock(async () => null), + updateConversation: mock(async () => {}), +})); +mock.module('@archon/core/db/codebases', () => ({ + listCodebases: mockListCodebases, + getCodebase: mockGetCodebase, + deleteCodebase: mock(async () => {}), +})); +mock.module('@archon/core/db/env-vars', () => ({ + listCodebaseEnvVars: mock(async () => []), + setCodebaseEnvVar: mock(async () => {}), + deleteCodebaseEnvVar: mock(async () => {}), +})); +mock.module('@archon/core/db/isolation-environments', () => ({ + listByCodebase: mock(async () => []), + updateStatus: mock(async () => {}), +})); +mock.module('@archon/core/db/workflows', () => ({ + listWorkflowRuns: mock(async () => []), + listDashboardRuns: mock(async () => ({ + runs: [], + total: 0, + counts: { all: 0, running: 0, completed: 0, failed: 0, cancelled: 0, pending: 0 }, + })), + getWorkflowRun: mock(async () => null), + cancelWorkflowRun: mock(async () => {}), + getWorkflowRunByWorkerPlatformId: mock(async () => null), + getRunningWorkflows: mock(async () => []), +})); +mock.module('@archon/core/db/workflow-events', () => ({ + listWorkflowEvents: mock(async () => []), +})); +mock.module('@archon/core/db/messages', () => ({ + addMessage: mock(async () => ({ + id: 'msg-1', + conversation_id: 'conv-1', + role: 'user', + content: 'hi', + metadata: '{}', + created_at: new Date().toISOString(), + })), + listMessages: mock(async () => []), +})); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mockListWebhookRules, + getWebhookRule: mockGetWebhookRule, + findWebhookRuleBySlug: mockFindWebhookRuleBySlug, + createWebhookRule: mockCreateWebhookRule, + updateWebhookRule: mockUpdateWebhookRule, + deleteWebhookRule: mockDeleteWebhookRule, + isWebhookRuleConflictError: mockIsWebhookRuleConflictError, +})); +mock.module('@archon/core/utils/commands', () => ({ + findMarkdownFilesRecursive: mock(async () => []), +})); + +import { registerApiRoutes } from './api'; + +function makeApp(): 
OpenAPIHono {
+  const app = new OpenAPIHono({ defaultHook: validationErrorHook });
+  const webAdapter = {
+    setConversationDbId: mock(() => {}),
+  } as unknown as WebAdapter;
+  const lockManager = {
+    acquireLock: mock(async (_id: string, handler: () => Promise<void>) => {
+      await handler();
+    }),
+  } as unknown as ConversationLockManager;
+  registerApiRoutes(app, webAdapter, lockManager);
+  return app;
+}
+
+describe('webhook rules routes', () => {
+  beforeEach(() => {
+    mockListCodebases.mockClear();
+    mockGetCodebase.mockClear();
+    mockListWebhookRules.mockClear();
+    mockGetWebhookRule.mockClear();
+    mockFindWebhookRuleBySlug.mockReset();
+    mockFindWebhookRuleBySlug.mockImplementation(async () => null);
+    mockCreateWebhookRule.mockClear();
+    mockUpdateWebhookRule.mockClear();
+    mockDeleteWebhookRule.mockClear();
+    mockDispatchMatchedWebhookRule.mockClear();
+    mockIsWebhookRuleConflictError.mockReset();
+    mockIsWebhookRuleConflictError.mockImplementation(() => false);
+    mockDiscoverWorkflowsWithConfig.mockClear();
+  });
+
+  test('GET /api/webhook-rules lists configured rules', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules');
+
+    expect(response.status).toBe(200);
+    const body = (await response.json()) as {
+      rules: Array<{ codebaseName: string; urlSlug: string; workflowName: string }>;
+    };
+
+    expect(body.rules).toHaveLength(1);
+    expect(body.rules[0]).toMatchObject({
+      codebaseName: 'SmelhausJosef/KoKot',
+      urlSlug: 'kokot-pr-review',
+      workflowName: 'archon-smart-pr-review',
+    });
+  });
+
+  test('GET /api/webhook-rules/options returns codebases and workflows', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules/options');
+
+    expect(response.status).toBe(200);
+    const body = (await response.json()) as {
+      codebases: Array<{ id: string; name: string }>;
+      workflowsByCodebase: Array<{ codebaseId: string; workflows: Array<{ name: string }> }>;
+    };
+
+    expect(body.codebases).toEqual([
+      { id: 'cb-1', name: 'SmelhausJosef/KoKot' },
+      { id: 'cb-2', name: 'SmelhausJosef/Other' },
+    ]);
+    expect(body.workflowsByCodebase).toContainEqual({
+      codebaseId: 'cb-1',
+      workflows: [
+        { name: 'archon-smart-pr-review', description: 'Review PR', source: 'project' },
+        { name: 'triage', description: 'Triage issue', source: 'bundled' },
+      ],
+    });
+  });
+
+  test('POST /api/webhook-rules creates a rule when slug and workflow are valid', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        codebaseId: 'cb-1',
+        urlSlug: 'kokot-pr-review',
+        workflowName: 'archon-smart-pr-review',
+        enabled: true,
+      }),
+    });
+
+    expect(response.status).toBe(200);
+    expect(mockCreateWebhookRule).toHaveBeenCalledWith({
+      codebase_id: 'cb-1',
+      path_slug: 'kokot-pr-review',
+      workflow_name: 'archon-smart-pr-review',
+      enabled: true,
+    });
+
+    const body = (await response.json()) as { codebaseName: string; workflowName: string };
+    expect(body).toMatchObject({
+      codebaseName: 'SmelhausJosef/KoKot',
+      workflowName: 'archon-smart-pr-review',
+    });
+  });
+
+  test('POST /api/webhook-rules rejects unknown workflows', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        codebaseId: 'cb-1',
+        urlSlug: 'kokot-unknown',
+        workflowName: 'does-not-exist',
+      }),
+    });
+
+
+    expect(response.status).toBe(400);
+    const body = (await response.json()) as { error: string };
+    expect(body.error).toBe('Workflow not found for codebase');
+  });
+
+  test('POST /api/webhook-rules returns conflict when slug already exists', async () => {
+    mockCreateWebhookRule.mockRejectedValueOnce(new Error('duplicate'));
+    mockIsWebhookRuleConflictError.mockReturnValueOnce(true);
+
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        codebaseId: 'cb-1',
+        urlSlug: 'kokot-pr-review',
+        workflowName: 'archon-smart-pr-review',
+      }),
+    });
+
+    expect(response.status).toBe(409);
+    const body = (await response.json()) as { error: string };
+    expect(body.error).toBe('Webhook rule conflict');
+  });
+
+  test('PATCH /api/webhook-rules/:id updates a rule after revalidating the target', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules/rule-1', {
+      method: 'PATCH',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        urlSlug: 'kokot-triage',
+        workflowName: 'triage',
+        enabled: false,
+      }),
+    });
+
+    const body = (await response.json()) as {
+      urlSlug: string;
+      workflowName: string;
+      enabled: boolean;
+    };
+    expect(response.status).toBe(200);
+    expect(mockUpdateWebhookRule).toHaveBeenCalledWith('rule-1', {
+      codebase_id: undefined,
+      path_slug: 'kokot-triage',
+      workflow_name: 'triage',
+      enabled: false,
+    });
+
+    expect(body).toMatchObject({
+      urlSlug: 'kokot-triage',
+      workflowName: 'triage',
+      enabled: false,
+    });
+  });
+
+  test('DELETE /api/webhook-rules/:id deletes a rule', async () => {
+    const app = makeApp();
+    const response = await app.request('/api/webhook-rules/rule-1', { method: 'DELETE' });
+
+    expect(response.status).toBe(200);
+    expect(mockDeleteWebhookRule).toHaveBeenCalledWith('rule-1');
+    expect(await response.json()).toEqual({ success: true });
+  });
+
+  test('POST /webhooks/:slug dispatches the configured workflow', async () => {
+    mockFindWebhookRuleBySlug.mockResolvedValueOnce({
+      id: 'rule-1',
+      codebase_id: 'cb-1',
+      path_slug: 'kokot-pr-review',
+      workflow_name: 'archon-smart-pr-review',
+      enabled: true,
+      created_at: new Date('2026-01-02T00:00:00Z'),
+      updated_at: new Date('2026-01-02T00:00:00Z'),
+    });
+
+    const app = makeApp();
+    const response = await app.request('/webhooks/kokot-pr-review', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ hello: 'world' }),
+    });
+
+    expect(response.status).toBe(202);
+    expect(mockDispatchMatchedWebhookRule).toHaveBeenCalledTimes(1);
+    expect(mockDispatchMatchedWebhookRule.mock.calls[0]?.[0]).toMatchObject({
+      pathSlug: 'kokot-pr-review',
+      contentType: 'application/json',
+      matchedRule: expect.objectContaining({ workflow_name: 'archon-smart-pr-review' }),
+      codebase: expect.objectContaining({ id: 'cb-1' }),
+    });
+  });
+});
diff --git a/packages/server/src/routes/api.workflow-runs.test.ts b/packages/server/src/routes/api.workflow-runs.test.ts
index 41bee85003..c14b87ed91 100644
--- a/packages/server/src/routes/api.workflow-runs.test.ts
+++ b/packages/server/src/routes/api.workflow-runs.test.ts
@@ -89,6 +89,7 @@ mock.module('@archon/core', () => ({
   },
   getArchonWorkspacesPath: () => '/tmp/.archon/workspaces',
   generateAndSetTitle: mockGenerateAndSetTitle,
+  dispatchMatchedWebhookRule: mock(async () => undefined),
   createLogger: () => ({
     fatal: mock(() => undefined),
     error: mock(() =>
undefined), @@ -190,6 +191,20 @@ mock.module('@archon/core/db/messages', () => ({ listMessages: mock(async () => []), })); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); + mock.module('@archon/core/utils/commands', () => ({ findMarkdownFilesRecursive: mock(async () => []), })); diff --git a/packages/server/src/routes/api.workflows.test.ts b/packages/server/src/routes/api.workflows.test.ts index e50b252640..072016a66e 100644 --- a/packages/server/src/routes/api.workflows.test.ts +++ b/packages/server/src/routes/api.workflows.test.ts @@ -39,6 +39,7 @@ mock.module('@archon/core', () => ({ removeWorktree: mock(async () => ({ success: true })), ConversationNotFoundError: class extends Error {}, getArchonWorkspacesPath: () => '/tmp/.archon/workspaces', + dispatchMatchedWebhookRule: mock(async () => undefined), createLogger: () => ({ fatal: mock(() => undefined), error: mock(() => undefined), @@ -91,6 +92,19 @@ mock.module('@archon/core/db/isolation-environments', () => ({})); mock.module('@archon/core/db/workflows', () => ({})); mock.module('@archon/core/db/workflow-events', () => ({})); mock.module('@archon/core/db/messages', () => ({})); +mock.module('@archon/core/db/webhook-rules', () => ({ + listWebhookRules: mock(async () => []), + getWebhookRule: mock(async () => null), + findWebhookRuleBySlug: mock(async () => null), + createWebhookRule: mock(async () => { + throw new Error('createWebhookRule not mocked in this test'); + }), + updateWebhookRule: mock(async () => { + throw new Error('updateWebhookRule not mocked in this test'); + }), + deleteWebhookRule: mock(async () => {}), + isWebhookRuleConflictError: mock(() => false), +})); const mockListCodebases = mock(async () => [{ default_cwd: '/tmp/project' }]); mock.module('@archon/core/db/codebases', () => ({ diff --git a/packages/server/src/routes/schemas/webhook-rule.schemas.ts b/packages/server/src/routes/schemas/webhook-rule.schemas.ts new file mode 100644 index 0000000000..153452f537 --- /dev/null +++ b/packages/server/src/routes/schemas/webhook-rule.schemas.ts @@ -0,0 +1,83 @@ +import { z } from '@hono/zod-openapi'; +import { workflowSourceSchema } from './workflow.schemas'; + +export const webhookSlugSchema = z + .string() + .min(1) + .max(80) + .regex(/^[a-z0-9][a-z0-9-_]*$/) + .openapi('WebhookSlug'); + +export const webhookRuleSchema = z + .object({ + id: z.string(), + codebaseId: z.string(), + codebaseName: z.string(), + urlSlug: webhookSlugSchema, + workflowName: z.string(), + enabled: z.boolean(), + createdAt: z.string(), + updatedAt: z.string(), + }) + .openapi('WebhookRule'); + +export const webhookRuleListResponseSchema = z + .object({ + rules: z.array(webhookRuleSchema), + }) + .openapi('WebhookRuleListResponse'); + +export const webhookRuleBodySchema = z + .object({ + codebaseId: z.string(), + urlSlug: webhookSlugSchema, + workflowName: z.string().min(1), + enabled: z.boolean().optional(), + }) + .openapi('CreateWebhookRuleBody'); + +export const webhookRuleUpdateBodySchema = z + .object({ + codebaseId: z.string().optional(), + urlSlug: webhookSlugSchema.optional(), + 
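+    // PATCH is a partial update: fields the caller omits keep their stored values.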
workflowName: z.string().min(1).optional(), + enabled: z.boolean().optional(), + }) + .openapi('UpdateWebhookRuleBody'); + +export const webhookRuleIdParamsSchema = z.object({ id: z.string() }); + +export const webhookRuleCodebaseOptionSchema = z + .object({ + id: z.string(), + name: z.string(), + }) + .openapi('WebhookRuleCodebaseOption'); + +export const webhookWorkflowOptionSchema = z + .object({ + name: z.string(), + description: z.string().nullable(), + source: workflowSourceSchema, + }) + .openapi('WebhookWorkflowOption'); + +export const webhookWorkflowsByCodebaseSchema = z + .object({ + codebaseId: z.string(), + workflows: z.array(webhookWorkflowOptionSchema), + }) + .openapi('WebhookWorkflowsByCodebase'); + +export const webhookRuleOptionsResponseSchema = z + .object({ + codebases: z.array(webhookRuleCodebaseOptionSchema), + workflowsByCodebase: z.array(webhookWorkflowsByCodebaseSchema), + }) + .openapi('WebhookRuleOptionsResponse'); + +export const deleteWebhookRuleResponseSchema = z + .object({ + success: z.boolean(), + }) + .openapi('DeleteWebhookRuleResponse'); diff --git a/packages/web/src/App.tsx b/packages/web/src/App.tsx index d308640c9e..55c9eb300a 100644 --- a/packages/web/src/App.tsx +++ b/packages/web/src/App.tsx @@ -11,6 +11,7 @@ import { WorkflowsPage } from '@/routes/WorkflowsPage'; import { WorkflowExecutionPage } from '@/routes/WorkflowExecutionPage'; import { WorkflowBuilderPage } from '@/routes/WorkflowBuilderPage'; import { SettingsPage } from '@/routes/SettingsPage'; +import { WebhooksPage } from '@/routes/WebhooksPage'; interface ErrorBoundaryState { hasError: boolean; @@ -76,6 +77,7 @@ export function App(): React.ReactElement { } /> } /> } /> + } /> } /> diff --git a/packages/web/src/components/layout/TopNav.tsx b/packages/web/src/components/layout/TopNav.tsx index 45924f5004..8fadea5cb3 100644 --- a/packages/web/src/components/layout/TopNav.tsx +++ b/packages/web/src/components/layout/TopNav.tsx @@ -1,6 +1,6 @@ import { NavLink, Link } from 'react-router'; import { useQuery } from '@tanstack/react-query'; -import { LayoutDashboard, MessageSquare, Workflow, Settings } from 'lucide-react'; +import { LayoutDashboard, MessageSquare, Workflow, Settings, Webhook } from 'lucide-react'; import { listWorkflowRuns, getUpdateCheck } from '@/lib/api'; import { cn } from '@/lib/utils'; @@ -8,6 +8,7 @@ const tabs = [ { to: '/chat', end: false, icon: MessageSquare, label: 'Chat' }, { to: '/dashboard', end: true, icon: LayoutDashboard, label: 'Dashboard' }, { to: '/workflows', end: false, icon: Workflow, label: 'Workflows' }, + { to: '/webhooks', end: false, icon: Webhook, label: 'Webhooks' }, { to: '/settings', end: false, icon: Settings, label: 'Settings' }, ] as const; diff --git a/packages/web/src/lib/api.generated.d.ts b/packages/web/src/lib/api.generated.d.ts index 193c619588..9906c724d7 100644 --- a/packages/web/src/lib/api.generated.d.ts +++ b/packages/web/src/lib/api.generated.d.ts @@ -726,6 +726,256 @@ export interface paths { patch?: never; trace?: never; }; + '/api/webhook-rules': { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** List configured webhook rules */ + get: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Configured webhook rules */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['WebhookRuleListResponse']; + }; + }; + /** @description 
Server error */ + 500: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + }; + }; + put?: never; + /** Create a slug-based webhook rule */ + post: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody: { + content: { + 'application/json': components['schemas']['CreateWebhookRuleBody']; + }; + }; + responses: { + /** @description Created webhook rule */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['WebhookRule']; + }; + }; + /** @description Bad request */ + 400: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + /** @description Conflict */ + 409: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + /** @description Server error */ + 500: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + }; + }; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + '/api/webhook-rules/options': { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** List codebase and workflow options for webhook rules */ + get: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Webhook rule options */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['WebhookRuleOptionsResponse']; + }; + }; + /** @description Server error */ + 500: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + }; + }; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + '/api/webhook-rules/{id}': { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** Delete a webhook rule */ + delete: { + parameters: { + query?: never; + header?: never; + path: { + id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Webhook rule deleted */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['DeleteWebhookRuleResponse']; + }; + }; + /** @description Server error */ + 500: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + }; + }; + options?: never; + head?: never; + /** Update a slug-based webhook rule */ + patch: { + parameters: { + query?: never; + header?: never; + path: { + id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + 'application/json': components['schemas']['UpdateWebhookRuleBody']; + }; + }; + responses: { + /** @description Updated webhook rule */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['WebhookRule']; + }; + }; + /** @description Bad request */ + 400: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + /** @description Not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': 
components['schemas']['Error']; + }; + }; + /** @description Conflict */ + 409: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + /** @description Server error */ + 500: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['Error']; + }; + }; + }; + }; + trace?: never; + }; '/api/workflows': { parameters: { query?: never; @@ -2039,6 +2289,54 @@ export interface components { key: string; value: string; }; + WebhookSlug: string; + WebhookRule: { + id: string; + codebaseId: string; + codebaseName: string; + urlSlug: components['schemas']['WebhookSlug']; + workflowName: string; + enabled: boolean; + createdAt: string; + updatedAt: string; + }; + WebhookRuleListResponse: { + rules: components['schemas']['WebhookRule'][]; + }; + WebhookRuleCodebaseOption: { + id: string; + name: string; + }; + /** @enum {string} */ + WorkflowSource: 'project' | 'bundled'; + WebhookWorkflowOption: { + name: string; + description: string | null; + source: components['schemas']['WorkflowSource']; + }; + WebhookWorkflowsByCodebase: { + codebaseId: string; + workflows: components['schemas']['WebhookWorkflowOption'][]; + }; + WebhookRuleOptionsResponse: { + codebases: components['schemas']['WebhookRuleCodebaseOption'][]; + workflowsByCodebase: components['schemas']['WebhookWorkflowsByCodebase'][]; + }; + CreateWebhookRuleBody: { + codebaseId: string; + urlSlug: components['schemas']['WebhookSlug']; + workflowName: string; + enabled?: boolean; + }; + UpdateWebhookRuleBody: { + codebaseId?: string; + urlSlug?: components['schemas']['WebhookSlug']; + workflowName?: string; + enabled?: boolean; + }; + DeleteWebhookRuleResponse: { + success: boolean; + }; DagNode: { id: string; depends_on?: string[]; @@ -2351,8 +2649,6 @@ export interface components { }; nodes: components['schemas']['DagNode'][]; }; - /** @enum {string} */ - WorkflowSource: 'project' | 'bundled'; WorkflowListEntry: { workflow: components['schemas']['WorkflowDefinition']; source: components['schemas']['WorkflowSource']; diff --git a/packages/web/src/lib/api.ts b/packages/web/src/lib/api.ts index 6c81aa66b1..e175110a89 100644 --- a/packages/web/src/lib/api.ts +++ b/packages/web/src/lib/api.ts @@ -8,6 +8,11 @@ import type { components } from '@/lib/api.generated'; export type WorkflowDefinition = components['schemas']['WorkflowDefinition']; export type DagNode = components['schemas']['DagNode']; +export type WebhookRuleResponse = components['schemas']['WebhookRule']; +export type WebhookWorkflowOption = components['schemas']['WebhookWorkflowOption']; +export type WebhookRulesOptionsResponse = components['schemas']['WebhookRuleOptionsResponse']; +export type CreateWebhookRuleBody = components['schemas']['CreateWebhookRuleBody']; +export type UpdateWebhookRuleBody = components['schemas']['UpdateWebhookRuleBody']; /** * Base URL for SSE streams. 
In dev, bypasses Vite proxy by connecting directly
@@ -182,6 +187,42 @@ export async function deleteCodebase(id: string): Promise<{ success: boolean }>
   return fetchJSON<{ success: boolean }>(`/api/codebases/${id}`, { method: 'DELETE' });
 }
 
+export async function listWebhookRules(): Promise<WebhookRuleResponse[]> {
+  const result = await fetchJSON<{ rules: WebhookRuleResponse[] }>('/api/webhook-rules');
+  return result.rules;
+}
+
+export async function getWebhookRuleOptions(): Promise<WebhookRulesOptionsResponse> {
+  return fetchJSON<WebhookRulesOptionsResponse>('/api/webhook-rules/options');
+}
+
+export async function createWebhookRule(
+  input: CreateWebhookRuleBody
+): Promise<WebhookRuleResponse> {
+  return fetchJSON<WebhookRuleResponse>('/api/webhook-rules', {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(input),
+  });
+}
+
+export async function updateWebhookRule(
+  id: string,
+  input: UpdateWebhookRuleBody
+): Promise<WebhookRuleResponse> {
+  return fetchJSON<WebhookRuleResponse>(`/api/webhook-rules/${encodeURIComponent(id)}`, {
+    method: 'PATCH',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(input),
+  });
+}
+
+export async function deleteWebhookRule(id: string): Promise<{ success: boolean }> {
+  return fetchJSON<{ success: boolean }>(`/api/webhook-rules/${encodeURIComponent(id)}`, {
+    method: 'DELETE',
+  });
+}
+
 export interface WorkflowRunResponse {
   id: string;
   workflow_name: string;
diff --git a/packages/web/src/routes/SettingsPage.tsx b/packages/web/src/routes/SettingsPage.tsx
index 07a07690fc..7599036597 100644
--- a/packages/web/src/routes/SettingsPage.tsx
+++ b/packages/web/src/routes/SettingsPage.tsx
@@ -553,7 +553,7 @@ function AssistantConfigSection({ config }: { config: SafeConfigResponse }): Rea
           onChange={e => {
             setCodexModel(e.target.value);
           }}
-          placeholder="gpt-5.3-codex"
+          placeholder="gpt-5.4"
         />
diff --git a/packages/web/src/routes/WebhooksPage.tsx b/packages/web/src/routes/WebhooksPage.tsx
new file mode 100644
index 0000000000..a3ef0fe3bb
--- /dev/null
+++ b/packages/web/src/routes/WebhooksPage.tsx
@@ -0,0 +1,393 @@
+import { useMemo, useState } from 'react';
+import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
+import { Link2, Pencil, Plus, Trash2, Webhook } from 'lucide-react';
+import {
+  type CreateWebhookRuleBody,
+  type UpdateWebhookRuleBody,
+  createWebhookRule,
+  deleteWebhookRule,
+  getWebhookRuleOptions,
+  listWebhookRules,
+  updateWebhookRule,
+} from '@/lib/api';
+import type { WebhookRuleResponse, WebhookRulesOptionsResponse } from '@/lib/api';
+import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
+import { Button } from '@/components/ui/button';
+import {
+  Dialog,
+  DialogContent,
+  DialogDescription,
+  DialogHeader,
+  DialogTitle,
+} from '@/components/ui/dialog';
+
+const selectClass =
+  'h-9 rounded-md border border-border bg-surface-elevated px-3 text-sm text-text-primary focus:outline-none focus:ring-1 focus:ring-ring [&>option]:bg-surface-elevated [&>option]:text-text-primary';
+
+const inputClass =
+  'h-9 rounded-md border border-border bg-surface-elevated px-3 text-sm text-text-primary placeholder:text-text-secondary/70 focus:outline-none focus:ring-1 focus:ring-ring';
+
+interface RuleFormState {
+  codebaseId: string;
+  workflowName: string;
+  urlSlug: string;
+  enabled: boolean;
+}
+
+function buildInitialForm(options: WebhookRulesOptionsResponse | undefined): RuleFormState {
+  const firstCodebase = options?.codebases[0];
+  const firstWorkflow = firstCodebase
+    ? options?.workflowsByCodebase.find(entry => entry.codebaseId === firstCodebase.id)
+        ?.workflows[0]
+    : undefined;
+
+  return {
+    codebaseId: firstCodebase?.id ?? '',
+    workflowName: firstWorkflow?.name ?? '',
+    urlSlug: '',
+    enabled: true,
+  };
+}
+
+export function WebhooksPage(): React.ReactElement {
+  const queryClient = useQueryClient();
+  const [dialogOpen, setDialogOpen] = useState(false);
+  const [editingRule, setEditingRule] = useState<WebhookRuleResponse | null>(null);
+  const [formState, setFormState] = useState<RuleFormState>(buildInitialForm(undefined));
+  const [mutationError, setMutationError] = useState<string | null>(null);
+
+  const { data: rules, isLoading: rulesLoading } = useQuery({
+    queryKey: ['webhookRules'],
+    queryFn: listWebhookRules,
+  });
+
+  const { data: options, isLoading: optionsLoading } = useQuery({
+    queryKey: ['webhookRuleOptions'],
+    queryFn: getWebhookRuleOptions,
+  });
+
+  const selectedWorkflows = useMemo(() => {
+    return (
+      options?.workflowsByCodebase.find(entry => entry.codebaseId === formState.codebaseId)
+        ?.workflows ?? []
+    );
+  }, [formState.codebaseId, options]);
+
+  const webhookPreviewUrl = useMemo(() => {
+    const slug = formState.urlSlug.trim();
+    return slug
+      ? `${window.location.origin}/webhooks/${slug}`
+      : `${window.location.origin}/webhooks/`;
+  }, [formState.urlSlug]);
+
+  const saveMutation = useMutation({
+    mutationFn: async (payload: RuleFormState) => {
+      const normalizedPayload = {
+        codebaseId: payload.codebaseId,
+        workflowName: payload.workflowName,
+        urlSlug: payload.urlSlug.trim(),
+        enabled: payload.enabled,
+      };
+
+      if (editingRule) {
+        const updatePayload: UpdateWebhookRuleBody = normalizedPayload;
+        return updateWebhookRule(editingRule.id, updatePayload);
+      }
+
+      const createPayload: CreateWebhookRuleBody = normalizedPayload;
+      return createWebhookRule(createPayload);
+    },
+    onSuccess: () => {
+      setDialogOpen(false);
+      setEditingRule(null);
+      setMutationError(null);
+      void queryClient.invalidateQueries({ queryKey: ['webhookRules'] });
+    },
+    onError: error => {
+      setMutationError(error instanceof Error ? error.message : 'Failed to save webhook rule');
+    },
+  });
+
+  const toggleMutation = useMutation({
+    mutationFn: ({ id, enabled }: { id: string; enabled: boolean }) =>
+      updateWebhookRule(id, { enabled }),
+    onSuccess: () => {
+      void queryClient.invalidateQueries({ queryKey: ['webhookRules'] });
+    },
+  });
+
+  const deleteMutation = useMutation({
+    mutationFn: deleteWebhookRule,
+    onSuccess: () => {
+      void queryClient.invalidateQueries({ queryKey: ['webhookRules'] });
+    },
+  });
+
+  function resetForm(): void {
+    setFormState(buildInitialForm(options));
+  }
+
+  function openCreateDialog(): void {
+    setEditingRule(null);
+    setMutationError(null);
+    setFormState(buildInitialForm(options));
+    setDialogOpen(true);
+  }
+
+  function openEditDialog(rule: WebhookRuleResponse): void {
+    setEditingRule(rule);
+    setMutationError(null);
+    setFormState({
+      codebaseId: rule.codebaseId,
+      workflowName: rule.workflowName,
+      urlSlug: rule.urlSlug,
+      enabled: rule.enabled,
+    });
+    setDialogOpen(true);
+  }
+
+  function handleCodebaseChange(codebaseId: string): void {
+    const workflows =
+      options?.workflowsByCodebase.find(entry => entry.codebaseId === codebaseId)?.workflows ?? [];
+
+    setFormState(current => ({
+      ...current,
+      codebaseId,
+      workflowName: workflows.some(workflow => workflow.name === current.workflowName)
+        ? current.workflowName
+        : (workflows[0]?.name ?? ''),
+    }));
+  }
+
+  function handleDelete(rule: WebhookRuleResponse): void {
+    const confirmed = window.confirm(
+      `Delete webhook rule for ${rule.codebaseName} (${rule.urlSlug})?`
+    );
+    if (!confirmed) return;
+    deleteMutation.mutate(rule.id);
+  }
+
+  const isLoading = rulesLoading || optionsLoading;
+
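+  // Render: page header, rules table, and the create/edit dialog. Closing the
+  // dialog (Dialog onOpenChange) clears editingRule and mutationError and
+  // resets the form before the next open.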
+  return (
+    /* Markup lost in extraction; recoverable structure: a page header with the
+       Webhook icon, the "Webhooks" title, and a button wired to
+       openCreateDialog; a "Webhook Rules" card that shows "Loading webhook
+       rules…" while loading, "No webhook rules configured yet." when empty,
+       and otherwise a table with Project / Workflow / URL / Status / Actions
+       columns. Each row renders rule.codebaseName, rule.workflowName, a
+       /webhooks/{rule.urlSlug} link, an Enabled/Disabled toggle backed by
+       toggleMutation, and edit/delete actions (openEditDialog, handleDelete).
+       A Dialog titled "Create webhook rule" or "Edit webhook rule" ("Choose a
+       project, choose a workflow, and give the webhook a URL slug.") holds
+       the codebase and workflow selects, the slug input with a live
+       webhookPreviewUrl preview, the mutationError message, a "No workflows
+       are available for the selected project." notice, and Cancel/Save
+       buttons that drive saveMutation. */
+ ); +} diff --git a/packages/web/vite.config.ts b/packages/web/vite.config.ts index 77d49e9b98..52538e578c 100644 --- a/packages/web/vite.config.ts +++ b/packages/web/vite.config.ts @@ -8,7 +8,9 @@ import { defineConfig, loadEnv } from 'vite'; export default defineConfig(({ mode }) => { // Load env from repo root so PORT from .env is available const env = loadEnv(mode, path.resolve(__dirname, '../..'), ''); - const apiPort = env.PORT ?? '3090'; + // Allow the web app to override the API port explicitly when the backend + // runs on a different port than the repo-wide PORT value. + const apiPort = env.VITE_API_PORT ?? env.ARCHON_API_PORT ?? env.PORT ?? '3090'; // Read version from root package.json const rootPkgPath = path.resolve(__dirname, '../../package.json'); diff --git a/packages/workflows/src/dag-executor.ts b/packages/workflows/src/dag-executor.ts index facfbd1068..4788797e9c 100644 --- a/packages/workflows/src/dag-executor.ts +++ b/packages/workflows/src/dag-executor.ts @@ -721,6 +721,7 @@ async function executeNodeInternal( artifactsDir: string, logDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, nodeOutputs: Map, resumeSessionId: string | undefined, @@ -800,6 +801,7 @@ async function executeNodeInternal( workflowRun.user_message, artifactsDir, baseBranch, + canonicalRepoPath, docsDir, issueContext, `dag node '${node.id}' prompt` @@ -1312,6 +1314,7 @@ async function executeBashNode( artifactsDir: string, logDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, nodeOutputs: Map, issueContext?: string @@ -1352,7 +1355,10 @@ async function executeBashNode( artifactsDir, baseBranch, docsDir, - issueContext + issueContext, + undefined, + undefined, + canonicalRepoPath ); const finalScript = substituteNodeOutputRefs(substitutedScript, nodeOutputs, true); @@ -1462,6 +1468,7 @@ async function executeScriptNode( artifactsDir: string, logDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, nodeOutputs: Map, issueContext?: string @@ -1502,7 +1509,10 @@ async function executeScriptNode( artifactsDir, baseBranch, docsDir, - issueContext + issueContext, + undefined, + undefined, + canonicalRepoPath ); const finalScript = substituteNodeOutputRefs(substitutedScript, nodeOutputs, false); @@ -1709,6 +1719,7 @@ async function executeLoopNode( artifactsDir: string, logDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, nodeOutputs: Map, config: WorkflowConfig, @@ -1813,7 +1824,9 @@ async function executeLoopNode( baseBranch, docsDir, issueContext, - i === startIteration ? loopUserInput : '' + i === startIteration ? 
loopUserInput : '', + undefined, + canonicalRepoPath ); const finalPrompt = substituteNodeOutputRefs(substitutedPrompt, nodeOutputs); @@ -2011,7 +2024,10 @@ async function executeLoopNode( artifactsDir, baseBranch, docsDir, - issueContext + issueContext, + undefined, + undefined, + canonicalRepoPath ); const substitutedBash = substituteNodeOutputRefs( bashPrompt, @@ -2200,6 +2216,7 @@ async function executeApprovalNode( artifactsDir: string, logDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, nodeOutputs: Map, config: WorkflowConfig, @@ -2263,7 +2280,8 @@ async function executeApprovalNode( docsDir, issueContext, undefined, // loopUserInput - rejectionReason + rejectionReason, + canonicalRepoPath ); // Build a synthetic PromptNode to reuse executeNodeInternal @@ -2298,6 +2316,7 @@ async function executeApprovalNode( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, undefined, // fresh session @@ -2373,7 +2392,8 @@ export async function executeDagWorkflow( config: WorkflowConfig, configuredCommandFolder?: string, issueContext?: string, - priorCompletedNodes?: Map + priorCompletedNodes?: Map, + canonicalRepoPath = '' ): Promise { const dagStartTime = Date.now(); const workflowLevelOptions = { @@ -2591,6 +2611,7 @@ export async function executeDagWorkflow( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, issueContext @@ -2640,6 +2661,7 @@ export async function executeDagWorkflow( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, config, @@ -2662,6 +2684,7 @@ export async function executeDagWorkflow( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, config, @@ -2716,6 +2739,7 @@ export async function executeDagWorkflow( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, issueContext @@ -2763,6 +2787,7 @@ export async function executeDagWorkflow( artifactsDir, logDir, baseBranch, + canonicalRepoPath, docsDir, nodeOutputs, // Always pass the prior session ID — forkSession:true in executeNodeInternal diff --git a/packages/workflows/src/executor-preamble.test.ts b/packages/workflows/src/executor-preamble.test.ts index fd2b44ec3b..4d505807aa 100644 --- a/packages/workflows/src/executor-preamble.test.ts +++ b/packages/workflows/src/executor-preamble.test.ts @@ -37,6 +37,7 @@ mock.module('@archon/paths', () => ({ // --------------------------------------------------------------------------- mock.module('@archon/git', () => ({ + getCanonicalRepoPath: mock(async (p: string) => p), getDefaultBranch: mock(async () => 'main'), toRepoPath: mock((p: string) => p), })); diff --git a/packages/workflows/src/executor-shared.test.ts b/packages/workflows/src/executor-shared.test.ts index 84346f131e..a26fc5ec67 100644 --- a/packages/workflows/src/executor-shared.test.ts +++ b/packages/workflows/src/executor-shared.test.ts @@ -62,12 +62,41 @@ describe('substituteWorkflowVariables', () => { expect(prompt).toBe('Merge into develop'); }); + it('replaces $CANONICAL_REPO_PATH with resolved repo path', () => { + const { prompt } = substituteWorkflowVariables( + 'Save to $CANONICAL_REPO_PATH/.archon/workflows/test.yaml', + 'run-1', + 'msg', + '/tmp', + 'main', + 'docs/', + undefined, + undefined, + undefined, + '/repo/root' + ); + expect(prompt).toBe('Save to /repo/root/.archon/workflows/test.yaml'); + }); + it('throws when $BASE_BRANCH is referenced but empty', () => { expect(() => substituteWorkflowVariables('Merge into $BASE_BRANCH', 'run-1', 'msg', '/tmp', 
'', 'docs/') ).toThrow('No base branch could be resolved'); }); + it('throws when $CANONICAL_REPO_PATH is referenced but empty', () => { + expect(() => + substituteWorkflowVariables( + 'Save to $CANONICAL_REPO_PATH/.archon/workflows/test.yaml', + 'run-1', + 'msg', + '/tmp', + 'main', + 'docs/' + ) + ).toThrow('No canonical repository path could be resolved'); + }); + it('does not throw when $BASE_BRANCH is not referenced and baseBranch is empty', () => { const { prompt } = substituteWorkflowVariables( 'No branch reference here', @@ -216,6 +245,7 @@ describe('buildPromptWithContext', () => { 'msg', '/tmp', 'main', + '/repo/root', 'docs/', '## Issue #42\nDetails here', 'test prompt' @@ -231,6 +261,7 @@ describe('buildPromptWithContext', () => { 'msg', '/tmp', 'main', + '/repo/root', 'docs/', '## Issue #42\nDetails here', 'test prompt' @@ -247,6 +278,7 @@ describe('buildPromptWithContext', () => { 'msg', '/tmp', 'main', + '/repo/root', 'docs/', undefined, 'test prompt' diff --git a/packages/workflows/src/executor-shared.ts b/packages/workflows/src/executor-shared.ts index 0537609417..9aaa9ba924 100644 --- a/packages/workflows/src/executor-shared.ts +++ b/packages/workflows/src/executor-shared.ts @@ -257,6 +257,7 @@ export const CONTEXT_VAR_PATTERN_STR = '\\$(?:CONTEXT|EXTERNAL_CONTEXT|ISSUE_CON * - $USER_MESSAGE, $ARGUMENTS - The user's trigger message * - $ARTIFACTS_DIR - External artifacts directory for this workflow run * - $BASE_BRANCH - The base branch (from config or auto-detected) + * - $CANONICAL_REPO_PATH - The primary repository root, even when running from a worktree * - $CONTEXT, $EXTERNAL_CONTEXT, $ISSUE_CONTEXT - GitHub issue/PR context (if available) * - $DOCS_DIR - Documentation directory path (configured or default 'docs/') * - $LOOP_USER_INPUT - User feedback from interactive loop approval. Only populated on the @@ -275,7 +276,8 @@ export function substituteWorkflowVariables( docsDir: string, issueContext?: string, loopUserInput?: string, - rejectionReason?: string + rejectionReason?: string, + canonicalRepoPath?: string ): { prompt: string; contextSubstituted: boolean } { // Fail fast if the prompt references $BASE_BRANCH but no base branch could be resolved if (!baseBranch && prompt.includes('$BASE_BRANCH')) { @@ -284,6 +286,11 @@ export function substituteWorkflowVariables( 'Set the config value or use the --from flag to select a branch (e.g., --from dev).' ); } + if (!canonicalRepoPath && prompt.includes('$CANONICAL_REPO_PATH')) { + throw new Error( + 'No canonical repository path could be resolved. Workflow is running without a primary repo root.' + ); + } // Defensive: ensure docsDir always has a value (callers should resolve, but guard here) const resolvedDocsDir = docsDir || 'docs/'; @@ -295,6 +302,7 @@ export function substituteWorkflowVariables( .replace(/\$ARGUMENTS/g, userMessage) .replace(/\$ARTIFACTS_DIR/g, artifactsDir) .replace(/\$BASE_BRANCH/g, baseBranch) + .replace(/\$CANONICAL_REPO_PATH/g, canonicalRepoPath ?? '') .replace(/\$DOCS_DIR/g, resolvedDocsDir) .replace(/\$LOOP_USER_INPUT/g, loopUserInput ?? '') .replace(/\$REJECTION_REASON/g, rejectionReason ?? 
''); @@ -330,6 +338,7 @@ export function substituteWorkflowVariables( * @param userMessage - The user's trigger message for variable substitution * @param artifactsDir - The external artifacts directory for $ARTIFACTS_DIR substitution * @param baseBranch - The resolved base branch for $BASE_BRANCH substitution + * @param canonicalRepoPath - The resolved primary repo root for $CANONICAL_REPO_PATH substitution * @param docsDir - The resolved docs directory for $DOCS_DIR substitution * @param issueContext - Optional GitHub issue/PR context to substitute or append * @param logLabel - Human-readable label for logging (e.g., 'workflow step prompt') @@ -341,6 +350,7 @@ export function buildPromptWithContext( userMessage: string, artifactsDir: string, baseBranch: string, + canonicalRepoPath: string, docsDir: string, issueContext: string | undefined, logLabel: string @@ -352,7 +362,10 @@ export function buildPromptWithContext( artifactsDir, baseBranch, docsDir, - issueContext + issueContext, + undefined, + undefined, + canonicalRepoPath ); if (issueContext && !contextSubstituted) { diff --git a/packages/workflows/src/executor.test.ts b/packages/workflows/src/executor.test.ts index 0a91ac8299..c95c4f35c2 100644 --- a/packages/workflows/src/executor.test.ts +++ b/packages/workflows/src/executor.test.ts @@ -28,6 +28,7 @@ mock.module('@archon/paths', () => ({ // --- Mock git --- mock.module('@archon/git', () => ({ + getCanonicalRepoPath: mock(async (p: string) => p), getDefaultBranch: mock(async () => 'main'), toRepoPath: mock((p: string) => p), })); diff --git a/packages/workflows/src/executor.ts b/packages/workflows/src/executor.ts index e87ea9065b..7a631ee29a 100644 --- a/packages/workflows/src/executor.ts +++ b/packages/workflows/src/executor.ts @@ -7,7 +7,7 @@ import type { IWorkflowPlatform, WorkflowMessageMetadata } from './deps'; import type { WorkflowDeps, WorkflowConfig } from './deps'; import * as archonPaths from '@archon/paths'; import { createLogger } from '@archon/paths'; -import { getDefaultBranch, toRepoPath } from '@archon/git'; +import { getCanonicalRepoPath, getDefaultBranch, toRepoPath } from '@archon/git'; import type { WorkflowDefinition, WorkflowRun, WorkflowExecutionResult } from './schemas'; import { executeDagWorkflow } from './dag-executor'; import { logWorkflowStart, logWorkflowError } from './logger'; @@ -273,6 +273,16 @@ export async function executeWorkflow( } } + let canonicalRepoPath = ''; + try { + canonicalRepoPath = await getCanonicalRepoPath(cwd); + } catch (error) { + getLog().warn( + { err: error as Error, errorType: (error as Error).constructor.name, cwd }, + 'workflow.canonical_repo_auto_detect_failed' + ); + } + const docsDir = config.docsPath ?? 'docs/'; // Resolve provider and model once (used by all nodes) @@ -635,7 +645,8 @@ export async function executeWorkflow( config, configuredCommandFolder, issueContext, - dagPriorCompletedNodes + dagPriorCompletedNodes, + canonicalRepoPath ); // executeDagWorkflow throws on fatal errors; check DB status for result
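Taken together, the executor changes thread one value end to end: executeWorkflow resolves the primary repository root via getCanonicalRepoPath(cwd) (logging workflow.canonical_repo_auto_detect_failed and falling back to an empty string on failure), executeDagWorkflow forwards it into every node type, and substituteWorkflowVariables expands $CANONICAL_REPO_PATH or fails fast when a prompt references it but no root was resolved. A minimal sketch of the substitution behavior, mirroring the executor-shared tests above; the relative import path and the positional-argument labels in the comments are assumptions, not taken verbatim from this diff:

import { substituteWorkflowVariables } from './executor-shared';

// Happy path: $CANONICAL_REPO_PATH expands to the primary repository root,
// even when the node itself runs inside a worktree checkout.
const { prompt } = substituteWorkflowVariables(
  'Save to $CANONICAL_REPO_PATH/.archon/workflows/generated.yaml',
  'run-1',          // workflow run id
  'msg',            // user message ($USER_MESSAGE / $ARGUMENTS)
  '/tmp/artifacts', // $ARTIFACTS_DIR
  'main',           // $BASE_BRANCH
  'docs/',          // $DOCS_DIR
  undefined,        // issueContext
  undefined,        // loopUserInput
  undefined,        // rejectionReason
  '/repo/root'      // canonicalRepoPath, backing $CANONICAL_REPO_PATH
);
// prompt === 'Save to /repo/root/.archon/workflows/generated.yaml'

// Guard path: referencing $CANONICAL_REPO_PATH while canonicalRepoPath is
// empty throws 'No canonical repository path could be resolved. ...',
// mirroring the existing $BASE_BRANCH guard.

The empty-string default on executeDagWorkflow's new canonicalRepoPath parameter keeps older call sites compiling while still tripping that guard whenever a prompt actually uses the variable.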